1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
57 #include "fixed-value.h"
66 #include "langhooks.h"
70 /* Nonzero if we are folding constants inside an initializer; zero
72 int folding_initializer = 0;
74 /* The following constants represent a bit based encoding of GCC's
75 comparison operators. This encoding simplifies transformations
76 on relational comparison operators, such as AND and OR. */
77 enum comparison_code {
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree, int);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static tree combine_comparisons (enum tree_code, enum tree_code,
107 enum tree_code, tree, tree, tree);
108 static int operand_equal_for_comparison_p (tree, tree, tree);
109 static int twoval_comparison_p (tree, tree *, tree *, int *);
110 static tree eval_subst (tree, tree, tree, tree, tree);
111 static tree pedantic_omit_one_operand (tree, tree, tree);
112 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
113 static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
114 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
115 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
116 enum machine_mode *, int *, int *,
118 static int all_ones_mask_p (const_tree, int);
119 static tree sign_bit_p (tree, const_tree);
120 static int simple_operand_p (const_tree);
121 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
122 static tree range_predecessor (tree);
123 static tree range_successor (tree);
124 static tree make_range (tree, int *, tree *, tree *, bool *);
125 static tree build_range_check (tree, tree, int, tree, tree);
126 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
128 static tree fold_range_test (enum tree_code, tree, tree, tree);
129 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
130 static tree unextend (tree, int, int, tree);
131 static tree fold_truthop (enum tree_code, tree, tree, tree);
132 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
133 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
134 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
135 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (const_tree, const_tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
148 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
149 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
150 and SUM1. Then this yields nonzero if overflow occurred during the
153 Overflow occurs if A and B have the same sign, but A and SUM differ in
154 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
156 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
158 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
159 We do that by representing the two-word integer in 4 words, with only
160 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
161 number. The value of the word is LOWPART + HIGHPART * BASE. */
164 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
165 #define HIGHPART(x) \
166 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
167 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* NOTE(review): line-numbered excerpt — gaps in the embedded numbering
   (172-173, 175, 180) mean the `static void' header line and braces are
   missing from this copy; restore from the canonical source before use. */
169 /* Unpack a two-word integer into 4 words.
170 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
171 WORDS points to the array of HOST_WIDE_INTs. */
/* Splits LOW and HI into four half-word "digits" (value = LOWPART + HIGHPART*BASE). */
174 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
176 words[0] = LOWPART (low);
177 words[1] = HIGHPART (low);
178 words[2] = LOWPART (hi);
179 words[3] = HIGHPART (hi);
/* NOTE(review): excerpt gaps — listing lines 185-189 (return type, second
   parameter line `HOST_WIDE_INT *hi)', and braces) are missing here. */
182 /* Pack an array of 4 words into a two-word integer.
183 WORDS points to the array of words.
184 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
/* Inverse of encode: recombines the four half-word digits. */
187 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
190 *low = words[0] + words[1] * BASE;
191 *hi = words[2] + words[3] * BASE;
/* NOTE(review): excerpt with gaps in the embedded numbering (e.g. 198-199,
   202, 205, 207, 210-211, 218, 221, 224-226, 229-230, 237, 242, 244,
   246-249, 252-253, 255-260) — branch bodies and braces are missing from
   this copy; do not compile as-is. */
194 /* Force the double-word integer L1, H1 to be within the range of the
195 integer type TYPE. Stores the properly truncated and sign-extended
196 double-word integer in *LV, *HV. Returns true if the operation
197 overflows, that is, argument and result are different. */
200 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
201 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
203 unsigned HOST_WIDE_INT low0 = l1;
204 HOST_WIDE_INT high0 = h1;
206 int sign_extended_type;
/* Pointers and offsets use the target pointer precision — presumably the
   missing line here selects POINTER_SIZE; confirm against the original. */
208 if (POINTER_TYPE_P (type)
209 || TREE_CODE (type) == OFFSET_TYPE)
212 prec = TYPE_PRECISION (type);
214 /* Size types *are* sign extended. */
215 sign_extended_type = (!TYPE_UNSIGNED (type)
216 || (TREE_CODE (type) == INTEGER_TYPE
217 && TYPE_IS_SIZETYPE (type)));
219 /* First clear all bits that are beyond the type's precision. */
220 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
222 else if (prec > HOST_BITS_PER_WIDE_INT)
223 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
227 if (prec < HOST_BITS_PER_WIDE_INT)
228 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
231 /* Then do sign extension if necessary. */
232 if (!sign_extended_type)
233 /* No sign extension */;
234 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
235 /* Correct width already. */;
236 else if (prec > HOST_BITS_PER_WIDE_INT)
238 /* Sign extend top half? */
239 if (h1 & ((unsigned HOST_WIDE_INT)1
240 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
241 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
243 else if (prec == HOST_BITS_PER_WIDE_INT)
245 if ((HOST_WIDE_INT)l1 < 0)
250 /* Sign extend bottom half? */
251 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
254 l1 |= (HOST_WIDE_INT)(-1) << prec;
261 /* If the value didn't fit, signal overflow. */
262 return l1 != low0 || h1 != high0;
/* NOTE(review): excerpt gaps — listing lines 279-280, 283-284, 286-287,
   292, 294, 297-299, 301, 307-310 (return type `static tree', trailing
   parameters, the full overflow condition, braces and the `return t;')
   are missing from this copy. */
265 /* We force the double-int HIGH:LOW to the range of the type TYPE by
266 sign or zero extending it.
267 OVERFLOWABLE indicates if we are interested
268 in overflow of the value, when >0 we are only interested in signed
269 overflow, for <0 we are interested in any overflow. OVERFLOWED
270 indicates whether overflow has already occurred. CONST_OVERFLOWED
271 indicates whether constant overflow has already occurred. We force
272 T's value to be within range of T's type (by setting to 0 or 1 all
273 the bits outside the type's range). We set TREE_OVERFLOWED if,
274 OVERFLOWED is nonzero,
275 or OVERFLOWABLE is >0 and signed overflow occurs
276 or OVERFLOWABLE is <0 and any overflow occurs
277 We return a new tree node for the extended double-int. The node
278 is shared if no overflow flags are set. */
281 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
282 HOST_WIDE_INT high, int overflowable,
285 int sign_extended_type;
288 /* Size types *are* sign extended. */
289 sign_extended_type = (!TYPE_UNSIGNED (type)
290 || (TREE_CODE (type) == INTEGER_TYPE
291 && TYPE_IS_SIZETYPE (type)));
/* Truncate/extend first; OVERFLOW records whether the value changed. */
293 overflow = fit_double_type (low, high, &low, &high, type);
295 /* If we need to set overflow flags, return a new unshared node. */
296 if (overflowed || overflow)
300 || (overflowable > 0 && sign_extended_type))
302 tree t = make_node (INTEGER_CST);
303 TREE_INT_CST_LOW (t) = low;
304 TREE_INT_CST_HIGH (t) = high;
305 TREE_TYPE (t) = type;
306 TREE_OVERFLOW (t) = 1;
311 /* Else build a shared node. */
312 return build_int_cst_wide (type, low, high);
/* NOTE(review): excerpt gaps — listing lines 320-321, 325-326, 328-330,
   333-338, 340-342 (the `bool unsigned_p' parameter, the low-word add and
   carry computation, and the branch selecting which return runs) are
   missing from this copy. */
315 /* Add two doubleword integers with doubleword result.
316 Return nonzero if the operation overflows according to UNSIGNED_P.
317 Each argument is given as two `HOST_WIDE_INT' pieces.
318 One argument is L1 and H1; the other, L2 and H2.
319 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
322 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
323 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
324 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
327 unsigned HOST_WIDE_INT l;
/* High words are summed in unsigned arithmetic to avoid signed-overflow UB. */
331 h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1
332 + (unsigned HOST_WIDE_INT) h2
/* Unsigned overflow: result high word wrapped below an operand. */
339 return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1
/* Signed overflow: operands agree in sign but differ from the sum. */
343 return OVERFLOW_SUM_SIGN (h1, h2, h);
/* NOTE(review): excerpt gaps — listing lines 350-351, 354-358, 360-368
   (return type, braces, and the two-case negation body) are missing;
   only the l1 == 0 branch's return survives in this copy. */
346 /* Negate a doubleword integer with doubleword result.
347 Return nonzero if the operation overflows, assuming it's signed.
348 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
349 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
352 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
353 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
/* Overflow iff the input was the minimum value (sign bit survives negation). */
359 return (*hv & h1) < 0;
/* NOTE(review): excerpt gaps — listing lines 374-375, 379-380, 385, 388,
   391, 393, 395-396, 398-399, 403, 406-409, 412, 414, 416, 419-420,
   423-425, 428 (the unsigned_p parameter, loop indices, inner-loop
   braces, and the unsigned/signed branch heads) are missing here. */
369 /* Multiply two doubleword integers with doubleword result.
370 Return nonzero if the operation overflows according to UNSIGNED_P.
371 Each argument is given as two `HOST_WIDE_INT' pieces.
372 One argument is L1 and H1; the other, L2 and H2.
373 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
376 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
377 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
378 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
381 HOST_WIDE_INT arg1[4];
382 HOST_WIDE_INT arg2[4];
383 HOST_WIDE_INT prod[4 * 2];
384 unsigned HOST_WIDE_INT carry;
386 unsigned HOST_WIDE_INT toplow, neglow;
387 HOST_WIDE_INT tophigh, neghigh;
/* Schoolbook multiply on half-word digits; prod holds the full 4x4 result. */
389 encode (arg1, l1, h1);
390 encode (arg2, l2, h2);
392 memset (prod, 0, sizeof prod);
394 for (i = 0; i < 4; i++)
397 for (j = 0; j < 4; j++)
400 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
401 carry += arg1[i] * arg2[j];
402 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
404 prod[k] = LOWPART (carry);
405 carry = HIGHPART (carry);
410 decode (prod, lv, hv);
411 decode (prod + 4, &toplow, &tophigh);
413 /* Unsigned overflow is immediate. */
415 return (toplow | tophigh) != 0;
417 /* Check for signed overflow by calculating the signed representation of the
418 top half of the result; it should agree with the low half's sign bit. */
/* For each negative operand, subtract the other operand from the top half. */
421 neg_double (l2, h2, &neglow, &neghigh);
422 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
426 neg_double (l1, h1, &neglow, &neghigh);
427 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
429 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
/* NOTE(review): excerpt gaps — listing lines 437-438, 442, 444-446,
   448-450, 452-453, 455, 458-460, 462, 464-467, 470-472, 474, 479, 481,
   483, 486-489, 492-493 (return type, braces, negative-count early
   return, the *lv assignments, and the truncation path) are missing. */
432 /* Shift the doubleword integer in L1, H1 left by COUNT places
433 keeping only PREC bits of result.
434 Shift right if COUNT is negative.
435 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
436 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
439 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
440 HOST_WIDE_INT count, unsigned int prec,
441 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
443 unsigned HOST_WIDE_INT signmask;
/* Negative count delegates to the right-shift routine. */
447 rshift_double (l1, h1, -count, prec, lv, hv, arith);
451 if (SHIFT_COUNT_TRUNCATED)
454 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
456 /* Shifting by the host word size is undefined according to the
457 ANSI standard, so we must handle this as a special case. */
461 else if (count >= HOST_BITS_PER_WIDE_INT)
463 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
/* Double-shift idiom (>> (w-count-1) >> 1) avoids UB when count == 0. */
468 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
469 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
473 /* Sign extend all bits that are beyond the precision. */
475 signmask = -((prec > HOST_BITS_PER_WIDE_INT
476 ? ((unsigned HOST_WIDE_INT) *hv
477 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
478 : (*lv >> (prec - 1))) & 1);
480 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
482 else if (prec >= HOST_BITS_PER_WIDE_INT)
484 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
485 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
490 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
491 *lv |= signmask << prec;
/* NOTE(review): excerpt gaps — listing lines 499-500, 504-505, 507-508,
   510-511, 513-514, 516, 519-521, 523-524, 526-528, 530, 532-533, 535,
   537-540, 542, 544, 547-550, 553-554 (return type, the arith parameter
   line, braces, the signmask ternary head, and *lv assignments) are
   missing from this copy. */
495 /* Shift the doubleword integer in L1, H1 right by COUNT places
496 keeping only PREC bits of result. COUNT must be positive.
497 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
498 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
501 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
502 HOST_WIDE_INT count, unsigned int prec,
503 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
506 unsigned HOST_WIDE_INT signmask;
/* Arithmetic shift replicates the incoming sign bit of H1. */
509 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
512 if (SHIFT_COUNT_TRUNCATED)
515 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
517 /* Shifting by the host word size is undefined according to the
518 ANSI standard, so we must handle this as a special case. */
522 else if (count >= HOST_BITS_PER_WIDE_INT)
525 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
529 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
/* Double-shift idiom (<< (w-count-1) << 1) avoids UB when count == 0. */
531 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
534 /* Zero / sign extend all bits that are beyond the precision. */
536 if (count >= (HOST_WIDE_INT)prec)
541 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
543 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
545 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
546 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
551 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
552 *lv |= signmask << (prec - count);
/* NOTE(review): excerpt gaps — listing lines 560-561, 565, 568-572,
   575-578 (return type, braces, count normalization, and the OR of the
   two shifted halves into *lv/*hv) are missing from this copy. */
556 /* Rotate the doubleword integer in L1, H1 left by COUNT places
557 keeping only PREC bits of result.
558 Rotate right if COUNT is negative.
559 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
562 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
563 HOST_WIDE_INT count, unsigned int prec,
564 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
566 unsigned HOST_WIDE_INT s1l, s2l;
567 HOST_WIDE_INT s1h, s2h;
/* Rotate = (x << count) | (x >> (prec - count)), both done logically. */
573 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
574 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
/* NOTE(review): excerpt gaps — listing lines 582-583, 587, 590-594,
   597-600 (return type, braces, count normalization, and the final OR
   into *lv/*hv) are missing from this copy. */
579 /* Rotate the doubleword integer in L1, H1 left by COUNT places
580 keeping only PREC bits of result. COUNT must be positive.
581 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
584 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
585 HOST_WIDE_INT count, unsigned int prec,
586 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
588 unsigned HOST_WIDE_INT s1l, s2l;
589 HOST_WIDE_INT s1h, s2h;
/* Right rotate = (x >> count) | (x << (prec - count)), both logical. */
595 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
596 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
/* NOTE(review): line-numbered excerpt with many gaps in the embedded
   numbering (braces, switch heads, several statements are missing from
   this copy); restore from the canonical source before compiling.
   Fix applied below at listing line 846: the second argument pair of the
   mul_double call read "<wice" — HTML-entity corruption where "&lt" in
   "&ltwice" was decoded to "<".  It is restored to "&ltwice" so the call
   passes the addresses of ltwice/htwice, matching their use just after. */
601 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
602 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
603 CODE is a tree code for a kind of division, one of
604 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
606 It controls how the quotient is rounded to an integer.
607 Return nonzero if the operation overflows.
608 UNS nonzero says do unsigned division. */
611 div_and_round_double (enum tree_code code, int uns,
612 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
613 HOST_WIDE_INT hnum_orig,
614 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
615 HOST_WIDE_INT hden_orig,
616 unsigned HOST_WIDE_INT *lquo,
617 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
621 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
622 HOST_WIDE_INT den[4], quo[4];
624 unsigned HOST_WIDE_INT work;
625 unsigned HOST_WIDE_INT carry = 0;
626 unsigned HOST_WIDE_INT lnum = lnum_orig;
627 HOST_WIDE_INT hnum = hnum_orig;
628 unsigned HOST_WIDE_INT lden = lden_orig;
629 HOST_WIDE_INT hden = hden_orig;
/* Division by zero: flag overflow and substitute 1 to avoid crashing. */
632 if (hden == 0 && lden == 0)
633 overflow = 1, lden = 1;
635 /* Calculate quotient sign and convert operands to unsigned. */
641 /* (minimum integer) / (-1) is the only overflow case. */
642 if (neg_double (lnum, hnum, &lnum, &hnum)
643 && ((HOST_WIDE_INT) lden & hden) == -1)
649 neg_double (lden, hden, &lden, &hden);
653 if (hnum == 0 && hden == 0)
654 { /* single precision */
656 /* This unsigned division rounds toward zero. */
662 { /* trivial case: dividend < divisor */
663 /* hden != 0 already checked. */
670 memset (quo, 0, sizeof quo);
672 memset (num, 0, sizeof num); /* to zero 9th element */
673 memset (den, 0, sizeof den);
675 encode (num, lnum, hnum);
676 encode (den, lden, hden);
678 /* Special code for when the divisor < BASE. */
679 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
681 /* hnum != 0 already checked. */
682 for (i = 4 - 1; i >= 0; i--)
684 work = num[i] + carry * BASE;
685 quo[i] = work / lden;
691 /* Full double precision division,
692 with thanks to Don Knuth's "Seminumerical Algorithms". */
693 int num_hi_sig, den_hi_sig;
694 unsigned HOST_WIDE_INT quo_est, scale;
696 /* Find the highest nonzero divisor digit. */
697 for (i = 4 - 1;; i--)
704 /* Insure that the first digit of the divisor is at least BASE/2.
705 This is required by the quotient digit estimation algorithm. */
707 scale = BASE / (den[den_hi_sig] + 1);
709 { /* scale divisor and dividend */
711 for (i = 0; i <= 4 - 1; i++)
713 work = (num[i] * scale) + carry;
714 num[i] = LOWPART (work);
715 carry = HIGHPART (work);
720 for (i = 0; i <= 4 - 1; i++)
722 work = (den[i] * scale) + carry;
723 den[i] = LOWPART (work);
724 carry = HIGHPART (work);
725 if (den[i] != 0) den_hi_sig = i;
/* Main loop of Knuth's Algorithm D: one quotient digit per iteration. */
732 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
734 /* Guess the next quotient digit, quo_est, by dividing the first
735 two remaining dividend digits by the high order quotient digit.
736 quo_est is never low and is at most 2 high. */
737 unsigned HOST_WIDE_INT tmp;
739 num_hi_sig = i + den_hi_sig + 1;
740 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
741 if (num[num_hi_sig] != den[den_hi_sig])
742 quo_est = work / den[den_hi_sig];
746 /* Refine quo_est so it's usually correct, and at most one high. */
747 tmp = work - quo_est * den[den_hi_sig];
749 && (den[den_hi_sig - 1] * quo_est
750 > (tmp * BASE + num[num_hi_sig - 2])))
753 /* Try QUO_EST as the quotient digit, by multiplying the
754 divisor by QUO_EST and subtracting from the remaining dividend.
755 Keep in mind that QUO_EST is the I - 1st digit. */
758 for (j = 0; j <= den_hi_sig; j++)
760 work = quo_est * den[j] + carry;
761 carry = HIGHPART (work);
762 work = num[i + j] - LOWPART (work);
763 num[i + j] = LOWPART (work);
764 carry += HIGHPART (work) != 0;
767 /* If quo_est was high by one, then num[i] went negative and
768 we need to correct things. */
769 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
772 carry = 0; /* add divisor back in */
773 for (j = 0; j <= den_hi_sig; j++)
775 work = num[i + j] + den[j] + carry;
776 carry = HIGHPART (work);
777 num[i + j] = LOWPART (work);
780 num [num_hi_sig] += carry;
783 /* Store the quotient digit. */
788 decode (quo, lquo, hquo);
791 /* If result is negative, make it so. */
793 neg_double (*lquo, *hquo, lquo, hquo);
795 /* Compute trial remainder: rem = num - (quo * den) */
796 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
797 neg_double (*lrem, *hrem, lrem, hrem);
798 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
803 case TRUNC_MOD_EXPR: /* round toward zero */
804 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
808 case FLOOR_MOD_EXPR: /* round toward negative infinity */
809 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
812 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
820 case CEIL_MOD_EXPR: /* round toward positive infinity */
821 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
823 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
831 case ROUND_MOD_EXPR: /* round to closest integer */
833 unsigned HOST_WIDE_INT labs_rem = *lrem;
834 HOST_WIDE_INT habs_rem = *hrem;
835 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
836 HOST_WIDE_INT habs_den = hden, htwice;
838 /* Get absolute values. */
840 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
842 neg_double (lden, hden, &labs_den, &habs_den);
844 /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient. */
845 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
846 labs_rem, habs_rem, &ltwice, &htwice);
848 if (((unsigned HOST_WIDE_INT) habs_den
849 < (unsigned HOST_WIDE_INT) htwice)
850 || (((unsigned HOST_WIDE_INT) habs_den
851 == (unsigned HOST_WIDE_INT) htwice)
852 && (labs_den <= ltwice)))
856 add_double (*lquo, *hquo,
857 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
860 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
872 /* Compute true remainder: rem = num - (quo * den) */
873 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
874 neg_double (*lrem, *hrem, lrem, hrem);
875 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* NOTE(review): excerpt gaps — listing lines 882-883, 885, 892, 898-899,
   902-904, 908, 912-913 (return type `static tree', braces, the sizetype
   argument to the first fit_double_type call, the `else', and the
   NULL_TREE return) are missing from this copy. */
879 /* If ARG2 divides ARG1 with zero remainder, carries out the division
880 of type CODE and returns the quotient.
881 Otherwise returns NULL_TREE. */
884 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
886 unsigned HOST_WIDE_INT int1l, int2l;
887 HOST_WIDE_INT int1h, int2h;
888 unsigned HOST_WIDE_INT quol, reml;
889 HOST_WIDE_INT quoh, remh;
890 tree type = TREE_TYPE (arg1);
891 int uns = TYPE_UNSIGNED (type);
893 int1l = TREE_INT_CST_LOW (arg1);
894 int1h = TREE_INT_CST_HIGH (arg1);
895 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
896 &obj[some_exotic_number]. */
897 if (POINTER_TYPE_P (type))
/* Pointer offsets are re-interpreted in the signed counterpart type. */
900 type = signed_type_for (type);
901 fit_double_type (int1l, int1h, &int1l, &int1h,
905 fit_double_type (int1l, int1h, &int1l, &int1h, type);
906 int2l = TREE_INT_CST_LOW (arg2);
907 int2h = TREE_INT_CST_HIGH (arg2);
909 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
910 &quol, &quoh, &reml, &remh);
/* A nonzero remainder means ARG2 does not divide ARG1 evenly. */
911 if (remh != 0 || reml != 0)
914 return build_int_cst_wide (type, quol, quoh);
917 /* This is nonzero if we should defer warnings about undefined
918 overflow. This facility exists because these warnings are a
919 special case. The code to estimate loop iterations does not want
920 to issue any warnings, since it works with expressions which do not
921 occur in user code. Various bits of cleanup code call fold(), but
922 only use the result if it has certain characteristics (e.g., is a
923 constant); that code only wants to issue a warning if the result is
/* NOTE(review): the end of the sentence above (listing lines 924-925) is
   missing from this excerpt. Acts as a nesting counter, not a boolean. */
926 static int fold_deferring_overflow_warnings;
928 /* If a warning about undefined overflow is deferred, this is the
929 warning. Note that this may cause us to turn two warnings into
930 one, but that is fine since it is sufficient to only give one
931 warning per expression. */
/* Holds a gettext msgid; NULL when no warning is pending. */
933 static const char* fold_deferred_overflow_warning;
935 /* If a warning about undefined overflow is deferred, this is the
936 level at which the warning should be emitted. */
938 static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* NOTE(review): excerpt gaps — listing lines 942-943, 945, 947 (the
   `void' return-type line and braces) are missing from this copy. */
940 /* Start deferring overflow warnings. We could use a stack here to
941 permit nested calls, but at present it is not necessary. */
944 fold_defer_overflow_warnings (void)
946 ++fold_deferring_overflow_warnings;
/* NOTE(review): excerpt gaps — listing lines 956-958, 960-963, 967, 969,
   972-974, 977, 979-980, 982-983, 985, 988, 990-992, 994, 997 (return
   type, local declarations `const char *warnmsg' and `location_t locus',
   braces, early returns, and the stmt NULL check) are missing here. */
949 /* Stop deferring overflow warnings. If there is a pending warning,
950 and ISSUE is true, then issue the warning if appropriate. STMT is
951 the statement with which the warning should be associated (used for
952 location information); STMT may be NULL. CODE is the level of the
953 warning--a warn_strict_overflow_code value. This function will use
954 the smaller of CODE and the deferred code when deciding whether to
955 issue the warning. CODE may be zero to mean to always use the
959 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
964 gcc_assert (fold_deferring_overflow_warnings > 0);
965 --fold_deferring_overflow_warnings;
/* Still nested: keep the pending warning, only tighten its level. */
966 if (fold_deferring_overflow_warnings > 0)
968 if (fold_deferred_overflow_warning != NULL
970 && code < (int) fold_deferred_overflow_code)
971 fold_deferred_overflow_code = code;
975 warnmsg = fold_deferred_overflow_warning;
976 fold_deferred_overflow_warning = NULL;
978 if (!issue || warnmsg == NULL)
981 if (gimple_no_warning_p (stmt))
984 /* Use the smallest code level when deciding to issue the
986 if (code == 0 || code > (int) fold_deferred_overflow_code)
987 code = fold_deferred_overflow_code;
989 if (!issue_strict_overflow_warning (code))
993 locus = input_location;
995 locus = gimple_location (stmt);
996 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
/* NOTE(review): excerpt gaps — listing lines 1000-1002, 1004, 1006 (end
   of the comment, `void' return line, and braces) are missing here. */
999 /* Stop deferring overflow warnings, ignoring any deferred
/* Convenience wrapper: undefer and discard any pending warning. */
1003 fold_undefer_and_ignore_overflow_warnings (void)
1005 fold_undefer_overflow_warnings (false, NULL, 0);
/* NOTE(review): excerpt gaps — listing lines 1009-1010, 1012, 1014 (the
   `bool' return-type line and braces) are missing from this copy. */
1008 /* Whether we are deferring overflow warnings. */
1011 fold_deferring_overflow_warnings_p (void)
1013 return fold_deferring_overflow_warnings > 0;
/* NOTE(review): excerpt gaps — listing lines 1018-1019, 1021, 1023, 1026,
   1029-1030, 1033 (return type and braces) are missing from this copy. */
1016 /* This is called when we fold something based on the fact that signed
1017 overflow is undefined. */
/* Either records GMSGID/WC for later (when deferring) or warns now. */
1020 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1022 if (fold_deferring_overflow_warnings > 0)
/* Keep only the lowest-level (most likely to be issued) pending warning. */
1024 if (fold_deferred_overflow_warning == NULL
1025 || wc < fold_deferred_overflow_code)
1027 fold_deferred_overflow_warning = gmsgid;
1028 fold_deferred_overflow_code = wc;
1031 else if (issue_strict_overflow_warning (wc))
1032 warning (OPT_Wstrict_overflow, gmsgid);
/* NOTE(review): excerpt gaps — listing lines 1037-1038, 1040-1042,
   1066-1067, 1073-1079 (return type, `switch (code)', the `return true;'
   after the odd-function cases, the comment before the rint group, the
   default case, and braces) are missing from this copy. */
1035 /* Return true if the built-in mathematical function specified by CODE
1036 is odd, i.e. -f(x) == f(-x). */
1039 negate_mathfn_p (enum built_in_function code)
/* Odd functions: f(-x) == -f(x) holds exactly, so negation commutes. */
1043 CASE_FLT_FN (BUILT_IN_ASIN):
1044 CASE_FLT_FN (BUILT_IN_ASINH):
1045 CASE_FLT_FN (BUILT_IN_ATAN):
1046 CASE_FLT_FN (BUILT_IN_ATANH):
1047 CASE_FLT_FN (BUILT_IN_CASIN):
1048 CASE_FLT_FN (BUILT_IN_CASINH):
1049 CASE_FLT_FN (BUILT_IN_CATAN):
1050 CASE_FLT_FN (BUILT_IN_CATANH):
1051 CASE_FLT_FN (BUILT_IN_CBRT):
1052 CASE_FLT_FN (BUILT_IN_CPROJ):
1053 CASE_FLT_FN (BUILT_IN_CSIN):
1054 CASE_FLT_FN (BUILT_IN_CSINH):
1055 CASE_FLT_FN (BUILT_IN_CTAN):
1056 CASE_FLT_FN (BUILT_IN_CTANH):
1057 CASE_FLT_FN (BUILT_IN_ERF):
1058 CASE_FLT_FN (BUILT_IN_LLROUND):
1059 CASE_FLT_FN (BUILT_IN_LROUND):
1060 CASE_FLT_FN (BUILT_IN_ROUND):
1061 CASE_FLT_FN (BUILT_IN_SIN):
1062 CASE_FLT_FN (BUILT_IN_SINH):
1063 CASE_FLT_FN (BUILT_IN_TAN):
1064 CASE_FLT_FN (BUILT_IN_TANH):
1065 CASE_FLT_FN (BUILT_IN_TRUNC):
/* Rounding-to-integer functions are odd only when the rounding mode
   cannot change the result — hence the flag_rounding_math guard. */
1068 CASE_FLT_FN (BUILT_IN_LLRINT):
1069 CASE_FLT_FN (BUILT_IN_LRINT):
1070 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1071 CASE_FLT_FN (BUILT_IN_RINT):
1072 return !flag_rounding_math;
/* NOTE(review): excerpt gaps — listing lines 1081-1083, 1085, 1087-1089,
   1091, 1094-1095, 1098, 1100, 1103-1104, 1109-1110 (end of the comment,
   `static bool', locals `unsigned int prec; tree type;', braces, the
   unsigned-type early return, and the `else') are missing here. */
1080 /* Check whether we may negate an integer constant T without causing
1084 may_negate_without_overflow_p (const_tree t)
1086 unsigned HOST_WIDE_INT val;
1090 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1092 type = TREE_TYPE (t);
1093 if (TYPE_UNSIGNED (type))
1096 prec = TYPE_PRECISION (type);
/* Wide constants: if the low word is nonzero, -T cannot be the minimum. */
1097 if (prec > HOST_BITS_PER_WIDE_INT)
1099 if (TREE_INT_CST_LOW (t) != 0)
1101 prec -= HOST_BITS_PER_WIDE_INT;
1102 val = TREE_INT_CST_HIGH (t);
1105 val = TREE_INT_CST_LOW (t);
1106 if (prec < HOST_BITS_PER_WIDE_INT)
1107 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
/* Negation overflows only for the most-negative value (sign bit alone). */
1108 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
/* NOTE(review): excerpt with many numbering gaps — the case labels
   (INTEGER_CST, REAL_CST, COMPLEX_CST, NEGATE_EXPR, PLUS_EXPR,
   MINUS_EXPR, MULT_EXPR, NOP_EXPR, CALL_EXPR, RSHIFT_EXPR, ...), several
   returns, the default case, and braces are missing from this copy;
   only the bodies under each (absent) label survive. */
1111 /* Determine whether an expression T can be cheaply negated using
1112 the function negate_expr without introducing undefined overflow. */
1115 negate_expr_p (tree t)
1122 type = TREE_TYPE (t);
1124 STRIP_SIGN_NOPS (t);
1125 switch (TREE_CODE (t))
/* Integer constant: negation is free when the type wraps... */
1128 if (TYPE_OVERFLOW_WRAPS (type))
1131 /* Check that -CST will not overflow type. */
1132 return may_negate_without_overflow_p (t);
1134 return (INTEGRAL_TYPE_P (type)
1135 && TYPE_OVERFLOW_WRAPS (type));
/* Complex constant: both parts must be cheaply negatable. */
1143 return negate_expr_p (TREE_REALPART (t))
1144 && negate_expr_p (TREE_IMAGPART (t));
1147 return negate_expr_p (TREE_OPERAND (t, 0))
1148 && negate_expr_p (TREE_OPERAND (t, 1));
1151 return negate_expr_p (TREE_OPERAND (t, 0));
/* Addition: cannot rewrite when sign-dependent rounding or signed
   zeros must be honored. */
1154 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1155 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1157 /* -(A + B) -> (-B) - A. */
1158 if (negate_expr_p (TREE_OPERAND (t, 1))
1159 && reorder_operands_p (TREE_OPERAND (t, 0),
1160 TREE_OPERAND (t, 1)))
1162 /* -(A + B) -> (-A) - B. */
1163 return negate_expr_p (TREE_OPERAND (t, 0));
1166 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1167 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1168 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1169 && reorder_operands_p (TREE_OPERAND (t, 0),
1170 TREE_OPERAND (t, 1));
1173 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* Multiplication: either factor may carry the negation. */
1179 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1180 return negate_expr_p (TREE_OPERAND (t, 1))
1181 || negate_expr_p (TREE_OPERAND (t, 0));
1184 case TRUNC_DIV_EXPR:
1185 case ROUND_DIV_EXPR:
1186 case FLOOR_DIV_EXPR:
1188 case EXACT_DIV_EXPR:
1189 /* In general we can't negate A / B, because if A is INT_MIN and
1190 B is 1, we may turn this into INT_MIN / -1 which is undefined
1191 and actually traps on some architectures. But if overflow is
1192 undefined, we can negate, because - (INT_MIN / 1) is an
1194 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1195 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1197 return negate_expr_p (TREE_OPERAND (t, 1))
1198 || negate_expr_p (TREE_OPERAND (t, 0));
1201 /* Negate -((double)float) as (double)(-float). */
1202 if (TREE_CODE (type) == REAL_TYPE)
1204 tree tem = strip_float_extensions (t);
1206 return negate_expr_p (tem);
1211 /* Negate -f(x) as f(-x). */
1212 if (negate_mathfn_p (builtin_mathfn_code (t)))
1213 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1217 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1218 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1220 tree op1 = TREE_OPERAND (t, 1);
1221 if (TREE_INT_CST_HIGH (op1) == 0
1222 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1223 == TREE_INT_CST_LOW (op1))
1234 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1235 simplification is possible.
1236 If negate_expr_p would return true for T, NULL_TREE will never be
1240 fold_negate_expr (tree t)
1242 tree type = TREE_TYPE (t);
1245 switch (TREE_CODE (t))
1247 /* Convert - (~A) to A + 1. */
1249 if (INTEGRAL_TYPE_P (type))
1250 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1251 build_int_cst (type, 1));
1255 tem = fold_negate_const (t, type);
1256 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1257 || !TYPE_OVERFLOW_TRAPS (type))
1262 tem = fold_negate_const (t, type);
1263 /* Two's complement FP formats, such as c4x, may overflow. */
1264 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1269 tem = fold_negate_const (t, type);
1274 tree rpart = negate_expr (TREE_REALPART (t));
1275 tree ipart = negate_expr (TREE_IMAGPART (t));
1277 if ((TREE_CODE (rpart) == REAL_CST
1278 && TREE_CODE (ipart) == REAL_CST)
1279 || (TREE_CODE (rpart) == INTEGER_CST
1280 && TREE_CODE (ipart) == INTEGER_CST))
1281 return build_complex (type, rpart, ipart);
1286 if (negate_expr_p (t))
1287 return fold_build2 (COMPLEX_EXPR, type,
1288 fold_negate_expr (TREE_OPERAND (t, 0)),
1289 fold_negate_expr (TREE_OPERAND (t, 1)));
1293 if (negate_expr_p (t))
1294 return fold_build1 (CONJ_EXPR, type,
1295 fold_negate_expr (TREE_OPERAND (t, 0)));
1299 return TREE_OPERAND (t, 0);
1302 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1303 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1305 /* -(A + B) -> (-B) - A. */
1306 if (negate_expr_p (TREE_OPERAND (t, 1))
1307 && reorder_operands_p (TREE_OPERAND (t, 0),
1308 TREE_OPERAND (t, 1)))
1310 tem = negate_expr (TREE_OPERAND (t, 1));
1311 return fold_build2 (MINUS_EXPR, type,
1312 tem, TREE_OPERAND (t, 0));
1315 /* -(A + B) -> (-A) - B. */
1316 if (negate_expr_p (TREE_OPERAND (t, 0)))
1318 tem = negate_expr (TREE_OPERAND (t, 0));
1319 return fold_build2 (MINUS_EXPR, type,
1320 tem, TREE_OPERAND (t, 1));
1326 /* - (A - B) -> B - A */
1327 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1328 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1329 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1330 return fold_build2 (MINUS_EXPR, type,
1331 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1335 if (TYPE_UNSIGNED (type))
1341 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1343 tem = TREE_OPERAND (t, 1);
1344 if (negate_expr_p (tem))
1345 return fold_build2 (TREE_CODE (t), type,
1346 TREE_OPERAND (t, 0), negate_expr (tem));
1347 tem = TREE_OPERAND (t, 0);
1348 if (negate_expr_p (tem))
1349 return fold_build2 (TREE_CODE (t), type,
1350 negate_expr (tem), TREE_OPERAND (t, 1));
1354 case TRUNC_DIV_EXPR:
1355 case ROUND_DIV_EXPR:
1356 case FLOOR_DIV_EXPR:
1358 case EXACT_DIV_EXPR:
1359 /* In general we can't negate A / B, because if A is INT_MIN and
1360 B is 1, we may turn this into INT_MIN / -1 which is undefined
1361 and actually traps on some architectures. But if overflow is
1362 undefined, we can negate, because - (INT_MIN / 1) is an
1364 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1366 const char * const warnmsg = G_("assuming signed overflow does not "
1367 "occur when negating a division");
1368 tem = TREE_OPERAND (t, 1);
1369 if (negate_expr_p (tem))
1371 if (INTEGRAL_TYPE_P (type)
1372 && (TREE_CODE (tem) != INTEGER_CST
1373 || integer_onep (tem)))
1374 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1375 return fold_build2 (TREE_CODE (t), type,
1376 TREE_OPERAND (t, 0), negate_expr (tem));
1378 tem = TREE_OPERAND (t, 0);
1379 if (negate_expr_p (tem))
1381 if (INTEGRAL_TYPE_P (type)
1382 && (TREE_CODE (tem) != INTEGER_CST
1383 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1384 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1385 return fold_build2 (TREE_CODE (t), type,
1386 negate_expr (tem), TREE_OPERAND (t, 1));
1392 /* Convert -((double)float) into (double)(-float). */
1393 if (TREE_CODE (type) == REAL_TYPE)
1395 tem = strip_float_extensions (t);
1396 if (tem != t && negate_expr_p (tem))
1397 return fold_convert (type, negate_expr (tem));
1402 /* Negate -f(x) as f(-x). */
1403 if (negate_mathfn_p (builtin_mathfn_code (t))
1404 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1408 fndecl = get_callee_fndecl (t);
1409 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1410 return build_call_expr (fndecl, 1, arg);
1415 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1416 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1418 tree op1 = TREE_OPERAND (t, 1);
1419 if (TREE_INT_CST_HIGH (op1) == 0
1420 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1421 == TREE_INT_CST_LOW (op1))
1423 tree ntype = TYPE_UNSIGNED (type)
1424 ? signed_type_for (type)
1425 : unsigned_type_for (type);
1426 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1427 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1428 return fold_convert (type, temp);
1440 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1441 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1442 return NULL_TREE. */
1445 negate_expr (tree t)
/* Remember the original type before stripping sign-preserving NOPs so
   the result can be converted back to it at the end.  */
1452 type = TREE_TYPE (t);
1453 STRIP_SIGN_NOPS (t);
1455 tem = fold_negate_expr (t);
/* If folding produced nothing, fall back to an explicit NEGATE_EXPR
   node.  (NOTE(review): the NULL test on TEM appears elided in this
   excerpt — confirm against the full source.)  */
1457 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1458 return fold_convert (type, tem);
1461 /* Split a tree IN into a constant, literal and variable parts that could be
1462 combined with CODE to make IN. "constant" means an expression with
1463 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1464 commutative arithmetic operation. Store the constant part into *CONP,
1465 the literal in *LITP and return the variable part. If a part isn't
1466 present, set it to null. If the tree does not decompose in this way,
1467 return the entire tree as the variable part and the other parts as null.
1469 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1470 case, we negate an operand that was subtracted. Except if it is a
1471 literal for which we use *MINUS_LITP instead.
1473 If NEGATE_P is true, we are negating all of IN, again except a literal
1474 for which we use *MINUS_LITP instead.
1476 If IN is itself a literal or constant, return it as appropriate.
1478 Note that we do not guarantee that any of the three values will be the
1479 same type as IN, but they will have the same signedness and mode. */
1482 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1483 tree *minus_litp, int negate_p)
1491 /* Strip any conversions that don't change the machine mode or signedness. */
1492 STRIP_SIGN_NOPS (in);
/* IN itself a literal constant: handled here (body elided in excerpt).  */
1494 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1495 || TREE_CODE (in) == FIXED_CST)
1497 else if (TREE_CODE (in) == code
1498 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1499 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1500 /* We can associate addition and subtraction together (even
1501 though the C standard doesn't say so) for integers because
1502 the value is not affected. For reals, the value might be
1503 affected, so we can't. */
1504 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1505 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1507 tree op0 = TREE_OPERAND (in, 0);
1508 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p records that IN was a subtraction, so whichever operand came
   from position 1 is logically negated.  */
1509 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1510 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1512 /* First see if either of the operands is a literal, then a constant. */
1513 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1514 || TREE_CODE (op0) == FIXED_CST)
1515 *litp = op0, op0 = 0;
1516 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1517 || TREE_CODE (op1) == FIXED_CST)
1518 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1520 if (op0 != 0 && TREE_CONSTANT (op0))
1521 *conp = op0, op0 = 0;
1522 else if (op1 != 0 && TREE_CONSTANT (op1))
1523 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1525 /* If we haven't dealt with either operand, this is not a case we can
1526 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1527 if (op0 != 0 && op1 != 0)
1532 var = op1, neg_var_p = neg1_p;
1534 /* Now do any needed negations. */
/* A negated literal goes into *MINUS_LITP rather than being wrapped in
   a NEGATE_EXPR; constants and variables are negated in place.  */
1536 *minus_litp = *litp, *litp = 0;
1538 *conp = negate_expr (*conp);
1540 var = negate_expr (var);
1542 else if (TREE_CONSTANT (in))
/* NEGATE_P handling: flip the literal between *LITP and *MINUS_LITP and
   negate the constant and variable parts.  */
1550 *minus_litp = *litp, *litp = 0;
1551 else if (*minus_litp)
1552 *litp = *minus_litp, *minus_litp = 0;
1553 *conp = negate_expr (*conp);
1554 var = negate_expr (var);
1560 /* Re-associate trees split by the above function. T1 and T2 are either
1561 expressions to associate or null. Return the new expression, if any. If
1562 we build an operation, do it in TYPE and with CODE. */
1565 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1572 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1573 try to fold this since we will have infinite recursion. But do
1574 deal with any NEGATE_EXPRs. */
1575 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1576 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1578 if (code == PLUS_EXPR)
/* X + (-Y) and (-X) + Y become subtractions instead of rebuilding a
   PLUS with a NEGATE inside.  */
1580 if (TREE_CODE (t1) == NEGATE_EXPR)
1581 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1582 fold_convert (type, TREE_OPERAND (t1, 0)));
1583 else if (TREE_CODE (t2) == NEGATE_EXPR)
1584 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1585 fold_convert (type, TREE_OPERAND (t2, 0)));
1586 else if (integer_zerop (t2))
1587 return fold_convert (type, t1);
1589 else if (code == MINUS_EXPR)
1591 if (integer_zerop (t2))
1592 return fold_convert (type, t1);
/* build2 (not fold_build2) here, to avoid the recursion noted above.  */
1595 return build2 (code, type, fold_convert (type, t1),
1596 fold_convert (type, t2));
/* Safe to fold in the general case.  */
1599 return fold_build2 (code, type, fold_convert (type, t1),
1600 fold_convert (type, t2));
1603 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1604 for use in int_const_binop, size_binop and size_diffop. */
1607 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
/* Both types must be integer or pointer types (non-matching cases are
   rejected; those return paths are elided in this excerpt).  */
1609 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1611 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
/* Equivalent means same signedness, precision and machine mode.  */
1626 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1627 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1628 && TYPE_MODE (type1) == TYPE_MODE (type2);
/* NOTE(review): many case labels, braces and return statements are
   elided in this excerpt; comments describe only the visible logic.  */
1632 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1633 to produce a new constant. Return NULL_TREE if we don't know how
1634 to evaluate CODE at compile-time.
1636 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1639 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
/* Each operand is a double-word value: (low, high) pairs.  */
1641 unsigned HOST_WIDE_INT int1l, int2l;
1642 HOST_WIDE_INT int1h, int2h;
1643 unsigned HOST_WIDE_INT low;
1645 unsigned HOST_WIDE_INT garbagel;
1646 HOST_WIDE_INT garbageh;
1648 tree type = TREE_TYPE (arg1);
1649 int uns = TYPE_UNSIGNED (type);
/* Sizetypes get overflow propagated even though they are unsigned.  */
1651 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1654 int1l = TREE_INT_CST_LOW (arg1);
1655 int1h = TREE_INT_CST_HIGH (arg1);
1656 int2l = TREE_INT_CST_LOW (arg2);
1657 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise operations never overflow.  */
1662 low = int1l | int2l, hi = int1h | int2h;
1666 low = int1l ^ int2l, hi = int1h ^ int2h;
1670 low = int1l & int2l, hi = int1h & int2h;
1676 /* It's unclear from the C standard whether shifts can overflow.
1677 The following code ignores overflow; perhaps a C standard
1678 interpretation ruling is needed. */
1679 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1686 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1691 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction: negate ARG2 and add, then detect overflow by sign.  */
1695 neg_double (int2l, int2h, &low, &hi);
1696 add_double (int1l, int1h, low, hi, &low, &hi);
1697 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1701 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1704 case TRUNC_DIV_EXPR:
1705 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1706 case EXACT_DIV_EXPR:
1707 /* This is a shortcut for a common special case. */
/* Both operands fit in a single nonnegative HOST_WIDE_INT with no
   prior overflow: use plain host division.  */
1708 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1709 && !TREE_OVERFLOW (arg1)
1710 && !TREE_OVERFLOW (arg2)
1711 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1713 if (code == CEIL_DIV_EXPR)
1716 low = int1l / int2l, hi = 0;
1720 /* ... fall through ... */
1722 case ROUND_DIV_EXPR:
/* Division by zero is not folded (return path elided here).  */
1723 if (int2h == 0 && int2l == 0)
1725 if (int2h == 0 && int2l == 1)
1727 low = int1l, hi = int1h;
/* X / X, for nonzero X.  */
1730 if (int1l == int2l && int1h == int2h
1731 && ! (int1l == 0 && int1h == 0))
1736 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1737 &low, &hi, &garbagel, &garbageh);
1740 case TRUNC_MOD_EXPR:
1741 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1742 /* This is a shortcut for a common special case. */
1743 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1744 && !TREE_OVERFLOW (arg1)
1745 && !TREE_OVERFLOW (arg2)
1746 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1748 if (code == CEIL_MOD_EXPR)
1750 low = int1l % int2l, hi = 0;
1754 /* ... fall through ... */
1756 case ROUND_MOD_EXPR:
1757 if (int2h == 0 && int2l == 0)
/* For modulus the quotient is the garbage output.  */
1759 overflow = div_and_round_double (code, uns,
1760 int1l, int1h, int2l, int2h,
1761 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compare the double-word values, unsigned or signed.  */
1767 low = (((unsigned HOST_WIDE_INT) int1h
1768 < (unsigned HOST_WIDE_INT) int2h)
1769 || (((unsigned HOST_WIDE_INT) int1h
1770 == (unsigned HOST_WIDE_INT) int2h)
1773 low = (int1h < int2h
1774 || (int1h == int2h && int1l < int2l));
1776 if (low == (code == MIN_EXPR))
1777 low = int1l, hi = int1h;
1779 low = int2l, hi = int2h;
/* NOTRUNC path: build the raw constant and set overflow flags by hand;
   otherwise force_fit_type_double truncates and flags in one step.  */
1788 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1790 /* Propagate overflow flags ourselves. */
1791 if (((!uns || is_sizetype) && overflow)
1792 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1795 TREE_OVERFLOW (t) = 1;
1799 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1800 ((!uns || is_sizetype) && overflow)
1801 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
/* NOTE(review): case labels, braces and some declarations are elided in
   this excerpt; comments describe only the visible logic.  */
1806 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1807 constant. We assume ARG1 and ARG2 have the same data type, or at least
1808 are the same kind of constant and the same machine mode. Return zero if
1809 combining the constants is not allowed in the current operating mode.
1811 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1814 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1816 /* Sanity check for the recursive cases. */
/* Integer constants are delegated wholesale.  */
1823 if (TREE_CODE (arg1) == INTEGER_CST)
1824 return int_const_binop (code, arg1, arg2, notrunc);
1826 if (TREE_CODE (arg1) == REAL_CST)
1828 enum machine_mode mode;
1831 REAL_VALUE_TYPE value;
1832 REAL_VALUE_TYPE result;
1836 /* The following codes are handled by real_arithmetic. */
1851 d1 = TREE_REAL_CST (arg1);
1852 d2 = TREE_REAL_CST (arg2);
1854 type = TREE_TYPE (arg1);
1855 mode = TYPE_MODE (type);
1857 /* Don't perform operation if we honor signaling NaNs and
1858 either operand is a NaN. */
1859 if (HONOR_SNANS (mode)
1860 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1863 /* Don't perform operation if it would raise a division
1864 by zero exception. */
1865 if (code == RDIV_EXPR
1866 && REAL_VALUES_EQUAL (d2, dconst0)
1867 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1870 /* If either operand is a NaN, just return it. Otherwise, set up
1871 for floating-point trap; we return an overflow. */
1872 if (REAL_VALUE_ISNAN (d1))
1874 else if (REAL_VALUE_ISNAN (d2))
/* Do the arithmetic in the internal representation, then round to the
   target mode; INEXACT records whether precision was lost.  */
1877 inexact = real_arithmetic (&value, code, &d1, &d2);
1878 real_convert (&result, mode, &value);
1880 /* Don't constant fold this floating point operation if
1881 the result has overflowed and flag_trapping_math. */
1882 if (flag_trapping_math
1883 && MODE_HAS_INFINITIES (mode)
1884 && REAL_VALUE_ISINF (result)
1885 && !REAL_VALUE_ISINF (d1)
1886 && !REAL_VALUE_ISINF (d2))
1889 /* Don't constant fold this floating point operation if the
1890 result may dependent upon the run-time rounding mode and
1891 flag_rounding_math is set, or if GCC's software emulation
1892 is unable to accurately represent the result. */
1893 if ((flag_rounding_math
1894 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1895 && (inexact || !real_identical (&result, &value)))
1898 t = build_real (type, result);
1900 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1904 if (TREE_CODE (arg1) == FIXED_CST)
1906 FIXED_VALUE_TYPE f1;
1907 FIXED_VALUE_TYPE f2;
1908 FIXED_VALUE_TYPE result;
1913 /* The following codes are handled by fixed_arithmetic. */
1919 case TRUNC_DIV_EXPR:
1920 f2 = TREE_FIXED_CST (arg2);
/* Shift-like case: the second operand is an integer count, packed
   into a FIXED_VALUE_TYPE by hand.  */
1925 f2.data.high = TREE_INT_CST_HIGH (arg2);
1926 f2.data.low = TREE_INT_CST_LOW (arg2);
1934 f1 = TREE_FIXED_CST (arg1);
1935 type = TREE_TYPE (arg1);
1936 sat_p = TYPE_SATURATING (type);
1937 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1938 t = build_fixed (type, result);
1939 /* Propagate overflow flags. */
1940 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1942 TREE_OVERFLOW (t) = 1;
1943 TREE_CONSTANT_OVERFLOW (t) = 1;
1945 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1946 TREE_CONSTANT_OVERFLOW (t) = 1;
1950 if (TREE_CODE (arg1) == COMPLEX_CST)
1952 tree type = TREE_TYPE (arg1);
1953 tree r1 = TREE_REALPART (arg1);
1954 tree i1 = TREE_IMAGPART (arg1);
1955 tree r2 = TREE_REALPART (arg2);
1956 tree i2 = TREE_IMAGPART (arg2);
/* Addition/subtraction fold componentwise.  */
1963 real = const_binop (code, r1, r2, notrunc);
1964 imag = const_binop (code, i1, i2, notrunc);
/* Multiplication: (r1 r2 - i1 i2) + (r1 i2 + i1 r2) i.  */
1968 real = const_binop (MINUS_EXPR,
1969 const_binop (MULT_EXPR, r1, r2, notrunc),
1970 const_binop (MULT_EXPR, i1, i2, notrunc),
1972 imag = const_binop (PLUS_EXPR,
1973 const_binop (MULT_EXPR, r1, i2, notrunc),
1974 const_binop (MULT_EXPR, i1, r2, notrunc),
/* Division via the textbook formula: divide the products by the
   squared magnitude of the divisor.  */
1981 = const_binop (PLUS_EXPR,
1982 const_binop (MULT_EXPR, r2, r2, notrunc),
1983 const_binop (MULT_EXPR, i2, i2, notrunc),
1986 = const_binop (PLUS_EXPR,
1987 const_binop (MULT_EXPR, r1, r2, notrunc),
1988 const_binop (MULT_EXPR, i1, i2, notrunc),
1991 = const_binop (MINUS_EXPR,
1992 const_binop (MULT_EXPR, i1, r2, notrunc),
1993 const_binop (MULT_EXPR, r1, i2, notrunc),
/* Integral complex division truncates.  */
1996 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1997 code = TRUNC_DIV_EXPR;
1999 real = const_binop (code, t1, magsquared, notrunc);
2000 imag = const_binop (code, t2, magsquared, notrunc);
2009 return build_complex (type, real, imag);
2015 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2016 indicates which particular sizetype to create. */
2019 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* KIND indexes the global sizetype table (sizetype, bitsizetype, ...).  */
2021 return build_int_cst (sizetype_tab[(int) kind], number);
2024 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2025 is a tree code. The type of the result is taken from the operands.
2026 Both must be equivalent integer types, ala int_binop_types_match_p.
2027 If the operands are constant, so is the result. */
2030 size_binop (enum tree_code code, tree arg0, tree arg1)
2032 tree type = TREE_TYPE (arg0);
2034 if (arg0 == error_mark_node || arg1 == error_mark_node)
2035 return error_mark_node;
2037 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2040 /* Handle the special case of two integer constants faster. */
2041 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2043 /* And some specific cases even faster than that. */
/* Identity shortcuts (x+0, 0+x, x-0, 1*x) — only taken when the
   eliminated operand carries no overflow flag.  */
2044 if (code == PLUS_EXPR)
2046 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2048 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2051 else if (code == MINUS_EXPR)
2053 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2056 else if (code == MULT_EXPR)
2058 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2062 /* Handle general case of two integer constants. */
2063 return int_const_binop (code, arg0, arg1, 0);
/* Non-constant operands: build a folded expression instead.  */
2066 return fold_build2 (code, type, arg0, arg1);
2069 /* Given two values, either both of sizetype or both of bitsizetype,
2070 compute the difference between the two values. Return the value
2071 in signed type corresponding to the type of the operands. */
2074 size_diffop (tree arg0, tree arg1)
2076 tree type = TREE_TYPE (arg0);
2079 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2082 /* If the type is already signed, just do the simple thing. */
2083 if (!TYPE_UNSIGNED (type))
2084 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the operand type.  */
2086 if (type == sizetype)
2088 else if (type == bitsizetype)
2089 ctype = sbitsizetype;
2091 ctype = signed_type_for (type);
2093 /* If either operand is not a constant, do the conversions to the signed
2094 type and subtract. The hardware will do the right thing with any
2095 overflow in the subtraction. */
2096 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2097 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2098 fold_convert (ctype, arg1));
2100 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2101 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2102 overflow) and negate (which can't either). Special-case a result
2103 of zero while we're here. */
2104 if (tree_int_cst_equal (arg0, arg1))
2105 return build_int_cst (ctype, 0);
2106 else if (tree_int_cst_lt (arg1, arg0))
2107 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2109 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2110 fold_convert (ctype, size_binop (MINUS_EXPR,
2114 /* A subroutine of fold_convert_const handling conversions of an
2115 INTEGER_CST to another integer type. */
2118 fold_convert_const_int_from_int (tree type, const_tree arg1)
2122 /* Given an integer constant, make new constant with new type,
2123 appropriately sign-extended or truncated. */
/* The long second argument to force_fit_type_double computes the
   "overflowable" flag; the final argument ORs in sign-change overflow
   and any pre-existing overflow on ARG1.  */
2124 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2125 TREE_INT_CST_HIGH (arg1),
2126 /* Don't set the overflow when
2127 converting from a pointer, */
2128 !POINTER_TYPE_P (TREE_TYPE (arg1))
2129 /* or to a sizetype with same signedness
2130 and the precision is unchanged.
2131 ??? sizetype is always sign-extended,
2132 but its signedness depends on the
2133 frontend. Thus we see spurious overflows
2134 here if we do not check this. */
2135 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2136 == TYPE_PRECISION (type))
2137 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2138 == TYPE_UNSIGNED (type))
2139 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2140 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2141 || (TREE_CODE (type) == INTEGER_TYPE
2142 && TYPE_IS_SIZETYPE (type)))),
/* Negative value converted from signed to unsigned: flag overflow.  */
2143 (TREE_INT_CST_HIGH (arg1) < 0
2144 && (TYPE_UNSIGNED (type)
2145 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2146 | TREE_OVERFLOW (arg1));
2151 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2152 to an integer type. */
2155 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2160 /* The following code implements the floating point to integer
2161 conversion rules required by the Java Language Specification,
2162 that IEEE NaNs are mapped to zero and values that overflow
2163 the target precision saturate, i.e. values greater than
2164 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2165 are mapped to INT_MIN. These semantics are allowed by the
2166 C and C++ standards that simply state that the behavior of
2167 FP-to-integer conversion is unspecified upon overflow. */
2169 HOST_WIDE_INT high, low;
2171 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* CODE selects the rounding direction (trunc/ceil/floor/round; only
   the truncation case is visible in this excerpt).  */
2175 case FIX_TRUNC_EXPR:
2176 real_trunc (&r, VOIDmode, &x);
2183 /* If R is NaN, return zero and show we have an overflow. */
2184 if (REAL_VALUE_ISNAN (r))
2191 /* See if R is less than the lower bound or greater than the
/* Saturate to TYPE_MIN_VALUE / TYPE_MAX_VALUE on out-of-range input,
   marking the result as overflowed.  */
2196 tree lt = TYPE_MIN_VALUE (type);
2197 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2198 if (REAL_VALUES_LESS (r, l))
2201 high = TREE_INT_CST_HIGH (lt);
2202 low = TREE_INT_CST_LOW (lt);
2208 tree ut = TYPE_MAX_VALUE (type);
2211 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2212 if (REAL_VALUES_LESS (u, r))
2215 high = TREE_INT_CST_HIGH (ut);
2216 low = TREE_INT_CST_LOW (ut);
/* In-range value: convert directly.  */
2222 REAL_VALUE_TO_INT (&low, &high, r);
2224 t = force_fit_type_double (type, low, high, -1,
2225 overflow | TREE_OVERFLOW (arg1));
2229 /* A subroutine of fold_convert_const handling conversions of a
2230 FIXED_CST to an integer type. */
2233 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2236 double_int temp, temp_trunc;
2239 /* Right shift FIXED_CST to temp by fbit. */
/* A left shift by a negative count is how lshift_double expresses a
   right shift here.  */
2240 temp = TREE_FIXED_CST (arg1).data;
2241 mode = TREE_FIXED_CST (arg1).mode;
2242 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2244 lshift_double (temp.low, temp.high,
2245 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2246 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2248 /* Left shift temp to temp_trunc by fbit. */
/* TEMP_TRUNC reconstructs the value with fractional bits cleared, so
   comparing against the original detects a nonzero fraction.  */
2249 lshift_double (temp.low, temp.high,
2250 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2251 &temp_trunc.low, &temp_trunc.high,
2252 SIGNED_FIXED_POINT_MODE_P (mode));
2259 temp_trunc.high = 0;
2262 /* If FIXED_CST is negative, we need to round the value toward 0.
2263 By checking if the fractional bits are not zero to add 1 to temp. */
2264 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2265 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2270 temp = double_int_add (temp, one);
2273 /* Given a fixed-point constant, make new constant with new type,
2274 appropriately sign-extended or truncated. */
2275 t = force_fit_type_double (type, temp.low, temp.high, -1,
2277 && (TYPE_UNSIGNED (type)
2278 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2279 | TREE_OVERFLOW (arg1));
2284 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2285 to another floating point type. */
2288 fold_convert_const_real_from_real (tree type, const_tree arg1)
2290 REAL_VALUE_TYPE value;
/* Round the source constant to the target mode and carry over any
   pre-existing overflow flag.  */
2293 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2294 t = build_real (type, value);
2296 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2300 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2301 to a floating point type. */
2304 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2306 REAL_VALUE_TYPE value;
2309 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2310 t = build_real (type, value);
/* Propagate both overflow flags from the source constant.  */
2312 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2313 TREE_CONSTANT_OVERFLOW (t)
2314 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2318 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2319 to another fixed-point type. */
2322 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2324 FIXED_VALUE_TYPE value;
/* fixed_convert reports overflow; saturation follows the target type.  */
2328 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2329 TYPE_SATURATING (type));
2330 t = build_fixed (type, value);
2332 /* Propagate overflow flags. */
2333 if (overflow_p | TREE_OVERFLOW (arg1))
2335 TREE_OVERFLOW (t) = 1;
2336 TREE_CONSTANT_OVERFLOW (t) = 1;
2338 else if (TREE_CONSTANT_OVERFLOW (arg1))
2339 TREE_CONSTANT_OVERFLOW (t) = 1;
2343 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2344 to a fixed-point type. */
2347 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2349 FIXED_VALUE_TYPE value;
/* The source's signedness controls the conversion; saturation follows
   the target type.  */
2353 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2354 TREE_INT_CST (arg1),
2355 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2356 TYPE_SATURATING (type));
2357 t = build_fixed (type, value);
2359 /* Propagate overflow flags. */
2360 if (overflow_p | TREE_OVERFLOW (arg1))
2362 TREE_OVERFLOW (t) = 1;
2363 TREE_CONSTANT_OVERFLOW (t) = 1;
2365 else if (TREE_CONSTANT_OVERFLOW (arg1))
2366 TREE_CONSTANT_OVERFLOW (t) = 1;
2370 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2371 to a fixed-point type. */
2374 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2376 FIXED_VALUE_TYPE value;
/* Saturation follows the target type; overflow is reported back.  */
2380 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2381 &TREE_REAL_CST (arg1),
2382 TYPE_SATURATING (type));
2383 t = build_fixed (type, value);
2385 /* Propagate overflow flags. */
2386 if (overflow_p | TREE_OVERFLOW (arg1))
2388 TREE_OVERFLOW (t) = 1;
2389 TREE_CONSTANT_OVERFLOW (t) = 1;
2391 else if (TREE_CONSTANT_OVERFLOW (arg1))
2392 TREE_CONSTANT_OVERFLOW (t) = 1;
2396 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2397 type TYPE. If no simplification can be done return NULL_TREE. */
2400 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* Trivial case: no conversion needed.  */
2402 if (TREE_TYPE (arg1) == type)
/* Dispatch on the target type class, then on the constant's kind.  */
2405 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2406 || TREE_CODE (type) == OFFSET_TYPE)
2408 if (TREE_CODE (arg1) == INTEGER_CST)
2409 return fold_convert_const_int_from_int (type, arg1);
2410 else if (TREE_CODE (arg1) == REAL_CST)
2411 return fold_convert_const_int_from_real (code, type, arg1);
2412 else if (TREE_CODE (arg1) == FIXED_CST)
2413 return fold_convert_const_int_from_fixed (type, arg1);
2415 else if (TREE_CODE (type) == REAL_TYPE)
2417 if (TREE_CODE (arg1) == INTEGER_CST)
2418 return build_real_from_int_cst (type, arg1);
2419 else if (TREE_CODE (arg1) == REAL_CST)
2420 return fold_convert_const_real_from_real (type, arg1);
2421 else if (TREE_CODE (arg1) == FIXED_CST)
2422 return fold_convert_const_real_from_fixed (type, arg1);
2424 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2426 if (TREE_CODE (arg1) == FIXED_CST)
2427 return fold_convert_const_fixed_from_fixed (type, arg1);
2428 else if (TREE_CODE (arg1) == INTEGER_CST)
2429 return fold_convert_const_fixed_from_int (type, arg1);
2430 else if (TREE_CODE (arg1) == REAL_CST)
2431 return fold_convert_const_fixed_from_real (type, arg1);
2436 /* Construct a vector of zero elements of vector type TYPE. */
2439 build_zero_vector (tree type)
/* Fold zero into the element type once, then replicate it for every
   vector lane via a TREE_LIST.  */
2444 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2445 units = TYPE_VECTOR_SUBPARTS (type);
2448 for (i = 0; i < units; i++)
2449 list = tree_cons (NULL_TREE, elem, list);
2450 return build_vector (type, list);
2453 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2456 fold_convertible_p (const_tree type, const_tree arg)
2458 tree orig = TREE_TYPE (arg);
/* Error marks anywhere make the conversion invalid.  */
2463 if (TREE_CODE (arg) == ERROR_MARK
2464 || TREE_CODE (type) == ERROR_MARK
2465 || TREE_CODE (orig) == ERROR_MARK)
/* Same main variant: trivially convertible.  */
2468 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2471 switch (TREE_CODE (type))
2473 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2474 case POINTER_TYPE: case REFERENCE_TYPE:
2476 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2477 || TREE_CODE (orig) == OFFSET_TYPE)
/* A vector can be viewed as an integer only when the sizes match.  */
2479 return (TREE_CODE (orig) == VECTOR_TYPE
2480 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2483 case FIXED_POINT_TYPE:
/* Remaining classes must match exactly by tree code.  */
2487 return TREE_CODE (type) == TREE_CODE (orig);
2494 /* Convert expression ARG to type TYPE. Used by the middle-end for
2495 simple conversions in preference to calling the front-end's convert. */
2498 fold_convert (tree type, tree arg)
2500 tree orig = TREE_TYPE (arg);
/* Any error mark poisons the conversion.  */
2506 if (TREE_CODE (arg) == ERROR_MARK
2507 || TREE_CODE (type) == ERROR_MARK
2508 || TREE_CODE (orig) == ERROR_MARK)
2509 return error_mark_node;
/* Same main variant: a plain NOP_EXPR is enough.  */
2511 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2512 return fold_build1 (NOP_EXPR, type, arg);
2514 switch (TREE_CODE (type))
2516 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2517 case POINTER_TYPE: case REFERENCE_TYPE:
/* Try to fold an integer constant conversion at compile time first.  */
2519 if (TREE_CODE (arg) == INTEGER_CST)
2521 tem = fold_convert_const (NOP_EXPR, type, arg);
2522 if (tem != NULL_TREE)
2525 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2526 || TREE_CODE (orig) == OFFSET_TYPE)
2527 return fold_build1 (NOP_EXPR, type, arg);
/* Complex source: convert its real part.  */
2528 if (TREE_CODE (orig) == COMPLEX_TYPE)
2530 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2531 return fold_convert (type, tem);
/* Last resort: a same-size vector reinterpreted via NOP_EXPR.  */
2533 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2534 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2535 return fold_build1 (NOP_EXPR, type, arg);
/* Floating-point target: fold constants of each source kind.  */
2538 if (TREE_CODE (arg) == INTEGER_CST)
2540 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2541 if (tem != NULL_TREE)
2544 else if (TREE_CODE (arg) == REAL_CST)
2546 tem = fold_convert_const (NOP_EXPR, type, arg);
2547 if (tem != NULL_TREE)
2550 else if (TREE_CODE (arg) == FIXED_CST)
2552 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2553 if (tem != NULL_TREE)
/* Non-constant source: pick the conversion code by source type class.  */
2557 switch (TREE_CODE (orig))
2560 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2561 case POINTER_TYPE: case REFERENCE_TYPE:
2562 return fold_build1 (FLOAT_EXPR, type, arg);
2565 return fold_build1 (NOP_EXPR, type, arg);
2567 case FIXED_POINT_TYPE:
2568 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2571 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2572 return fold_convert (type, tem);
2578 case FIXED_POINT_TYPE:
/* Fixed-point target: constants fold via FIXED_CONVERT_EXPR.  */
2579 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2580 || TREE_CODE (arg) == REAL_CST)
2582 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2583 if (tem != NULL_TREE)
2587 switch (TREE_CODE (orig))
2589 case FIXED_POINT_TYPE:
2594 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2597 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2598 return fold_convert (type, tem);
/* Complex target: scalar sources become (value, 0) pairs.  */
2605 switch (TREE_CODE (orig))
2608 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2609 case POINTER_TYPE: case REFERENCE_TYPE:
2611 case FIXED_POINT_TYPE:
2612 return fold_build2 (COMPLEX_EXPR, type,
2613 fold_convert (TREE_TYPE (type), arg),
2614 fold_convert (TREE_TYPE (type),
2615 integer_zero_node));
/* Complex-to-complex: convert the two parts separately.  */
2620 if (TREE_CODE (arg) == COMPLEX_EXPR)
2622 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2623 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2624 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* ARG is used twice below, so protect it with a SAVE_EXPR.  */
2627 arg = save_expr (arg);
2628 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2629 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2630 rpart = fold_convert (TREE_TYPE (type), rpart);
2631 ipart = fold_convert (TREE_TYPE (type), ipart);
2632 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* Vector target: zero folds directly, otherwise reinterpret the bits
   of a same-size integral/pointer/vector source.  */
2640 if (integer_zerop (arg))
2641 return build_zero_vector (type);
2642 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2643 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2644 || TREE_CODE (orig) == VECTOR_TYPE);
2645 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* Conversion to void: keep only the side effects of ARG.  */
2648 tem = fold_ignored_result (arg);
2649 if (TREE_CODE (tem) == MODIFY_EXPR)
2651 return fold_build1 (NOP_EXPR, type, tem);
2658 /* Return false if expr can be assumed not to be an lvalue, true
2662 maybe_lvalue_p (const_tree x)
2664 /* We only need to wrap lvalue tree codes. */
2665 switch (TREE_CODE (x))
2676 case ALIGN_INDIRECT_REF:
2677 case MISALIGNED_INDIRECT_REF:
2679 case ARRAY_RANGE_REF:
2685 case PREINCREMENT_EXPR:
2686 case PREDECREMENT_EXPR:
2688 case TRY_CATCH_EXPR:
2689 case WITH_CLEANUP_EXPR:
2700 /* Assume the worst for front-end tree codes. */
/* Codes at or beyond NUM_TREE_CODES are language-specific and unknown here,
   so treat them conservatively as possible lvalues.  */
2701 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2709 /* Return an expr equal to X but certainly not valid as an lvalue. */
2714 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* Nothing to do when X cannot be an lvalue in the first place.  */
2719 if (! maybe_lvalue_p (x))
/* Otherwise wrap X so callers cannot assign through it.  */
2721 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2724 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2725 Zero means allow extended lvalues. */
2727 int pedantic_lvalues;
2729 /* When pedantic, return an expr equal to X but certainly not valid as a
2730 pedantic lvalue. Otherwise, return X. */
2733 pedantic_non_lvalue (tree x)
/* Only strip lvalue-ness when the front end asked for pedantic rules.  */
2735 if (pedantic_lvalues)
2736 return non_lvalue (x)
2741 /* Given a tree comparison code, return the code that is the logical inverse
2742 of the given code. It is not safe to do this for floating-point
2743 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2744 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2747 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs, inverting an ordered comparison would
   change which inputs trap, so refuse.  */
2749 if (honor_nans && flag_trapping_math)
/* Ordered comparisons invert to their unordered counterparts when NaNs
   must be honored (e.g. !(a > b) is a UNLE b, not a <= b).  */
2759 return honor_nans ? UNLE_EXPR : LE_EXPR;
2761 return honor_nans ? UNLT_EXPR : LT_EXPR;
2763 return honor_nans ? UNGE_EXPR : GE_EXPR;
2765 return honor_nans ? UNGT_EXPR : GT_EXPR;
/* ORDERED and UNORDERED are exact logical inverses of each other.  */
2779 return UNORDERED_EXPR;
2780 case UNORDERED_EXPR:
2781 return ORDERED_EXPR;
2787 /* Similar, but return the comparison that results if the operands are
2788 swapped. This is safe for floating-point. */
2791 swap_tree_comparison (enum tree_code code)
/* Symmetric codes (EQ, NE, ORDERED, UNORDERED, ...) map to themselves;
   the ordering codes swap with their mirror image.  */
2798 case UNORDERED_EXPR:
2824 /* Convert a comparison tree code from an enum tree_code representation
2825 into a compcode bit-based encoding. This function is the inverse of
2826 compcode_to_comparison. */
2828 static enum comparison_code
2829 comparison_to_compcode (enum tree_code code)
/* Each comparison maps to a bitmask so that AND/OR of two comparisons
   can be computed as bitwise AND/OR of the encodings.  */
2846 return COMPCODE_ORD;
2847 case UNORDERED_EXPR:
2848 return COMPCODE_UNORD;
2850 return COMPCODE_UNLT;
2852 return COMPCODE_UNEQ;
2854 return COMPCODE_UNLE;
2856 return COMPCODE_UNGT;
2858 return COMPCODE_LTGT;
2860 return COMPCODE_UNGE;
2866 /* Convert a compcode bit-based encoding of a comparison operator back
2867 to GCC's enum tree_code representation. This function is the
2868 inverse of comparison_to_compcode. */
2870 static enum tree_code
2871 compcode_to_comparison (enum comparison_code code)
/* Reverse mapping of comparison_to_compcode above.  */
2888 return ORDERED_EXPR;
2889 case COMPCODE_UNORD:
2890 return UNORDERED_EXPR;
2908 /* Return a tree for the comparison which is the combination of
2909 doing the AND or OR (depending on CODE) of the two operations LCODE
2910 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2911 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2912 if this makes the transformation invalid. */
2915 combine_comparisons (enum tree_code code, enum tree_code lcode,
2916 enum tree_code rcode, tree truth_type,
2917 tree ll_arg, tree lr_arg)
2919 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2920 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2921 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2922 enum comparison_code compcode;
/* The bit encoding lets us combine the two comparisons with plain
   bitwise AND/OR.  */
2926 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2927 compcode = lcompcode & rcompcode;
2930 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2931 compcode = lcompcode | rcompcode;
2940 /* Eliminate unordered comparisons, as well as LTGT and ORD
2941 which are not used unless the mode has NaNs. */
2942 compcode &= ~COMPCODE_UNORD;
2943 if (compcode == COMPCODE_LTGT)
2944 compcode = COMPCODE_NE;
2945 else if (compcode == COMPCODE_ORD)
2946 compcode = COMPCODE_TRUE;
2948 else if (flag_trapping_math)
2950 /* Check that the original operation and the optimized ones will trap
2951 under the same condition. */
2952 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2953 && (lcompcode != COMPCODE_EQ)
2954 && (lcompcode != COMPCODE_ORD);
2955 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2956 && (rcompcode != COMPCODE_EQ)
2957 && (rcompcode != COMPCODE_ORD);
2958 bool trap = (compcode & COMPCODE_UNORD) == 0
2959 && (compcode != COMPCODE_EQ)
2960 && (compcode != COMPCODE_ORD);
2962 /* In a short-circuited boolean expression the LHS might be
2963 such that the RHS, if evaluated, will never trap. For
2964 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2965 if neither x nor y is NaN. (This is a mixed blessing: for
2966 example, the expression above will never trap, hence
2967 optimizing it to x < y would be invalid). */
2968 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2969 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2972 /* If the comparison was short-circuited, and only the RHS
2973 trapped, we may now generate a spurious trap. */
2975 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2978 /* If we changed the conditions that cause a trap, we lose. */
2979 if ((ltrap || rtrap) != trap)
/* Degenerate combinations fold to a boolean constant.  */
2983 if (compcode == COMPCODE_TRUE)
2984 return constant_boolean_node (true, truth_type);
2985 else if (compcode == COMPCODE_FALSE)
2986 return constant_boolean_node (false, truth_type);
/* Otherwise emit the single combined comparison.  */
2988 return fold_build2 (compcode_to_comparison (compcode),
2989 truth_type, ll_arg, lr_arg);
2992 /* Return nonzero if two operands (typically of the same tree node)
2993 are necessarily equal. If either argument has side-effects this
2994 function returns zero. FLAGS modifies behavior as follows:
2996 If OEP_ONLY_CONST is set, only return nonzero for constants.
2997 This function tests whether the operands are indistinguishable;
2998 it does not test whether they are equal using C's == operation.
2999 The distinction is important for IEEE floating point, because
3000 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3001 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3003 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3004 even though it may hold multiple values during a function.
3005 This is because a GCC tree node guarantees that nothing else is
3006 executed between the evaluation of its "operands" (which may often
3007 be evaluated in arbitrary order). Hence if the operands themselves
3008 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3009 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3010 unset means assuming isochronic (or instantaneous) tree equivalence.
3011 Unless comparing arbitrary expression trees, such as from different
3012 statements, this flag can usually be left unset.
3014 If OEP_PURE_SAME is set, then pure functions with identical arguments
3015 are considered the same. It is used when the caller has other ways
3016 to ensure that global memory is unchanged in between. */
3019 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3021 /* If either is ERROR_MARK, they aren't equal. */
3022 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3025 /* Check equality of integer constants before bailing out due to
3026 precision differences. */
3027 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3028 return tree_int_cst_equal (arg0, arg1);
3030 /* If both types don't have the same signedness, then we can't consider
3031 them equal. We must check this before the STRIP_NOPS calls
3032 because they may change the signedness of the arguments. As pointers
3033 strictly don't have a signedness, require either two pointers or
3034 two non-pointers as well. */
3035 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3036 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3039 /* If both types don't have the same precision, then it is not safe
3041 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3047 /* In case both args are comparisons but with different comparison
3048 code, try to swap the comparison operands of one arg to produce
3049 a match and compare that variant. */
3050 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3051 && COMPARISON_CLASS_P (arg0)
3052 && COMPARISON_CLASS_P (arg1))
3054 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3056 if (TREE_CODE (arg0) == swap_code)
3057 return operand_equal_p (TREE_OPERAND (arg0, 0),
3058 TREE_OPERAND (arg1, 1), flags)
3059 && operand_equal_p (TREE_OPERAND (arg0, 1),
3060 TREE_OPERAND (arg1, 0), flags);
3063 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3064 /* This is needed for conversions and for COMPONENT_REF.
3065 Might as well play it safe and always test this. */
3066 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3067 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3068 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3071 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3072 We don't care about side effects in that case because the SAVE_EXPR
3073 takes care of that for us. In all other cases, two expressions are
3074 equal if they have no side effects. If we have two identical
3075 expressions with side effects that should be treated the same due
3076 to the only side effects being identical SAVE_EXPR's, that will
3077 be detected in the recursive calls below. */
3078 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3079 && (TREE_CODE (arg0) == SAVE_EXPR
3080 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3083 /* Next handle constant cases, those for which we can return 1 even
3084 if ONLY_CONST is set. */
3085 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3086 switch (TREE_CODE (arg0))
3089 return tree_int_cst_equal (arg0, arg1);
3092 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3093 TREE_FIXED_CST (arg1));
/* Bit-identical reals are equal regardless of -0.0/NaN subtleties.  */
3096 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3097 TREE_REAL_CST (arg1)))
3101 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3103 /* If we do not distinguish between signed and unsigned zero,
3104 consider them equal. */
3105 if (real_zerop (arg0) && real_zerop (arg1))
/* Vector constants: compare element lists pairwise.  */
3114 v1 = TREE_VECTOR_CST_ELTS (arg0);
3115 v2 = TREE_VECTOR_CST_ELTS (arg1);
3118 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3121 v1 = TREE_CHAIN (v1);
3122 v2 = TREE_CHAIN (v2);
/* Complex constants: both parts must match.  */
3129 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3131 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* String constants: same length and same bytes.  */
3135 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3136 && ! memcmp (TREE_STRING_POINTER (arg0),
3137 TREE_STRING_POINTER (arg1),
3138 TREE_STRING_LENGTH (arg0)));
3141 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
/* Past the constant cases: give up if only constants were wanted.  */
3147 if (flags & OEP_ONLY_CONST)
3150 /* Define macros to test an operand from arg0 and arg1 for equality and a
3151 variant that allows null and views null as being different from any
3152 non-null value. In the latter case, if either is null, the both
3153 must be; otherwise, do the normal comparison. */
3154 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3155 TREE_OPERAND (arg1, N), flags)
3157 #define OP_SAME_WITH_NULL(N) \
3158 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3159 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3161 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3164 /* Two conversions are equal only if signedness and modes match. */
3165 switch (TREE_CODE (arg0))
3168 case FIX_TRUNC_EXPR:
3169 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3170 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3180 case tcc_comparison:
3182 if (OP_SAME (0) && OP_SAME (1))
3185 /* For commutative ops, allow the other order. */
3186 return (commutative_tree_code (TREE_CODE (arg0))
3187 && operand_equal_p (TREE_OPERAND (arg0, 0),
3188 TREE_OPERAND (arg1, 1), flags)
3189 && operand_equal_p (TREE_OPERAND (arg0, 1),
3190 TREE_OPERAND (arg1, 0), flags));
3193 /* If either of the pointer (or reference) expressions we are
3194 dereferencing contain a side effect, these cannot be equal. */
3195 if (TREE_SIDE_EFFECTS (arg0)
3196 || TREE_SIDE_EFFECTS (arg1))
3199 switch (TREE_CODE (arg0))
3202 case ALIGN_INDIRECT_REF:
3203 case MISALIGNED_INDIRECT_REF:
3209 case ARRAY_RANGE_REF:
3210 /* Operands 2 and 3 may be null.
3211 Compare the array index by value if it is constant first as we
3212 may have different types but same value here. */
3214 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3215 TREE_OPERAND (arg1, 1))
3217 && OP_SAME_WITH_NULL (2)
3218 && OP_SAME_WITH_NULL (3));
3221 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3222 may be NULL when we're called to compare MEM_EXPRs. */
3223 return OP_SAME_WITH_NULL (0)
3225 && OP_SAME_WITH_NULL (2);
3228 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3234 case tcc_expression:
3235 switch (TREE_CODE (arg0))
3238 case TRUTH_NOT_EXPR:
3241 case TRUTH_ANDIF_EXPR:
3242 case TRUTH_ORIF_EXPR:
3243 return OP_SAME (0) && OP_SAME (1);
3245 case TRUTH_AND_EXPR:
3247 case TRUTH_XOR_EXPR:
3248 if (OP_SAME (0) && OP_SAME (1))
3251 /* Otherwise take into account this is a commutative operation. */
3252 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3253 TREE_OPERAND (arg1, 1), flags)
3254 && operand_equal_p (TREE_OPERAND (arg0, 1),
3255 TREE_OPERAND (arg1, 0), flags));
3258 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3265 switch (TREE_CODE (arg0))
3268 /* If the CALL_EXPRs call different functions, then they
3269 clearly can not be equal. */
3270 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
/* Calls with side effects are only comparable when the caller opted
   in via OEP_PURE_SAME and the callee is const/pure.  */
3275 unsigned int cef = call_expr_flags (arg0);
3276 if (flags & OEP_PURE_SAME)
3277 cef &= ECF_CONST | ECF_PURE;
3284 /* Now see if all the arguments are the same. */
3286 const_call_expr_arg_iterator iter0, iter1;
3288 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3289 a1 = first_const_call_expr_arg (arg1, &iter1);
3291 a0 = next_const_call_expr_arg (&iter0),
3292 a1 = next_const_call_expr_arg (&iter1))
3293 if (! operand_equal_p (a0, a1, flags))
3296 /* If we get here and both argument lists are exhausted
3297 then the CALL_EXPRs are equal. */
3298 return ! (a0 || a1);
3304 case tcc_declaration:
3305 /* Consider __builtin_sqrt equal to sqrt. */
3306 return (TREE_CODE (arg0) == FUNCTION_DECL
3307 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3308 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3309 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3316 #undef OP_SAME_WITH_NULL
3319 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3320 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3322 When in doubt, return 0. */
3325 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3327 int unsignedp1, unsignedpo;
3328 tree primarg0, primarg1, primother;
3329 unsigned int correct_width;
/* Exact equality is the easy case.  */
3331 if (operand_equal_p (arg0, arg1, 0))
/* Only integral operands can have been shortened.  */
3334 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3335 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3338 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3339 and see if the inner values are the same. This removes any
3340 signedness comparison, which doesn't matter here. */
3341 primarg0 = arg0, primarg1 = arg1;
3342 STRIP_NOPS (primarg0);
3343 STRIP_NOPS (primarg1);
3344 if (operand_equal_p (primarg0, primarg1, 0))
3347 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3348 actual comparison operand, ARG0.
3350 First throw away any conversions to wider types
3351 already present in the operands. */
3353 primarg1 = get_narrower (arg1, &unsignedp1);
3354 primother = get_narrower (other, &unsignedpo);
3356 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3357 if (unsignedp1 == unsignedpo
3358 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3359 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3361 tree type = TREE_TYPE (arg0);
3363 /* Make sure shorter operand is extended the right way
3364 to match the longer operand. */
3365 primarg1 = fold_convert (signed_or_unsigned_type_for
3366 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3368 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3375 /* See if ARG is an expression that is either a comparison or is performing
3376 arithmetic on comparisons. The comparisons must only be comparing
3377 two different values, which will be stored in *CVAL1 and *CVAL2; if
3378 they are nonzero it means that some operands have already been found.
3379 No variables may be used anywhere else in the expression except in the
3380 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3381 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3383 If this is true, return 1. Otherwise, return zero. */
3386 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3388 enum tree_code code = TREE_CODE (arg);
3389 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3391 /* We can handle some of the tcc_expression cases here. */
3392 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3394 else if (tclass == tcc_expression
3395 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3396 || code == COMPOUND_EXPR))
3397 tclass = tcc_binary;
3399 else if (tclass == tcc_expression && code == SAVE_EXPR
3400 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3402 /* If we've already found a CVAL1 or CVAL2, this expression is
3403 too complex to handle. */
3404 if (*cval1 || *cval2)
/* Unary case: recurse on the sole operand.  */
3414 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must satisfy the property.  */
3417 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3418 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3419 cval1, cval2, save_p));
3424 case tcc_expression:
3425 if (code == COND_EXPR)
3426 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3427 cval1, cval2, save_p)
3428 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3429 cval1, cval2, save_p)
3430 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3431 cval1, cval2, save_p));
3434 case tcc_comparison:
3435 /* First see if we can handle the first operand, then the second. For
3436 the second operand, we know *CVAL1 can't be zero. It must be that
3437 one side of the comparison is each of the values; test for the
3438 case where this isn't true by failing if the two operands
3441 if (operand_equal_p (TREE_OPERAND (arg, 0),
3442 TREE_OPERAND (arg, 1), 0))
/* Record the first operand as CVAL1/CVAL2, or match an existing one.  */
3446 *cval1 = TREE_OPERAND (arg, 0);
3447 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3449 else if (*cval2 == 0)
3450 *cval2 = TREE_OPERAND (arg, 0);
3451 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Same for the second operand.  */
3456 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3458 else if (*cval2 == 0)
3459 *cval2 = TREE_OPERAND (arg, 1);
3460 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3472 /* ARG is a tree that is known to contain just arithmetic operations and
3473 comparisons. Evaluate the operations in the tree substituting NEW0 for
3474 any occurrence of OLD0 as an operand of a comparison and likewise for
3478 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3480 tree type = TREE_TYPE (arg);
3481 enum tree_code code = TREE_CODE (arg);
3482 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3484 /* We can handle some of the tcc_expression cases here. */
3485 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3487 else if (tclass == tcc_expression
3488 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3489 tclass = tcc_binary;
/* Unary: rebuild with the substituted operand.  */
3494 return fold_build1 (code, type,
3495 eval_subst (TREE_OPERAND (arg, 0),
3496 old0, new0, old1, new1));
/* Binary: substitute into both operands.  */
3499 return fold_build2 (code, type,
3500 eval_subst (TREE_OPERAND (arg, 0),
3501 old0, new0, old1, new1),
3502 eval_subst (TREE_OPERAND (arg, 1),
3503 old0, new0, old1, new1));
3505 case tcc_expression:
3509 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3512 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* COND_EXPR: substitute into condition and both arms.  */
3515 return fold_build3 (code, type,
3516 eval_subst (TREE_OPERAND (arg, 0),
3517 old0, new0, old1, new1),
3518 eval_subst (TREE_OPERAND (arg, 1),
3519 old0, new0, old1, new1),
3520 eval_subst (TREE_OPERAND (arg, 2),
3521 old0, new0, old1, new1));
3525 /* Fall through - ??? */
3527 case tcc_comparison:
3529 tree arg0 = TREE_OPERAND (arg, 0);
3530 tree arg1 = TREE_OPERAND (arg, 1);
3532 /* We need to check both for exact equality and tree equality. The
3533 former will be true if the operand has a side-effect. In that
3534 case, we know the operand occurred exactly once. */
3536 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3538 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3541 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3543 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3546 return fold_build2 (code, type, arg0, arg1);
3554 /* Return a tree for the case when the result of an expression is RESULT
3555 converted to TYPE and OMITTED was previously an operand of the expression
3556 but is now not needed (e.g., we folded OMITTED * 0).
3558 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3559 the conversion of RESULT to TYPE. */
3562 omit_one_operand (tree type, tree result, tree omitted)
3564 tree t = fold_convert (type, result);
3566 /* If the resulting operand is an empty statement, just return the omitted
3567 statement casted to void. */
3568 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3569 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
/* Keep OMITTED's side effects by sequencing it before the result.  */
3571 if (TREE_SIDE_EFFECTS (omitted))
3572 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3574 return non_lvalue (t);
3577 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3580 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3582 tree t = fold_convert (type, result);
3584 /* If the resulting operand is an empty statement, just return the omitted
3585 statement casted to void. */
3586 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3587 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
/* Preserve OMITTED's side effects, as in omit_one_operand.  */
3589 if (TREE_SIDE_EFFECTS (omitted))
3590 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3592 return pedantic_non_lvalue (t);
3595 /* Return a tree for the case when the result of an expression is RESULT
3596 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3597 of the expression but are now not needed.
3599 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3600 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3601 evaluated before OMITTED2. Otherwise, if neither has side effects,
3602 just do the conversion of RESULT to TYPE. */
3605 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3607 tree t = fold_convert (type, result);
/* Prepend OMITTED2 first, then OMITTED1, so OMITTED1 ends up
   evaluated before OMITTED2 in the final COMPOUND_EXPR chain.  */
3609 if (TREE_SIDE_EFFECTS (omitted2))
3610 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3611 if (TREE_SIDE_EFFECTS (omitted1))
3612 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3614 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3618 /* Return a simplified tree node for the truth-negation of ARG. This
3619 never alters ARG itself. We assume that ARG is an operation that
3620 returns a truth value (0 or 1).
3622 FIXME: one would think we would fold the result, but it causes
3623 problems with the dominator optimizer. */
3626 fold_truth_not_expr (tree arg)
3628 tree type = TREE_TYPE (arg);
3629 enum tree_code code = TREE_CODE (arg);
3631 /* If this is a comparison, we can simply invert it, except for
3632 floating-point non-equality comparisons, in which case we just
3633 enclose a TRUTH_NOT_EXPR around what we have. */
3635 if (TREE_CODE_CLASS (code) == tcc_comparison)
3637 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3638 if (FLOAT_TYPE_P (op_type)
3639 && flag_trapping_math
3640 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3641 && code != NE_EXPR && code != EQ_EXPR)
3645 code = invert_tree_comparison (code,
3646 HONOR_NANS (TYPE_MODE (op_type)));
/* invert_tree_comparison signals "cannot invert" with ERROR_MARK.  */
3647 if (code == ERROR_MARK)
3650 return build2 (code, type,
3651 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant: negate it directly.  */
3658 return constant_boolean_node (integer_zerop (arg), type);
/* De Morgan: !(a && b) == !a || !b, and dually for OR.  */
3660 case TRUTH_AND_EXPR:
3661 return build2 (TRUTH_OR_EXPR, type,
3662 invert_truthvalue (TREE_OPERAND (arg, 0)),
3663 invert_truthvalue (TREE_OPERAND (arg, 1)));
3666 return build2 (TRUTH_AND_EXPR, type,
3667 invert_truthvalue (TREE_OPERAND (arg, 0)),
3668 invert_truthvalue (TREE_OPERAND (arg, 1)));
3670 case TRUTH_XOR_EXPR:
3671 /* Here we can invert either operand. We invert the first operand
3672 unless the second operand is a TRUTH_NOT_EXPR in which case our
3673 result is the XOR of the first operand with the inside of the
3674 negation of the second operand. */
3676 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3677 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3678 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3680 return build2 (TRUTH_XOR_EXPR, type,
3681 invert_truthvalue (TREE_OPERAND (arg, 0)),
3682 TREE_OPERAND (arg, 1));
/* De Morgan for the short-circuiting forms keeps short-circuiting.  */
3684 case TRUTH_ANDIF_EXPR:
3685 return build2 (TRUTH_ORIF_EXPR, type,
3686 invert_truthvalue (TREE_OPERAND (arg, 0)),
3687 invert_truthvalue (TREE_OPERAND (arg, 1)));
3689 case TRUTH_ORIF_EXPR:
3690 return build2 (TRUTH_ANDIF_EXPR, type,
3691 invert_truthvalue (TREE_OPERAND (arg, 0)),
3692 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* Double negation cancels.  */
3694 case TRUTH_NOT_EXPR:
3695 return TREE_OPERAND (arg, 0);
3699 tree arg1 = TREE_OPERAND (arg, 1);
3700 tree arg2 = TREE_OPERAND (arg, 2);
3701 /* A COND_EXPR may have a throw as one operand, which
3702 then has void type. Just leave void operands
3704 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3705 VOID_TYPE_P (TREE_TYPE (arg1))
3706 ? arg1 : invert_truthvalue (arg1),
3707 VOID_TYPE_P (TREE_TYPE (arg2))
3708 ? arg2 : invert_truthvalue (arg2));
/* (a, b) negates to (a, !b).  */
3712 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3713 invert_truthvalue (TREE_OPERAND (arg, 1)));
3715 case NON_LVALUE_EXPR:
3716 return invert_truthvalue (TREE_OPERAND (arg, 0));
3719 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3720 return build1 (TRUTH_NOT_EXPR, type, arg);
/* Push the negation through a conversion.  */
3724 return build1 (TREE_CODE (arg), type,
3725 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* (x & 1) negates to (x & 1) == 0; other masks are not truth values.  */
3728 if (!integer_onep (TREE_OPERAND (arg, 1)))
3730 return build2 (EQ_EXPR, type, arg,
3731 build_int_cst (type, 0));
3734 return build1 (TRUTH_NOT_EXPR, type, arg);
3736 case CLEANUP_POINT_EXPR:
3737 return build1 (CLEANUP_POINT_EXPR, type,
3738 invert_truthvalue (TREE_OPERAND (arg, 0)));
3747 /* Return a simplified tree node for the truth-negation of ARG. This
3748 never alters ARG itself. We assume that ARG is an operation that
3749 returns a truth value (0 or 1).
3751 FIXME: one would think we would fold the result, but it causes
3752 problems with the dominator optimizer. */
3755 invert_truthvalue (tree arg)
/* Errors propagate unchanged.  */
3759 if (TREE_CODE (arg) == ERROR_MARK)
/* Try the simplifying worker; fall back to an explicit TRUTH_NOT_EXPR.  */
3762 tem = fold_truth_not_expr (arg);
3764 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3769 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3770 operands are another bit-wise operation with a common input. If so,
3771 distribute the bit operations to save an operation and possibly two if
3772 constants are involved. For example, convert
3773 (A | B) & (A | C) into A | (B & C)
3774 Further simplification will occur if B and C are constants.
3776 If this optimization cannot be done, 0 will be returned. */
3779 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must be the same inner bit operation (AND or IOR),
   and different from the outer CODE, for distribution to apply.  */
3784 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3785 || TREE_CODE (arg0) == code
3786 || (TREE_CODE (arg0) != BIT_AND_EXPR
3787 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the common operand; the inner ops are commutative so try
   all four operand pairings.  */
3790 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3792 common = TREE_OPERAND (arg0, 0);
3793 left = TREE_OPERAND (arg0, 1);
3794 right = TREE_OPERAND (arg1, 1);
3796 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3798 common = TREE_OPERAND (arg0, 0);
3799 left = TREE_OPERAND (arg0, 1);
3800 right = TREE_OPERAND (arg1, 0);
3802 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3804 common = TREE_OPERAND (arg0, 1);
3805 left = TREE_OPERAND (arg0, 0);
3806 right = TREE_OPERAND (arg1, 1);
3808 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3810 common = TREE_OPERAND (arg0, 1);
3811 left = TREE_OPERAND (arg0, 0);
3812 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON inner-op (LEFT outer-op RIGHT).  */
3817 common = fold_convert (type, common);
3818 left = fold_convert (type, left);
3819 right = fold_convert (type, right);
3820 return fold_build2 (TREE_CODE (arg0), type, common,
3821 fold_build2 (code, type, left, right));
3824 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3825 with code CODE. This optimization is unsafe. */
3827 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3829 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3830 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3832 /* (A / C) +- (B / C) -> (A +- B) / C. */
3834 && operand_equal_p (TREE_OPERAND (arg0, 1),
3835 TREE_OPERAND (arg1, 1), 0))
3836 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3837 fold_build2 (code, type,
3838 TREE_OPERAND (arg0, 0),
3839 TREE_OPERAND (arg1, 0)),
3840 TREE_OPERAND (arg0, 1));
3842 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3843 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3844 TREE_OPERAND (arg1, 0), 0)
3845 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3846 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3848 REAL_VALUE_TYPE r0, r1;
3849 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3850 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* Compute the combined reciprocal constant at compile time.  */
3852 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3854 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3855 real_arithmetic (&r0, code, &r0, &r1);
3856 return fold_build2 (MULT_EXPR, type,
3857 TREE_OPERAND (arg0, 0),
3858 build_real (type, r0));
3864 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3865 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3868 make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
3869 HOST_WIDE_INT bitpos, int unsignedp)
3871 tree result, bftype;
/* If the requested field covers the whole of an integral or pointer
   object, no BIT_FIELD_REF is needed -- a plain conversion suffices.  */
3875 tree size = TYPE_SIZE (TREE_TYPE (inner));
3876 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3877 || POINTER_TYPE_P (TREE_TYPE (inner)))
3878 && host_integerp (size, 0)
3879 && tree_low_cst (size, 0) == bitsize)
3880 return fold_convert (type, inner);
/* If TYPE's precision or signedness doesn't match the field, build a
   bespoke integer type of exactly BITSIZE bits for the reference.  */
3884 if (TYPE_PRECISION (bftype) != bitsize
3885 || TYPE_UNSIGNED (bftype) == !unsignedp)
3886 bftype = build_nonstandard_integer_type (bitsize, 0);
3888 result = build3 (BIT_FIELD_REF, bftype, inner,
3889 size_int (bitsize), bitsize_int (bitpos));
3892 result = fold_convert (type, result);
3897 /* Optimize a bit-field compare.
3899 There are two cases: First is a compare against a constant and the
3900 second is a comparison of two items where the fields are at the same
3901 bit position relative to the start of a chunk (byte, halfword, word)
3902 large enough to contain it. In these cases we can avoid the shift
3903 implicit in bitfield extractions.
3905 For constants, we emit a compare of the shifted constant with the
3906 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3907 compared. For two fields at the same position, we do the ANDs with the
3908 similar mask and compare the result of the ANDs.
3910 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3911 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3912 are the left and right operands of the comparison, respectively.
3914 If the optimization described above can be done, we return the resulting
3915 tree. Otherwise we return zero. */
3918 optimize_bit_field_compare (enum tree_code code, tree compare_type,
/* NOTE(review): lossy extraction -- some declarations, braces and early
   returns between the numbered lines are not visible here.  */
3921 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3922 tree type = TREE_TYPE (lhs);
3923 tree signed_type, unsigned_type;
/* Nonzero when RHS is a compile-time integer constant; selects between
   the constant and the two-field forms of the optimization.  */
3924 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3925 enum machine_mode lmode, rmode, nmode;
3926 int lunsignedp, runsignedp;
3927 int lvolatilep = 0, rvolatilep = 0;
3928 tree linner, rinner = NULL_TREE;
3932 /* Get all the information about the extractions being done. If the bit size
3933 is the same as the size of the underlying object, we aren't doing an
3934 extraction at all and so can do nothing. We also don't want to
3935 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3936 then will no longer be able to replace it. */
3937 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3938 &lunsignedp, &lvolatilep, false);
3939 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3940 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3945 /* If this is not a constant, we can only do something if bit positions,
3946 sizes, and signedness are the same. */
3947 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3948 &runsignedp, &rvolatilep, false);
3950 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3951 || lunsignedp != runsignedp || offset != 0
3952 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3956 /* See if we can find a mode to refer to this field. We should be able to,
3957 but fail if we can't. */
3958 nmode = get_best_mode (lbitsize, lbitpos,
3959 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3960 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3961 TYPE_ALIGN (TREE_TYPE (rinner))),
3962 word_mode, lvolatilep || rvolatilep);
3963 if (nmode == VOIDmode)
3966 /* Set signed and unsigned types of the precision of this mode for the
3968 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3969 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3971 /* Compute the bit position and size for the new reference and our offset
3972 within it. If the new reference is the same size as the original, we
3973 won't optimize anything, so return zero. */
3974 nbitsize = GET_MODE_BITSIZE (nmode);
/* Align the new reference down to a multiple of its size in bits.  */
3975 nbitpos = lbitpos & ~ (nbitsize - 1);
3977 if (nbitsize == lbitsize)
3980 if (BYTES_BIG_ENDIAN)
3981 lbitpos = nbitsize - lbitsize - lbitpos;
3983 /* Make the mask to be used against the extracted field. */
/* All-ones value, left-shifted then right-shifted so that exactly
   LBITSIZE ones sit at bit position LBITPOS within the NBITSIZE word.  */
3984 mask = build_int_cst_type (unsigned_type, -1);
3985 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3986 mask = const_binop (RSHIFT_EXPR, mask,
3987 size_int (nbitsize - lbitsize - lbitpos), 0);
3990 /* If not comparing with constant, just rework the comparison
3992 return fold_build2 (code, compare_type,
3993 fold_build2 (BIT_AND_EXPR, unsigned_type,
3994 make_bit_field_ref (linner,
3999 fold_build2 (BIT_AND_EXPR, unsigned_type,
4000 make_bit_field_ref (rinner,
4006 /* Otherwise, we are handling the constant case. See if the constant is too
4007 big for the field. Warn and return a tree for 0 (false) if so. We do
4008 this not only for its own sake, but to avoid having to test for this
4009 error case below. If we didn't, we might generate wrong code.
4011 For unsigned fields, the constant shifted right by the field length should
4012 be all zero. For signed fields, the high-order bits should agree with
4017 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4018 fold_convert (unsigned_type, rhs),
4019 size_int (lbitsize), 0)))
4021 warning (0, "comparison is always %d due to width of bit-field",
4023 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed case: shifting right by LBITSIZE-1 must yield all zeros or all
   ones (sign extension), otherwise RHS cannot fit in the field.  */
4028 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4029 size_int (lbitsize - 1), 0);
4030 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4032 warning (0, "comparison is always %d due to width of bit-field",
4034 return constant_boolean_node (code == NE_EXPR, compare_type);
4038 /* Single-bit compares should always be against zero. */
4039 if (lbitsize == 1 && ! integer_zerop (rhs))
4041 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4042 rhs = build_int_cst (type, 0);
4045 /* Make a new bitfield reference, shift the constant over the
4046 appropriate number of bits and mask it with the computed mask
4047 (in case this was a signed field). If we changed it, make a new one. */
4048 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4051 TREE_SIDE_EFFECTS (lhs) = 1;
4052 TREE_THIS_VOLATILE (lhs) = 1;
4055 rhs = const_binop (BIT_AND_EXPR,
4056 const_binop (LSHIFT_EXPR,
4057 fold_convert (unsigned_type, rhs),
4058 size_int (lbitpos), 0),
4061 return build2 (code, compare_type,
4062 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4066 /* Subroutine for fold_truthop: decode a field reference.
4068 If EXP is a comparison reference, we return the innermost reference.
4070 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4071 set to the starting bit number.
4073 If the innermost field can be completely contained in a mode-sized
4074 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4076 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4077 otherwise it is not changed.
4079 *PUNSIGNEDP is set to the signedness of the field.
4081 *PMASK is set to the mask used. This is either contained in a
4082 BIT_AND_EXPR or derived from the width of the field.
4084 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4086 Return 0 if this is not a component reference or is one that we can't
4087 do anything with. */
4090 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4091 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4092 int *punsignedp, int *pvolatilep,
4093 tree *pmask, tree *pand_mask)
4095 tree outer_type = 0;
4097 tree mask, inner, offset;
4099 unsigned int precision;
4101 /* All the optimizations using this function assume integer fields.
4102 There are problems with FP fields since the type_for_size call
4103 below can fail for, e.g., XFmode. */
4104 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4107 /* We are interested in the bare arrangement of bits, so strip everything
4108 that doesn't affect the machine mode. However, record the type of the
4109 outermost expression if it may matter below. */
4110 if (CONVERT_EXPR_P (exp)
4111 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4112 outer_type = TREE_TYPE (exp);
/* Peel off an explicit BIT_AND_EXPR mask, remembering it in AND_MASK;
   only constant masks are usable.  */
4115 if (TREE_CODE (exp) == BIT_AND_EXPR)
4117 and_mask = TREE_OPERAND (exp, 1);
4118 exp = TREE_OPERAND (exp, 0);
4119 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4120 if (TREE_CODE (and_mask) != INTEGER_CST)
/* Fail if no actual field reference was found (unless an AND mask makes
   the bare value usable), or the reference is variable-offset or a
   PLACEHOLDER_EXPR.  */
4124 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4125 punsignedp, pvolatilep, false);
4126 if ((inner == exp && and_mask == 0)
4127 || *pbitsize < 0 || offset != 0
4128 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4131 /* If the number of bits in the reference is the same as the bitsize of
4132 the outer type, then the outer type gives the signedness. Otherwise
4133 (in case of a small bitfield) the signedness is unchanged. */
4134 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4135 *punsignedp = TYPE_UNSIGNED (outer_type);
4137 /* Compute the mask to access the bitfield. */
4138 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4139 precision = TYPE_PRECISION (unsigned_type);
/* Shift an all-ones constant left then right to leave exactly
   *PBITSIZE low-order one bits.  */
4141 mask = build_int_cst_type (unsigned_type, -1);
4143 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4144 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4146 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4148 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4149 fold_convert (unsigned_type, and_mask), mask);
4152 *pand_mask = and_mask;
4156 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4160 all_ones_mask_p (const_tree mask, int size)
4162 tree type = TREE_TYPE (mask);
4163 unsigned int precision = TYPE_PRECISION (type);
/* Build an all-ones constant in the signed variant of TYPE, then compare
   MASK against that constant shifted to leave exactly SIZE low-order
   one bits.  */
4166 tmask = build_int_cst_type (signed_type_for (type), -1);
4169 tree_int_cst_equal (mask,
4170 const_binop (RSHIFT_EXPR,
4171 const_binop (LSHIFT_EXPR, tmask,
4172 size_int (precision - size),
4174 size_int (precision - size), 0));
4177 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4178 represents the sign bit of EXP's type. If EXP represents a sign
4179 or zero extension, also test VAL against the unextended type.
4180 The return value is the (sub)expression whose sign bit is VAL,
4181 or NULL_TREE otherwise. */
4184 sign_bit_p (tree exp, const_tree val)
/* The two-word (hi/lo) pairs hold the expected sign-bit pattern and the
   mask of bits that lie within EXP's precision.  */
4186 unsigned HOST_WIDE_INT mask_lo, lo;
4187 HOST_WIDE_INT mask_hi, hi;
4191 /* Tree EXP must have an integral type. */
4192 t = TREE_TYPE (exp);
4193 if (! INTEGRAL_TYPE_P (t))
4196 /* Tree VAL must be an integer constant. */
4197 if (TREE_CODE (val) != INTEGER_CST
4198 || TREE_OVERFLOW (val))
4201 width = TYPE_PRECISION (t);
/* Wide case: the sign bit lives in the high word.  */
4202 if (width > HOST_BITS_PER_WIDE_INT)
4204 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4207 mask_hi = ((unsigned HOST_WIDE_INT) -1
4208 >> (2 * HOST_BITS_PER_WIDE_INT - width));
/* Narrow case: the sign bit lives in the low word.  */
4214 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4217 mask_lo = ((unsigned HOST_WIDE_INT) -1
4218 >> (HOST_BITS_PER_WIDE_INT - width));
4221 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4222 treat VAL as if it were unsigned. */
4223 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4224 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4227 /* Handle extension from a narrower type. */
4228 if (TREE_CODE (exp) == NOP_EXPR
4229 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4230 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4235 /* Subroutine for fold_truthop: determine if an operand is simple enough
4236 to be evaluated unconditionally. */
4239 simple_operand_p (const_tree exp)
4241 /* Strip any conversions that don't change the machine mode. */
/* Constants and SSA names are always cheap; otherwise the operand must
   be a non-volatile, non-addressable local (see exclusions below).  */
4244 return (CONSTANT_CLASS_P (exp)
4245 || TREE_CODE (exp) == SSA_NAME
4247 && ! TREE_ADDRESSABLE (exp)
4248 && ! TREE_THIS_VOLATILE (exp)
4249 && ! DECL_NONLOCAL (exp)
4250 /* Don't regard global variables as simple. They may be
4251 allocated in ways unknown to the compiler (shared memory,
4252 #pragma weak, etc). */
4253 && ! TREE_PUBLIC (exp)
4254 && ! DECL_EXTERNAL (exp)
4255 /* Loading a static variable is unduly expensive, but global
4256 registers aren't expensive. */
4257 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4260 /* The following functions are subroutines to fold_range_test and allow it to
4261 try to change a logical combination of comparisons into a range test.
4264 X == 2 || X == 3 || X == 4 || X == 5
4268 (unsigned) (X - 2) <= 3
4270 We describe each set of comparisons as being either inside or outside
4271 a range, using a variable named like IN_P, and then describe the
4272 range with a lower and upper bound. If one of the bounds is omitted,
4273 it represents either the highest or lowest value of the type.
4275 In the comments below, we represent a range by two numbers in brackets
4276 preceded by a "+" to designate being inside that range, or a "-" to
4277 designate being outside that range, so the condition can be inverted by
4278 flipping the prefix. An omitted bound is represented by a "-". For
4279 example, "- [-, 10]" means being outside the range starting at the lowest
4280 possible value and ending at 10, in other words, being greater than 10.
4281 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4284 We set up things so that the missing bounds are handled in a consistent
4285 manner so neither a missing bound nor "true" and "false" need to be
4286 handled using a special case. */
4288 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4289 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4290 and UPPER1_P are nonzero if the respective argument is an upper bound
4291 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4292 must be specified for a comparison. ARG1 will be converted to ARG0's
4293 type if both are specified. */
4296 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4297 tree arg1, int upper1_p)
4303 /* If neither arg represents infinity, do the normal operation.
4304 Else, if not a comparison, return infinity. Else handle the special
4305 comparison rules. Note that most of the cases below won't occur, but
4306 are handled for consistency. */
4308 if (arg0 != 0 && arg1 != 0)
/* Both bounds are finite: fold normally; only a constant result is
   useful to callers, anything else yields 0.  */
4310 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4311 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4313 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4316 if (TREE_CODE_CLASS (code) != tcc_comparison)
4319 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4320 for neither. In real maths, we cannot assume open ended ranges are
4321 the same. But, this is computer arithmetic, where numbers are finite.
4322 We can therefore make the transformation of any unbounded range with
4323 the value Z, Z being greater than any representable number. This permits
4324 us to treat unbounded ranges as equal. */
4325 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4326 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Compare the signed "infinity ranks" under the requested comparison
   code (cases for EQ/NE/LT/LE/GT/GE; switch head not visible in this
   extraction).  */
4330 result = sgn0 == sgn1;
4333 result = sgn0 != sgn1;
4336 result = sgn0 < sgn1;
4339 result = sgn0 <= sgn1;
4342 result = sgn0 > sgn1;
4345 result = sgn0 >= sgn1;
4351 return constant_boolean_node (result, type);
4354 /* Given EXP, a logical expression, set the range it is testing into
4355 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4356 actually being tested. *PLOW and *PHIGH will be made of the same
4357 type as the returned expression. If EXP is not a comparison, we
4358 will most likely not be returning a useful value and range. Set
4359 *STRICT_OVERFLOW_P to true if the return value is only valid
4360 because signed overflow is undefined; otherwise, do not change
4361 *STRICT_OVERFLOW_P. */
4364 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4365 bool *strict_overflow_p)
/* NOTE(review): lossy extraction -- braces, some case labels and loop
   framing between the numbered lines are not visible here.  */
4367 enum tree_code code;
4368 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4369 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4371 tree low, high, n_low, n_high;
4373 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4374 and see if we can refine the range. Some of the cases below may not
4375 happen, but it doesn't seem worth worrying about this. We "continue"
4376 the outer loop when we've changed something; otherwise we "break"
4377 the switch, which will "break" the while. */
4380 low = high = build_int_cst (TREE_TYPE (exp), 0);
4384 code = TREE_CODE (exp);
4385 exp_type = TREE_TYPE (exp);
/* Extract ARG0/ARG1 and ARG0's type from EXP when its code class
   actually carries operands.  */
4387 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4389 if (TREE_OPERAND_LENGTH (exp) > 0)
4390 arg0 = TREE_OPERAND (exp, 0);
4391 if (TREE_CODE_CLASS (code) == tcc_comparison
4392 || TREE_CODE_CLASS (code) == tcc_unary
4393 || TREE_CODE_CLASS (code) == tcc_binary)
4394 arg0_type = TREE_TYPE (arg0);
4395 if (TREE_CODE_CLASS (code) == tcc_binary
4396 || TREE_CODE_CLASS (code) == tcc_comparison
4397 || (TREE_CODE_CLASS (code) == tcc_expression
4398 && TREE_OPERAND_LENGTH (exp) > 1))
4399 arg1 = TREE_OPERAND (exp, 1);
4404 case TRUTH_NOT_EXPR:
/* Logical negation just flips in/out and descends into the operand.  */
4405 in_p = ! in_p, exp = arg0;
4408 case EQ_EXPR: case NE_EXPR:
4409 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4410 /* We can only do something if the range is testing for zero
4411 and if the second operand is an integer constant. Note that
4412 saying something is "in" the range we make is done by
4413 complementing IN_P since it will set in the initial case of
4414 being not equal to zero; "out" is leaving it alone. */
4415 if (low == 0 || high == 0
4416 || ! integer_zerop (low) || ! integer_zerop (high)
4417 || TREE_CODE (arg1) != INTEGER_CST)
4422 case NE_EXPR: /* - [c, c] */
4425 case EQ_EXPR: /* + [c, c] */
4426 in_p = ! in_p, low = high = arg1;
4428 case GT_EXPR: /* - [-, c] */
4429 low = 0, high = arg1;
4431 case GE_EXPR: /* + [c, -] */
4432 in_p = ! in_p, low = arg1, high = 0;
4434 case LT_EXPR: /* - [c, -] */
4435 low = arg1, high = 0;
4437 case LE_EXPR: /* + [-, c] */
4438 in_p = ! in_p, low = 0, high = arg1;
4444 /* If this is an unsigned comparison, we also know that EXP is
4445 greater than or equal to zero. We base the range tests we make
4446 on that fact, so we record it here so we can parse existing
4447 range tests. We test arg0_type since often the return type
4448 of, e.g. EQ_EXPR, is boolean. */
4449 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4451 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4453 build_int_cst (arg0_type, 0),
4457 in_p = n_in_p, low = n_low, high = n_high;
4459 /* If the high bound is missing, but we have a nonzero low
4460 bound, reverse the range so it goes from zero to the low bound
4462 if (high == 0 && low && ! integer_zerop (low))
4465 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4466 integer_one_node, 0);
4467 low = build_int_cst (arg0_type, 0);
4475 /* (-x) IN [a,b] -> x in [-b, -a] */
4476 n_low = range_binop (MINUS_EXPR, exp_type,
4477 build_int_cst (exp_type, 0),
4479 n_high = range_binop (MINUS_EXPR, exp_type,
4480 build_int_cst (exp_type, 0),
4482 if (n_high != 0 && TREE_OVERFLOW (n_high))
/* ~X is equivalent to -X - 1; rewrite and iterate.  */
4488 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4489 build_int_cst (exp_type, 1));
4492 case PLUS_EXPR: case MINUS_EXPR:
4493 if (TREE_CODE (arg1) != INTEGER_CST)
4496 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4497 move a constant to the other side. */
4498 if (!TYPE_UNSIGNED (arg0_type)
4499 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4502 /* If EXP is signed, any overflow in the computation is undefined,
4503 so we don't worry about it so long as our computations on
4504 the bounds don't overflow. For unsigned, overflow is defined
4505 and this is exactly the right thing. */
4506 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4507 arg0_type, low, 0, arg1, 0);
4508 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4509 arg0_type, high, 1, arg1, 0);
4510 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4511 || (n_high != 0 && TREE_OVERFLOW (n_high)))
/* This transformation relied on undefined signed overflow; let the
   caller know so it can warn under -Wstrict-overflow.  */
4514 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4515 *strict_overflow_p = true;
4518 /* Check for an unsigned range which has wrapped around the maximum
4519 value thus making n_high < n_low, and normalize it. */
4520 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4522 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4523 integer_one_node, 0);
4524 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4525 integer_one_node, 0);
4527 /* If the range is of the form +/- [ x+1, x ], we won't
4528 be able to normalize it. But then, it represents the
4529 whole range or the empty set, so make it
4531 if (tree_int_cst_equal (n_low, low)
4532 && tree_int_cst_equal (n_high, high))
4538 low = n_low, high = n_high;
4543 CASE_CONVERT: case NON_LVALUE_EXPR:
/* Widening conversions only; also require integral source type and
   bounds representable in it.  */
4544 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4547 if (! INTEGRAL_TYPE_P (arg0_type)
4548 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4549 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4552 n_low = low, n_high = high;
4555 n_low = fold_convert (arg0_type, n_low);
4558 n_high = fold_convert (arg0_type, n_high);
4561 /* If we're converting arg0 from an unsigned type, to exp,
4562 a signed type, we will be doing the comparison as unsigned.
4563 The tests above have already verified that LOW and HIGH
4566 So we have to ensure that we will handle large unsigned
4567 values the same way that the current signed bounds treat
4570 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4574 /* For fixed-point modes, we need to pass the saturating flag
4575 as the 2nd parameter. */
4576 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4577 equiv_type = lang_hooks.types.type_for_mode
4578 (TYPE_MODE (arg0_type),
4579 TYPE_SATURATING (arg0_type));
4581 equiv_type = lang_hooks.types.type_for_mode
4582 (TYPE_MODE (arg0_type), 1);
4584 /* A range without an upper bound is, naturally, unbounded.
4585 Since convert would have cropped a very large value, use
4586 the max value for the destination type. */
4588 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4589 : TYPE_MAX_VALUE (arg0_type);
4591 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4592 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4593 fold_convert (arg0_type,
4595 build_int_cst (arg0_type, 1));
4597 /* If the low bound is specified, "and" the range with the
4598 range for which the original unsigned value will be
4602 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4603 1, n_low, n_high, 1,
4604 fold_convert (arg0_type,
4609 in_p = (n_in_p == in_p);
4613 /* Otherwise, "or" the range with the range of the input
4614 that will be interpreted as negative. */
4615 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4616 0, n_low, n_high, 1,
4617 fold_convert (arg0_type,
4622 in_p = (in_p != n_in_p);
4627 low = n_low, high = n_high;
4637 /* If EXP is a constant, we can evaluate whether this is true or false. */
4638 if (TREE_CODE (exp) == INTEGER_CST)
4640 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4642 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4648 *pin_p = in_p, *plow = low, *phigh = high;
4652 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4653 type, TYPE, return an expression to test if EXP is in (or out of, depending
4654 on IN_P) the range. Return 0 if the test couldn't be created. */
4657 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4659 tree etype = TREE_TYPE (exp);
4662 #ifdef HAVE_canonicalize_funcptr_for_compare
4663 /* Disable this optimization for function pointer expressions
4664 on targets that require function pointer canonicalization. */
4665 if (HAVE_canonicalize_funcptr_for_compare
4666 && TREE_CODE (etype) == POINTER_TYPE
4667 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* An "out of range" test is built as the inverse of the "in range"
   test for the same bounds.  */
4673 value = build_range_check (type, exp, 1, low, high);
4675 return invert_truthvalue (value);
/* No bounds at all means the range covers the whole type: always 1.  */
4680 if (low == 0 && high == 0)
4681 return build_int_cst (type, 1);
/* Only an upper bound: EXP <= HIGH.  */
4684 return fold_build2 (LE_EXPR, type, exp,
4685 fold_convert (etype, high));
/* Only a lower bound: EXP >= LOW.  */
4688 return fold_build2 (GE_EXPR, type, exp,
4689 fold_convert (etype, low));
/* Degenerate single-value range: EXP == LOW.  */
4691 if (operand_equal_p (low, high, 0))
4692 return fold_build2 (EQ_EXPR, type, exp,
4693 fold_convert (etype, low));
4695 if (integer_zerop (low))
/* [0, HIGH] in a signed type is just an unsigned <= comparison.  */
4697 if (! TYPE_UNSIGNED (etype))
4699 etype = unsigned_type_for (etype);
4700 high = fold_convert (etype, high);
4701 exp = fold_convert (etype, exp);
4703 return build_range_check (type, exp, 1, 0, high);
4706 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4707 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4709 unsigned HOST_WIDE_INT lo;
4713 prec = TYPE_PRECISION (etype);
/* Compute HI/LO as the two-word value of the signed maximum of the
   type's precision (2^(prec-1) - 1).  */
4714 if (prec <= HOST_BITS_PER_WIDE_INT)
4717 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4721 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4722 lo = (unsigned HOST_WIDE_INT) -1;
4725 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4727 if (TYPE_UNSIGNED (etype))
4729 tree signed_etype = signed_type_for (etype);
/* Guard against a signed variant of different precision (e.g. for
   nonstandard-width types).  */
4730 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4732 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4734 etype = signed_etype;
4735 exp = fold_convert (etype, exp);
4737 return fold_build2 (GT_EXPR, type, exp,
4738 build_int_cst (etype, 0));
4742 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4743 This requires wrap-around arithmetic for the type of the expression. */
4744 switch (TREE_CODE (etype))
4747 /* There is no requirement that LOW be within the range of ETYPE
4748 if the latter is a subtype. It must, however, be within the base
4749 type of ETYPE. So be sure we do the subtraction in that type. */
4750 if (TREE_TYPE (etype))
4751 etype = TREE_TYPE (etype);
4756 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4757 TYPE_UNSIGNED (etype));
4764 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4765 if (TREE_CODE (etype) == INTEGER_TYPE
4766 && !TYPE_OVERFLOW_WRAPS (etype))
4768 tree utype, minv, maxv;
4770 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4771 for the type in question, as we rely on this here. */
4772 utype = unsigned_type_for (etype);
4773 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4774 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4775 integer_one_node, 1);
4776 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4778 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4785 high = fold_convert (etype, high);
4786 low = fold_convert (etype, low);
4787 exp = fold_convert (etype, exp);
4789 value = const_binop (MINUS_EXPR, high, low, 0);
4792 if (POINTER_TYPE_P (etype))
/* Pointer case: shift EXP by -LOW (a sizetype offset) and recurse with
   a zero-based range.  */
4794 if (value != 0 && !TREE_OVERFLOW (value))
4796 low = fold_convert (sizetype, low);
4797 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4798 return build_range_check (type,
4799 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4800 1, build_int_cst (etype, 0), value);
/* Integer case: test (EXP - LOW) against [0, HIGH - LOW].  */
4805 if (value != 0 && !TREE_OVERFLOW (value))
4806 return build_range_check (type,
4807 fold_build2 (MINUS_EXPR, etype, exp, low),
4808 1, build_int_cst (etype, 0), value);
4813 /* Return the predecessor of VAL in its type, handling the infinite case. */
4816 range_predecessor (tree val)
4818 tree type = TREE_TYPE (val);
/* The minimum of an integral type has no predecessor (the "infinite"
   case); the not-visible branch here presumably returns 0.  */
4820 if (INTEGRAL_TYPE_P (type)
4821 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4824 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4827 /* Return the successor of VAL in its type, handling the infinite case. */
4830 range_successor (tree val)
4832 tree type = TREE_TYPE (val);
/* The maximum of an integral type has no successor (the "infinite"
   case); the not-visible branch here presumably returns 0.  */
4834 if (INTEGRAL_TYPE_P (type)
4835 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4838 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4841 /* Given two ranges, see if we can merge them into one. Return 1 if we
4842 can, 0 if we can't. Set the output range into the specified parameters. */
4845 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4846 tree high0, int in1_p, tree low1, tree high1)
/* NOTE(review): lossy extraction -- braces and some locals between the
   numbered lines are not visible here.  */
/* A NULL bound means "infinite" on that side; LOWEQUAL/HIGHEQUAL treat
   two missing bounds as equal.  */
4854 int lowequal = ((low0 == 0 && low1 == 0)
4855 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4856 low0, 0, low1, 0)));
4857 int highequal = ((high0 == 0 && high1 == 0)
4858 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4859 high0, 1, high1, 1)));
4861 /* Make range 0 be the range that starts first, or ends last if they
4862 start at the same value. Swap them if it isn't. */
4863 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4866 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4867 high1, 1, high0, 1))))
4869 temp = in0_p, in0_p = in1_p, in1_p = temp;
4870 tem = low0, low0 = low1, low1 = tem;
4871 tem = high0, high0 = high1, high1 = tem;
4874 /* Now flag two cases, whether the ranges are disjoint or whether the
4875 second range is totally subsumed in the first. Note that the tests
4876 below are simplified by the ones above. */
4877 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4878 high0, 1, low1, 0));
4879 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4880 high1, 1, high0, 1));
4882 /* We now have four cases, depending on whether we are including or
4883 excluding the two ranges. */
4886 /* If they don't overlap, the result is false. If the second range
4887 is a subset it is the result. Otherwise, the range is from the start
4888 of the second to the end of the first. */
4890 in_p = 0, low = high = 0;
4892 in_p = 1, low = low1, high = high1;
4894 in_p = 1, low = low1, high = high0;
4897 else if (in0_p && ! in1_p)
4899 /* If they don't overlap, the result is the first range. If they are
4900 equal, the result is false. If the second range is a subset of the
4901 first, and the ranges begin at the same place, we go from just after
4902 the end of the second range to the end of the first. If the second
4903 range is not a subset of the first, or if it is a subset and both
4904 ranges end at the same place, the range starts at the start of the
4905 first range and ends just before the second range.
4906 Otherwise, we can't describe this as a single range. */
4908 in_p = 1, low = low0, high = high0;
4909 else if (lowequal && highequal)
4910 in_p = 0, low = high = 0;
4911 else if (subset && lowequal)
4913 low = range_successor (high1);
4918 /* We are in the weird situation where high0 > high1 but
4919 high1 has no successor. Punt. */
4923 else if (! subset || highequal)
4926 high = range_predecessor (low1);
4930 /* low0 < low1 but low1 has no predecessor. Punt. */
4938 else if (! in0_p && in1_p)
4940 /* If they don't overlap, the result is the second range. If the second
4941 is a subset of the first, the result is false. Otherwise,
4942 the range starts just after the first range and ends at the
4943 end of the second. */
4945 in_p = 1, low = low1, high = high1;
4946 else if (subset || highequal)
4947 in_p = 0, low = high = 0;
4950 low = range_successor (high0);
4955 /* high1 > high0 but high0 has no successor. Punt. */
4963 /* The case where we are excluding both ranges. Here the complex case
4964 is if they don't overlap. In that case, the only time we have a
4965 range is if they are adjacent. If the second is a subset of the
4966 first, the result is the first. Otherwise, the range to exclude
4967 starts at the beginning of the first range and ends at the end of the
4971 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4972 range_successor (high0),
4974 in_p = 0, low = low0, high = high1;
4977 /* Canonicalize - [min, x] into - [-, x]. */
4978 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4979 switch (TREE_CODE (TREE_TYPE (low0)))
/* Skip types whose precision doesn't fill their machine mode --
   min/max wrap tricks are not valid for them.  */
4982 if (TYPE_PRECISION (TREE_TYPE (low0))
4983 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4987 if (tree_int_cst_equal (low0,
4988 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4992 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4993 && integer_zerop (low0))
5000 /* Canonicalize - [x, max] into - [x, -]. */
5001 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5002 switch (TREE_CODE (TREE_TYPE (high1)))
5005 if (TYPE_PRECISION (TREE_TYPE (high1))
5006 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5010 if (tree_int_cst_equal (high1,
5011 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5015 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5016 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5018 integer_one_node, 1)))
5025 /* The ranges might be also adjacent between the maximum and
5026 minimum values of the given type. For
5027 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5028 return + [x + 1, y - 1]. */
5029 if (low0 == 0 && high1 == 0)
5031 low = range_successor (high0);
5032 high = range_predecessor (low1);
5033 if (low == 0 || high == 0)
5043 in_p = 0, low = low0, high = high0;
5045 in_p = 0, low = low0, high = high1;
5048 *pin_p = in_p, *plow = low, *phigh = high;
/* NOTE(review): the embedded source line numbers in this listing jump
   (e.g. 5067 -> 5073, 5105 -> 5110), so this excerpt is non-contiguous.
   The function's return type line, the switch on comp_code, several case
   labels, braces and the declaration of `tem' are in elided lines.
   Comments below describe only the visible fragments.  */
5053 /* Subroutine of fold, looking inside expressions of the form
5054 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5055 of the COND_EXPR. This function is being used also to optimize
5056 A op B ? C : A, by reversing the comparison first.
5058 Return a folded expression whose code is not a COND_EXPR
5059 anymore, or NULL_TREE if no folding opportunity is found. */
5062 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
/* ARG0 is the comparison; arg00/arg01 are its two operands.  */
5064 enum tree_code comp_code = TREE_CODE (arg0);
5065 tree arg00 = TREE_OPERAND (arg0, 0);
5066 tree arg01 = TREE_OPERAND (arg0, 1);
5067 tree arg1_type = TREE_TYPE (arg1);
5073 /* If we have A op 0 ? A : -A, consider applying the following
5076 A == 0? A : -A same as -A
5077 A != 0? A : -A same as A
5078 A >= 0? A : -A same as abs (A)
5079 A > 0? A : -A same as abs (A)
5080 A <= 0? A : -A same as -abs (A)
5081 A < 0? A : -A same as -abs (A)
5083 None of these transformations work for modes with signed
5084 zeros. If A is +/-0, the first two transformations will
5085 change the sign of the result (from +0 to -0, or vice
5086 versa). The last four will fix the sign of the result,
5087 even though the original expressions could be positive or
5088 negative, depending on the sign of A.
5090 Note that all these transformations are correct if A is
5091 NaN, since the two alternatives (A and -A) are also NaNs. */
5092 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5093 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5094 ? real_zerop (arg01)
5095 : integer_zerop (arg01))
5096 && ((TREE_CODE (arg2) == NEGATE_EXPR
5097 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5098 /* In the case that A is of the form X-Y, '-A' (arg2) may
5099 have already been folded to Y-X, check for that. */
5100 || (TREE_CODE (arg1) == MINUS_EXPR
5101 && TREE_CODE (arg2) == MINUS_EXPR
5102 && operand_equal_p (TREE_OPERAND (arg1, 0),
5103 TREE_OPERAND (arg2, 1), 0)
5104 && operand_equal_p (TREE_OPERAND (arg1, 1),
5105 TREE_OPERAND (arg2, 0), 0))))
5110 tem = fold_convert (arg1_type, arg1);
5111 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5114 return pedantic_non_lvalue (fold_convert (type, arg1));
/* NOTE(review): the abs(A) cases are guarded by flag_trapping_math --
   presumably because ABS_EXPR can trap on the most-negative value;
   the bail-out body of this guard is in elided lines, confirm against
   the full source.  */
5117 if (flag_trapping_math)
5122 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5123 arg1 = fold_convert (signed_type_for
5124 (TREE_TYPE (arg1)), arg1);
5125 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5126 return pedantic_non_lvalue (fold_convert (type, tem));
5129 if (flag_trapping_math)
5133 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5134 arg1 = fold_convert (signed_type_for
5135 (TREE_TYPE (arg1)), arg1);
5136 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5137 return negate_expr (fold_convert (type, tem));
5139 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5143 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5144 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5145 both transformations are correct when A is NaN: A != 0
5146 is then true, and A == 0 is false. */
5148 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5149 && integer_zerop (arg01) && integer_zerop (arg2))
5151 if (comp_code == NE_EXPR)
5152 return pedantic_non_lvalue (fold_convert (type, arg1));
5153 else if (comp_code == EQ_EXPR)
5154 return build_int_cst (type, 0);
5157 /* Try some transformations of A op B ? A : B.
5159 A == B? A : B same as B
5160 A != B? A : B same as A
5161 A >= B? A : B same as max (A, B)
5162 A > B? A : B same as max (B, A)
5163 A <= B? A : B same as min (A, B)
5164 A < B? A : B same as min (B, A)
5166 As above, these transformations don't work in the presence
5167 of signed zeros. For example, if A and B are zeros of
5168 opposite sign, the first two transformations will change
5169 the sign of the result. In the last four, the original
5170 expressions give different results for (A=+0, B=-0) and
5171 (A=-0, B=+0), but the transformed expressions do not.
5173 The first two transformations are correct if either A or B
5174 is a NaN. In the first transformation, the condition will
5175 be false, and B will indeed be chosen. In the case of the
5176 second transformation, the condition A != B will be true,
5177 and A will be chosen.
5179 The conversions to max() and min() are not correct if B is
5180 a number and A is not. The conditions in the original
5181 expressions will be false, so all four give B. The min()
5182 and max() versions would give a NaN instead. */
5183 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5184 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5185 /* Avoid these transformations if the COND_EXPR may be used
5186 as an lvalue in the C++ front-end. PR c++/19199. */
5188 || (strcmp (lang_hooks.name, "GNU C++") != 0
5189 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5190 || ! maybe_lvalue_p (arg1)
5191 || ! maybe_lvalue_p (arg2)))
5193 tree comp_op0 = arg00;
5194 tree comp_op1 = arg01;
5195 tree comp_type = TREE_TYPE (comp_op0);
5197 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5198 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5208 return pedantic_non_lvalue (fold_convert (type, arg2));
5210 return pedantic_non_lvalue (fold_convert (type, arg1));
5215 /* In C++ a ?: expression can be an lvalue, so put the
5216 operand which will be used if they are equal first
5217 so that we can convert this back to the
5218 corresponding COND_EXPR. */
5219 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5221 comp_op0 = fold_convert (comp_type, comp_op0);
5222 comp_op1 = fold_convert (comp_type, comp_op1);
5223 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5224 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5225 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5226 return pedantic_non_lvalue (fold_convert (type, tem));
5233 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5235 comp_op0 = fold_convert (comp_type, comp_op0);
5236 comp_op1 = fold_convert (comp_type, comp_op1);
5237 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5238 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5239 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5240 return pedantic_non_lvalue (fold_convert (type, tem));
5244 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5245 return pedantic_non_lvalue (fold_convert (type, arg2));
5248 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5249 return pedantic_non_lvalue (fold_convert (type, arg1));
5252 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5257 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5258 we might still be able to simplify this. For example,
5259 if C1 is one less or one more than C2, this might have started
5260 out as a MIN or MAX and been transformed by this function.
5261 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5263 if (INTEGRAL_TYPE_P (type)
5264 && TREE_CODE (arg01) == INTEGER_CST
5265 && TREE_CODE (arg2) == INTEGER_CST)
5269 /* We can replace A with C1 in this case. */
5270 arg1 = fold_convert (type, arg01);
5271 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5274 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5275 MIN_EXPR, to preserve the signedness of the comparison. */
5276 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5278 && operand_equal_p (arg01,
5279 const_binop (PLUS_EXPR, arg2,
5280 build_int_cst (type, 1), 0),
5283 tem = fold_build2 (MIN_EXPR, TREE_TYPE (arg00), arg00,
5284 fold_convert (TREE_TYPE (arg00), arg2));
5285 return pedantic_non_lvalue (fold_convert (type, tem));
5290 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5292 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5294 && operand_equal_p (arg01,
5295 const_binop (MINUS_EXPR, arg2,
5296 build_int_cst (type, 1), 0),
5299 tem = fold_build2 (MIN_EXPR, TREE_TYPE (arg00), arg00,
5300 fold_convert (TREE_TYPE (arg00), arg2));
5301 return pedantic_non_lvalue (fold_convert (type, tem));
5306 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5307 MAX_EXPR, to preserve the signedness of the comparison. */
5308 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5310 && operand_equal_p (arg01,
5311 const_binop (MINUS_EXPR, arg2,
5312 build_int_cst (type, 1), 0),
5315 tem = fold_build2 (MAX_EXPR, TREE_TYPE (arg00), arg00,
5316 fold_convert (TREE_TYPE (arg00), arg2));
5317 return pedantic_non_lvalue (fold_convert (type, tem));
5322 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5323 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5325 && operand_equal_p (arg01,
5326 const_binop (PLUS_EXPR, arg2,
5327 build_int_cst (type, 1), 0),
5330 tem = fold_build2 (MAX_EXPR, TREE_TYPE (arg00), arg00,
5331 fold_convert (TREE_TYPE (arg00), arg2));
5332 return pedantic_non_lvalue (fold_convert (type, tem));
5346 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5347 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5348 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
/* NOTE(review): non-contiguous excerpt (embedded line numbers jump,
   e.g. 5364 -> 5366); the return type line, the declaration of `tem',
   several closing braces and the final return are in elided lines.  */
5352 /* EXP is some logical combination of boolean tests. See if we can
5353 merge it into some range test. Return the new tree if so. */
5356 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5358 int or_op = (code == TRUTH_ORIF_EXPR
5359 || code == TRUTH_OR_EXPR)
5360 int in0_p, in1_p, in_p;
5361 tree low0, low1, low, high0, high1, high;
5362 bool strict_overflow_p = false;
/* Decompose each operand into an (in_p, low, high) range description.  */
5363 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5364 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5366 const char * const warnmsg = G_("assuming signed overflow does not occur "
5367 "when simplifying range test");
5369 /* If this is an OR operation, invert both sides; we will invert
5370 again at the end. */
5372 in0_p = ! in0_p, in1_p = ! in1_p;
5374 /* If both expressions are the same, if we can merge the ranges, and we
5375 can build the range test, return it or it inverted. If one of the
5376 ranges is always true or always false, consider it to be the same
5377 expression as the other. */
5378 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5379 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5381 && 0 != (tem = (build_range_check (type,
5383 : rhs != 0 ? rhs : integer_zero_node,
5386 if (strict_overflow_p)
5387 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
/* Undo the earlier inversion for OR (De Morgan round trip).  */
5388 return or_op ? invert_truthvalue (tem) : tem;
5391 /* On machines where the branch cost is expensive, if this is a
5392 short-circuited branch and the underlying object on both sides
5393 is the same, make a non-short-circuit operation. */
5394 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5395 && lhs != 0 && rhs != 0
5396 && (code == TRUTH_ANDIF_EXPR
5397 || code == TRUTH_ORIF_EXPR)
5398 && operand_equal_p (lhs, rhs, 0))
5400 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5401 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5402 which cases we can't do this. */
5403 if (simple_operand_p (lhs))
5404 return build2 (code == TRUTH_ANDIF_EXPR
5405 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5408 else if (lang_hooks.decls.global_bindings_p () == 0
5409 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Evaluate the shared subexpression once via SAVE_EXPR.  */
5411 tree common = save_expr (lhs);
5413 if (0 != (lhs = build_range_check (type, common,
5414 or_op ? ! in0_p : in0_p,
5416 && (0 != (rhs = build_range_check (type, common,
5417 or_op ? ! in1_p : in1_p,
5420 if (strict_overflow_p)
5421 fold_overflow_warning (warnmsg,
5422 WARN_STRICT_OVERFLOW_COMPARISON);
5423 return build2 (code == TRUTH_ANDIF_EXPR
5424 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
/* NOTE(review): non-contiguous excerpt; the return type line, the
   declaration of `temp', the early `return' body under the p == modesize
   test, and the `if (mask != 0)' guard around the mask AND are in elided
   lines -- confirm against the full source.  */
5433 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5434 bit value. Arrange things so the extra bits will be set to zero if and
5435 only if C is signed-extended to its full width. If MASK is nonzero,
5436 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5439 unextend (tree c, int p, int unsignedp, tree mask)
5441 tree type = TREE_TYPE (c);
5442 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Nothing to do when C already occupies the full mode or is unsigned.  */
5445 if (p == modesize || unsignedp)
5448 /* We work by getting just the sign bit into the low-order bit, then
5449 into the high-order bit, then sign-extend. We then XOR that value
5451 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5452 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5454 /* We must use a signed type in order to get an arithmetic right shift.
5455 However, we must also avoid introducing accidental overflows, so that
5456 a subsequent call to integer_zerop will work. Hence we must
5457 do the type conversion here. At this point, the constant is either
5458 zero or one, and the conversion to a signed type can never overflow.
5459 We could get an overflow if this conversion is done anywhere else. */
5460 if (TYPE_UNSIGNED (type))
5461 temp = fold_convert (signed_type_for (type), temp);
5463 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5464 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5466 temp = const_binop (BIT_AND_EXPR, temp,
5467 fold_convert (TREE_TYPE (c), mask), 0);
5468 /* If necessary, convert the type back to match the type of C. */
5469 if (TYPE_UNSIGNED (type))
5470 temp = fold_convert (type, temp);
5472 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
/* NOTE(review): non-contiguous excerpt (embedded line numbers jump,
   e.g. 5582 -> 5588, 5685 -> 5694); the return type line, many braces,
   `return 0' statements and the declaration of `volatilep' are in
   elided lines.  Comments below describe only the visible fragments.  */
5475 /* Find ways of folding logical expressions of LHS and RHS:
5476 Try to merge two comparisons to the same innermost item.
5477 Look for range tests like "ch >= '0' && ch <= '9'".
5478 Look for combinations of simple terms on machines with expensive branches
5479 and evaluate the RHS unconditionally.
5481 For example, if we have p->a == 2 && p->b == 4 and we can make an
5482 object large enough to span both A and B, we can do this with a comparison
5483 against the object ANDed with the a mask.
5485 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5486 operations to do this with one comparison.
5488 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5489 function and the one above.
5491 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5492 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5494 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5497 We return the simplified tree or 0 if no optimization is possible. */
5500 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5502 /* If this is the "or" of two comparisons, we can do something if
5503 the comparisons are NE_EXPR. If this is the "and", we can do something
5504 if the comparisons are EQ_EXPR. I.e.,
5505 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5507 WANTED_CODE is this operation code. For single bit fields, we can
5508 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5509 comparison for one-bit fields. */
5511 enum tree_code wanted_code;
5512 enum tree_code lcode, rcode;
/* Naming scheme (see assignments at the TREE_OPERAND extractions below):
   the first letter is the comparison (l = lhs, r = rhs), the second is
   that comparison's operand (l = operand 0, r = operand 1); e.g. rl_arg
   is the left operand of the right-hand comparison.  */
5513 tree ll_arg, lr_arg, rl_arg, rr_arg;
5514 tree ll_inner, lr_inner, rl_inner, rr_inner;
5515 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5516 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5517 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5518 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5519 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5520 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5521 enum machine_mode lnmode, rnmode;
5522 tree ll_mask, lr_mask, rl_mask, rr_mask;
5523 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5524 tree l_const, r_const;
5525 tree lntype, rntype, result;
5526 HOST_WIDE_INT first_bit, end_bit;
5528 tree orig_lhs = lhs, orig_rhs = rhs;
5529 enum tree_code orig_code = code;
5531 /* Start by getting the comparison codes. Fail if anything is volatile.
5532 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5533 it were surrounded with a NE_EXPR. */
5535 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5538 lcode = TREE_CODE (lhs);
5539 rcode = TREE_CODE (rhs);
5541 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5543 lhs = build2 (NE_EXPR, truth_type, lhs,
5544 build_int_cst (TREE_TYPE (lhs), 0));
5548 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5550 rhs = build2 (NE_EXPR, truth_type, rhs,
5551 build_int_cst (TREE_TYPE (rhs), 0));
5555 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5556 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5559 ll_arg = TREE_OPERAND (lhs, 0);
5560 lr_arg = TREE_OPERAND (lhs, 1);
5561 rl_arg = TREE_OPERAND (rhs, 0);
5562 rr_arg = TREE_OPERAND (rhs, 1);
5564 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5565 if (simple_operand_p (ll_arg)
5566 && simple_operand_p (lr_arg))
5569 if (operand_equal_p (ll_arg, rl_arg, 0)
5570 && operand_equal_p (lr_arg, rr_arg, 0))
5572 result = combine_comparisons (code, lcode, rcode,
5573 truth_type, ll_arg, lr_arg);
5577 else if (operand_equal_p (ll_arg, rr_arg, 0)
5578 && operand_equal_p (lr_arg, rl_arg, 0))
5580 result = combine_comparisons (code, lcode,
5581 swap_tree_comparison (rcode),
5582 truth_type, ll_arg, lr_arg);
/* From here on treat the short-circuit codes like their
   non-short-circuit counterparts.  */
5588 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5589 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5591 /* If the RHS can be evaluated unconditionally and its operands are
5592 simple, it wins to evaluate the RHS unconditionally on machines
5593 with expensive branches. In this case, this isn't a comparison
5594 that can be merged. Avoid doing this if the RHS is a floating-point
5595 comparison since those can trap. */
5597 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5599 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5600 && simple_operand_p (rl_arg)
5601 && simple_operand_p (rr_arg))
5603 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5604 if (code == TRUTH_OR_EXPR
5605 && lcode == NE_EXPR && integer_zerop (lr_arg)
5606 && rcode == NE_EXPR && integer_zerop (rr_arg)
5607 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5608 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5609 return build2 (NE_EXPR, truth_type,
5610 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5612 build_int_cst (TREE_TYPE (ll_arg), 0));
5614 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5615 if (code == TRUTH_AND_EXPR
5616 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5617 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5618 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5619 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5620 return build2 (EQ_EXPR, truth_type,
5621 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5623 build_int_cst (TREE_TYPE (ll_arg), 0));
5625 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5627 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5628 return build2 (code, truth_type, lhs, rhs);
5633 /* See if the comparisons can be merged. Then get all the parameters for
5636 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5637 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5641 ll_inner = decode_field_reference (ll_arg,
5642 &ll_bitsize, &ll_bitpos, &ll_mode,
5643 &ll_unsignedp, &volatilep, &ll_mask,
5645 lr_inner = decode_field_reference (lr_arg,
5646 &lr_bitsize, &lr_bitpos, &lr_mode,
5647 &lr_unsignedp, &volatilep, &lr_mask,
5649 rl_inner = decode_field_reference (rl_arg,
5650 &rl_bitsize, &rl_bitpos, &rl_mode,
5651 &rl_unsignedp, &volatilep, &rl_mask,
5653 rr_inner = decode_field_reference (rr_arg,
5654 &rr_bitsize, &rr_bitpos, &rr_mode,
5655 &rr_unsignedp, &volatilep, &rr_mask,
5658 /* It must be true that the inner operation on the lhs of each
5659 comparison must be the same if we are to be able to do anything.
5660 Then see if we have constants. If not, the same must be true for
5662 if (volatilep || ll_inner == 0 || rl_inner == 0
5663 || ! operand_equal_p (ll_inner, rl_inner, 0))
5666 if (TREE_CODE (lr_arg) == INTEGER_CST
5667 && TREE_CODE (rr_arg) == INTEGER_CST)
5668 l_const = lr_arg, r_const = rr_arg;
5669 else if (lr_inner == 0 || rr_inner == 0
5670 || ! operand_equal_p (lr_inner, rr_inner, 0))
5673 l_const = r_const = 0;
5675 /* If either comparison code is not correct for our logical operation,
5676 fail. However, we can convert a one-bit comparison against zero into
5677 the opposite comparison against that bit being set in the field. */
5679 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5680 if (lcode != wanted_code)
5682 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5684 /* Make the left operand unsigned, since we are only interested
5685 in the value of one bit. Otherwise we are doing the wrong
5694 /* This is analogous to the code for l_const above. */
5695 if (rcode != wanted_code)
5697 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5706 /* See if we can find a mode that contains both fields being compared on
5707 the left. If we can't, fail. Otherwise, update all constants and masks
5708 to be relative to a field of that size. */
5709 first_bit = MIN (ll_bitpos, rl_bitpos);
5710 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5711 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5712 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5714 if (lnmode == VOIDmode)
5717 lnbitsize = GET_MODE_BITSIZE (lnmode);
5718 lnbitpos = first_bit & ~ (lnbitsize - 1);
5719 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5720 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5722 if (BYTES_BIG_ENDIAN)
5724 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5725 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5728 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5729 size_int (xll_bitpos), 0);
5730 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5731 size_int (xrl_bitpos), 0);
5735 l_const = fold_convert (lntype, l_const);
5736 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5737 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5738 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5739 fold_build1 (BIT_NOT_EXPR,
5743 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5745 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5750 r_const = fold_convert (lntype, r_const);
5751 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5752 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5753 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5754 fold_build1 (BIT_NOT_EXPR,
5758 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5760 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5764 /* If the right sides are not constant, do the same for it. Also,
5765 disallow this optimization if a size or signedness mismatch occurs
5766 between the left and right sides. */
5769 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5770 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5771 /* Make sure the two fields on the right
5772 correspond to the left without being swapped. */
5773 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5776 first_bit = MIN (lr_bitpos, rr_bitpos);
5777 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5778 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5779 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5781 if (rnmode == VOIDmode)
5784 rnbitsize = GET_MODE_BITSIZE (rnmode);
5785 rnbitpos = first_bit & ~ (rnbitsize - 1);
5786 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5787 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5789 if (BYTES_BIG_ENDIAN)
5791 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5792 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5795 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5796 size_int (xlr_bitpos), 0);
5797 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5798 size_int (xrr_bitpos), 0);
5800 /* Make a mask that corresponds to both fields being compared.
5801 Do this for both items being compared. If the operands are the
5802 same size and the bits being compared are in the same position
5803 then we can do this by masking both and comparing the masked
5805 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5806 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5807 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5809 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5810 ll_unsignedp || rl_unsignedp);
5811 if (! all_ones_mask_p (ll_mask, lnbitsize))
5812 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5814 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5815 lr_unsignedp || rr_unsignedp);
5816 if (! all_ones_mask_p (lr_mask, rnbitsize))
5817 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5819 return build2 (wanted_code, truth_type, lhs, rhs);
5822 /* There is still another way we can do something: If both pairs of
5823 fields being compared are adjacent, we may be able to make a wider
5824 field containing them both.
5826 Note that we still must mask the lhs/rhs expressions. Furthermore,
5827 the mask must be shifted to account for the shift done by
5828 make_bit_field_ref. */
5829 if ((ll_bitsize + ll_bitpos == rl_bitpos
5830 && lr_bitsize + lr_bitpos == rr_bitpos)
5831 || (ll_bitpos == rl_bitpos + rl_bitsize
5832 && lr_bitpos == rr_bitpos + rr_bitsize))
5836 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5837 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5838 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5839 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5841 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5842 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5843 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5844 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5846 /* Convert to the smaller type before masking out unwanted bits. */
/* NOTE(review): the declaration of `type' used in the BIT_AND_EXPRs
   below appears to be in an elided line -- confirm against full source.  */
5848 if (lntype != rntype)
5850 if (lnbitsize > rnbitsize)
5852 lhs = fold_convert (rntype, lhs);
5853 ll_mask = fold_convert (rntype, ll_mask);
5856 else if (lnbitsize < rnbitsize)
5858 rhs = fold_convert (lntype, rhs);
5859 lr_mask = fold_convert (lntype, lr_mask);
5864 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5865 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5867 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5868 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5870 return build2 (wanted_code, truth_type, lhs, rhs);
5876 /* Handle the case of comparisons with constants. If there is something in
5877 common between the masks, those bits of the constants must be the same.
5878 If not, the condition is always false. Test for this to avoid generating
5879 incorrect code below. */
5880 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5881 if (! integer_zerop (result)
5882 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5883 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5885 if (wanted_code == NE_EXPR)
5887 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5888 return constant_boolean_node (true, truth_type);
5892 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5893 return constant_boolean_node (false, truth_type);
5897 /* Construct the expression we will return. First get the component
5898 reference we will make. Unless the mask is all ones the width of
5899 that field, perform the mask operation. Then compare with the
5901 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5902 ll_unsignedp || rl_unsignedp);
5904 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5905 if (! all_ones_mask_p (ll_mask, lnbitsize))
5906 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5908 return build2 (wanted_code, truth_type, result,
5909 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
/* NOTE(review): non-contiguous excerpt; the rest of the heading comment,
   the return type line, the declarations of `inner', `arg0',
   `minmax_const', `comp_const', the `switch (code)' statement and the
   EQ_EXPR/GT_EXPR case labels are in elided lines.  Per the visible
   comment at "5942", only EQ_EXPR and GT_EXPR are handled directly; the
   remaining codes recurse via inversion / logical composition.  */
5912 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5916 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5919 enum tree_code op_code;
5922 int consts_equal, consts_lt;
5925 STRIP_SIGN_NOPS (arg0);
5927 op_code = TREE_CODE (arg0);
5928 minmax_const = TREE_OPERAND (arg0, 1);
5929 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5930 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5931 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5932 inner = TREE_OPERAND (arg0, 0);
5934 /* If something does not permit us to optimize, return the original tree. */
5935 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5936 || TREE_CODE (comp_const) != INTEGER_CST
5937 || TREE_OVERFLOW (comp_const)
5938 || TREE_CODE (minmax_const) != INTEGER_CST
5939 || TREE_OVERFLOW (minmax_const))
5942 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5943 and GT_EXPR, doing the rest with recursive calls using logical
5947 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5949 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5952 return invert_truthvalue (tem);
5958 fold_build2 (TRUTH_ORIF_EXPR, type,
5959 optimize_minmax_comparison
5960 (EQ_EXPR, type, arg0, comp_const),
5961 optimize_minmax_comparison
5962 (GT_EXPR, type, arg0, comp_const));
5965 if (op_code == MAX_EXPR && consts_equal)
5966 /* MAX (X, 0) == 0 -> X <= 0 */
5967 return fold_build2 (LE_EXPR, type, inner, comp_const);
5969 else if (op_code == MAX_EXPR && consts_lt)
5970 /* MAX (X, 0) == 5 -> X == 5 */
5971 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5973 else if (op_code == MAX_EXPR)
5974 /* MAX (X, 0) == -1 -> false */
5975 return omit_one_operand (type, integer_zero_node, inner);
5977 else if (consts_equal)
5978 /* MIN (X, 0) == 0 -> X >= 0 */
5979 return fold_build2 (GE_EXPR, type, inner, comp_const);
5982 /* MIN (X, 0) == 5 -> false */
5983 return omit_one_operand (type, integer_zero_node, inner);
5986 /* MIN (X, 0) == -1 -> X == -1 */
5987 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5990 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5991 /* MAX (X, 0) > 0 -> X > 0
5992 MAX (X, 0) > 5 -> X > 5 */
5993 return fold_build2 (GT_EXPR, type, inner, comp_const);
5995 else if (op_code == MAX_EXPR)
5996 /* MAX (X, 0) > -1 -> true */
5997 return omit_one_operand (type, integer_one_node, inner);
5999 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6000 /* MIN (X, 0) > 0 -> false
6001 MIN (X, 0) > 5 -> false */
6002 return omit_one_operand (type, integer_zero_node, inner);
6005 /* MIN (X, 0) > -1 -> X > -1 */
6006 return fold_build2 (GT_EXPR, type, inner, comp_const);
6013 /* T is an integer expression that is being multiplied, divided, or taken a
6014 modulus (CODE says which and what kind of divide or modulus) by a
6015 constant C. See if we can eliminate that operation by folding it with
6016 other operations already in T. WIDE_TYPE, if non-null, is a type that
6017 should be used for the computation if wider than our type.
6019 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6020 (X * 2) + (Y * 4). We must, however, be assured that either the original
6021 expression would not overflow or that overflow is undefined for the type
6022 in the language in question.
6024 If we return a non-null expression, it is an equivalent form of the
6025 original computation, but need not be in the original type.
6027 We set *STRICT_OVERFLOW_P to true if the return values depends on
6028 signed overflow being undefined. Otherwise we do not change
6029 *STRICT_OVERFLOW_P. */
/* Depth-limiting wrapper around extract_muldiv_1.  T is the expression
   being combined with constant C under operation CODE; WIDE_TYPE, if
   non-null, may be used for the computation; *STRICT_OVERFLOW_P is set
   if the result relies on signed overflow being undefined.
   NOTE(review): this excerpt is elided -- the recursion-depth counter,
   the local declarations, and the return are not visible here.  */
6032 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6033 bool *strict_overflow_p)
6035 /* To avoid exponential search depth, refuse to allow recursion past
6036 three levels. Beyond that (1) it's highly unlikely that we'll find
6037 something interesting and (2) we've probably processed it before
6038 when we built the inner expression. */
6047 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
/* Worker for extract_muldiv: try to fold the multiply/divide/modulus of T
   by constant C (CODE says which operation) into the operations already
   inside T.  Returns an equivalent expression, not necessarily in T's
   original type, or null if no simplification is found.
   NOTE(review): this excerpt is elided -- several lines (early returns,
   the switch head, case labels, closing braces) are missing between the
   visible statements.  Comments below describe only what is visible.  */
6054 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6055 bool *strict_overflow_p)
6057 tree type = TREE_TYPE (t);
6058 enum tree_code tcode = TREE_CODE (t);
/* CTYPE is the wider of TYPE and WIDE_TYPE, used for the rewritten
   computation when widening is requested and actually wider.  */
6059 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6060 > GET_MODE_SIZE (TYPE_MODE (type)))
6061 ? wide_type : type);
6063 int same_p = tcode == code;
6064 tree op0 = NULL_TREE, op1 = NULL_TREE;
6065 bool sub_strict_overflow_p;
6067 /* Don't deal with constants of zero here; they confuse the code below. */
6068 if (integer_zerop (c))
6071 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6072 op0 = TREE_OPERAND (t, 0);
6074 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6075 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6077 /* Note that we need not handle conditional operations here since fold
6078 already handles those cases. So just do arithmetic here. */
6082 /* For a constant, we can always simplify if we are a multiply
6083 or (for divide and modulus) if it is a multiple of our constant. */
6084 if (code == MULT_EXPR
6085 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6086 return const_binop (code, fold_convert (ctype, t),
6087 fold_convert (ctype, c), 0);
/* Conversions: decide whether it is safe to push the operation through
   a widening/narrowing cast.  */
6090 CASE_CONVERT: case NON_LVALUE_EXPR:
6091 /* If op0 is an expression ... */
6092 if ((COMPARISON_CLASS_P (op0)
6093 || UNARY_CLASS_P (op0)
6094 || BINARY_CLASS_P (op0)
6095 || VL_EXP_CLASS_P (op0)
6096 || EXPRESSION_CLASS_P (op0))
6097 /* ... and has wrapping overflow, and its type is smaller
6098 than ctype, then we cannot pass through as widening. */
6099 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6100 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6101 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6102 && (TYPE_PRECISION (ctype)
6103 > TYPE_PRECISION (TREE_TYPE (op0))))
6104 /* ... or this is a truncation (t is narrower than op0),
6105 then we cannot pass through this narrowing. */
6106 || (TYPE_PRECISION (type)
6107 < TYPE_PRECISION (TREE_TYPE (op0)))
6108 /* ... or signedness changes for division or modulus,
6109 then we cannot pass through this conversion. */
6110 || (code != MULT_EXPR
6111 && (TYPE_UNSIGNED (ctype)
6112 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6113 /* ... or has undefined overflow while the converted to
6114 type has not, we cannot do the operation in the inner type
6115 as that would introduce undefined overflow. */
6116 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6117 && !TYPE_OVERFLOW_UNDEFINED (type))))
6120 /* Pass the constant down and see if we can make a simplification. If
6121 we can, replace this expression with the inner simplification for
6122 possible later conversion to our or some other type. */
6123 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6124 && TREE_CODE (t2) == INTEGER_CST
6125 && !TREE_OVERFLOW (t2)
6126 && (0 != (t1 = extract_muldiv (op0, t2, code,
6128 ? ctype : NULL_TREE,
6129 strict_overflow_p))))
/* ABS_EXPR handling (the case label is elided in this excerpt).  */
6134 /* If widening the type changes it from signed to unsigned, then we
6135 must avoid building ABS_EXPR itself as unsigned. */
6136 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6138 tree cstype = (*signed_type_for) (ctype);
6139 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6142 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6143 return fold_convert (ctype, t1);
6147 /* If the constant is negative, we cannot simplify this. */
6148 if (tree_int_cst_sgn (c) == -1)
6152 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6154 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6157 case MIN_EXPR: case MAX_EXPR:
6158 /* If widening the type changes the signedness, then we can't perform
6159 this optimization as that changes the result. */
6160 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6163 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6164 sub_strict_overflow_p = false;
6165 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6166 &sub_strict_overflow_p)) != 0
6167 && (t2 = extract_muldiv (op1, c, code, wide_type,
6168 &sub_strict_overflow_p)) != 0)
/* Dividing by a negative constant flips which bound is the min.  */
6170 if (tree_int_cst_sgn (c) < 0)
6171 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6172 if (sub_strict_overflow_p)
6173 *strict_overflow_p = true;
6174 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6175 fold_convert (ctype, t2));
6179 case LSHIFT_EXPR: case RSHIFT_EXPR:
6180 /* If the second operand is constant, this is a multiplication
6181 or floor division, by a power of two, so we can treat it that
6182 way unless the multiplier or divisor overflows. Signed
6183 left-shift overflow is implementation-defined rather than
6184 undefined in C90, so do not convert signed left shift into
6186 if (TREE_CODE (op1) == INTEGER_CST
6187 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6188 /* const_binop may not detect overflow correctly,
6189 so check for it explicitly here. */
6190 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6191 && TREE_INT_CST_HIGH (op1) == 0
6192 && 0 != (t1 = fold_convert (ctype,
6193 const_binop (LSHIFT_EXPR,
6196 && !TREE_OVERFLOW (t1))
/* Recast the shift as an explicit multiply/floor-divide and retry.  */
6197 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6198 ? MULT_EXPR : FLOOR_DIV_EXPR,
6199 ctype, fold_convert (ctype, op0), t1),
6200 c, code, wide_type, strict_overflow_p);
6203 case PLUS_EXPR: case MINUS_EXPR:
6204 /* See if we can eliminate the operation on both sides. If we can, we
6205 can return a new PLUS or MINUS. If we can't, the only remaining
6206 cases where we can do anything are if the second operand is a
6208 sub_strict_overflow_p = false;
6209 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6210 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6211 if (t1 != 0 && t2 != 0
6212 && (code == MULT_EXPR
6213 /* If not multiplication, we can only do this if both operands
6214 are divisible by c. */
6215 || (multiple_of_p (ctype, op0, c)
6216 && multiple_of_p (ctype, op1, c))))
6218 if (sub_strict_overflow_p)
6219 *strict_overflow_p = true;
6220 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6221 fold_convert (ctype, t2));
6224 /* If this was a subtraction, negate OP1 and set it to be an addition.
6225 This simplifies the logic below. */
6226 if (tcode == MINUS_EXPR)
6227 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6229 if (TREE_CODE (op1) != INTEGER_CST)
6232 /* If either OP1 or C are negative, this optimization is not safe for
6233 some of the division and remainder types while for others we need
6234 to change the code. */
6235 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6237 if (code == CEIL_DIV_EXPR)
6238 code = FLOOR_DIV_EXPR;
6239 else if (code == FLOOR_DIV_EXPR)
6240 code = CEIL_DIV_EXPR;
6241 else if (code != MULT_EXPR
6242 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6246 /* If it's a multiply or a division/modulus operation of a multiple
6247 of our constant, do the operation and verify it doesn't overflow. */
6248 if (code == MULT_EXPR
6249 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6251 op1 = const_binop (code, fold_convert (ctype, op1),
6252 fold_convert (ctype, c), 0);
6253 /* We allow the constant to overflow with wrapping semantics. */
6255 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6261 /* If we have an unsigned type is not a sizetype, we cannot widen
6262 the operation since it will change the result if the original
6263 computation overflowed. */
6264 if (TYPE_UNSIGNED (ctype)
6265 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6269 /* If we were able to eliminate our operation from the first side,
6270 apply our operation to the second side and reform the PLUS. */
6271 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6272 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6274 /* The last case is if we are a multiply. In that case, we can
6275 apply the distributive law to commute the multiply and addition
6276 if the multiplication of the constants doesn't overflow. */
6277 if (code == MULT_EXPR)
6278 return fold_build2 (tcode, ctype,
6279 fold_build2 (code, ctype,
6280 fold_convert (ctype, op0),
6281 fold_convert (ctype, c)),
/* MULT_EXPR handling (the case label is elided in this excerpt).  */
6287 /* We have a special case here if we are doing something like
6288 (C * 8) % 4 since we know that's zero. */
6289 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6290 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6291 /* If the multiplication can overflow we cannot optimize this.
6292 ??? Until we can properly mark individual operations as
6293 not overflowing we need to treat sizetype special here as
6294 stor-layout relies on this opimization to make
6295 DECL_FIELD_BIT_OFFSET always a constant. */
6296 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6297 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6298 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6299 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6300 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6302 *strict_overflow_p = true;
6303 return omit_one_operand (type, integer_zero_node, op0);
6306 /* ... fall through ... */
6308 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6309 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6310 /* If we can extract our operation from the LHS, do so and return a
6311 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6312 do something only if the second operand is a constant. */
6314 && (t1 = extract_muldiv (op0, c, code, wide_type,
6315 strict_overflow_p)) != 0)
6316 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6317 fold_convert (ctype, op1));
6318 else if (tcode == MULT_EXPR && code == MULT_EXPR
6319 && (t1 = extract_muldiv (op1, c, code, wide_type,
6320 strict_overflow_p)) != 0)
6321 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6322 fold_convert (ctype, t1));
6323 else if (TREE_CODE (op1) != INTEGER_CST)
6326 /* If these are the same operation types, we can associate them
6327 assuming no overflow. */
6329 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
6330 fold_convert (ctype, c), 1))
6331 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6332 TREE_INT_CST_HIGH (t1),
6333 (TYPE_UNSIGNED (ctype)
6334 && tcode != MULT_EXPR) ? -1 : 1,
6335 TREE_OVERFLOW (t1)))
6336 && !TREE_OVERFLOW (t1))
6337 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6339 /* If these operations "cancel" each other, we have the main
6340 optimizations of this pass, which occur when either constant is a
6341 multiple of the other, in which case we replace this with either an
6342 operation or CODE or TCODE.
6344 If we have an unsigned type that is not a sizetype, we cannot do
6345 this since it will change the result if the original computation
6347 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6348 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6349 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6350 || (tcode == MULT_EXPR
6351 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6352 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6353 && code != MULT_EXPR)))
/* OP1 is a multiple of C: replace with TCODE on the quotient OP1/C.  */
6355 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6357 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6358 *strict_overflow_p = true;
6359 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6360 fold_convert (ctype,
6361 const_binop (TRUNC_DIV_EXPR,
/* C is a multiple of OP1: replace with CODE on the quotient C/OP1.  */
6364 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6366 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6367 *strict_overflow_p = true;
6368 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6369 fold_convert (ctype,
6370 const_binop (TRUNC_DIV_EXPR,
6383 /* Return a node which has the indicated constant VALUE (either 0 or
6384 1), and is of the indicated TYPE. */
/* NOTE(review): the return-type line and braces are elided in this
   excerpt.  Reuses the shared singleton nodes for the two common types
   and falls back to building a fresh integer constant otherwise.  */
6387 constant_boolean_node (int value, tree type)
6389 if (type == integer_type_node)
6390 return value ? integer_one_node : integer_zero_node;
6391 else if (type == boolean_type_node)
6392 return value ? boolean_true_node : boolean_false_node;
6394 return build_int_cst (type, value);
6398 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6399 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6400 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6401 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6402 COND is the first argument to CODE; otherwise (as in the example
6403 given here), it is the second argument. TYPE is the type of the
6404 original expression. Return NULL_TREE if no simplification is
/* Distribute binary operation CODE over a COND_EXPR or comparison COND,
   combining each arm with ARG (see the comment block above for the full
   contract).  Returns the folded COND_EXPR converted to TYPE, or
   NULL_TREE when no simplification applies.
   NOTE(review): several early-return lines are elided in this excerpt.  */
6408 fold_binary_op_with_conditional_arg (enum tree_code code,
6409 tree type, tree op0, tree op1,
6410 tree cond, tree arg, int cond_first_p)
6412 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6413 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6414 tree test, true_value, false_value;
6415 tree lhs = NULL_TREE;
6416 tree rhs = NULL_TREE;
6418 /* This transformation is only worthwhile if we don't have to wrap
6419 arg in a SAVE_EXPR, and the operation can be simplified on at least
6420 one of the branches once its pushed inside the COND_EXPR. */
6421 if (!TREE_CONSTANT (arg))
6424 if (TREE_CODE (cond) == COND_EXPR)
6426 test = TREE_OPERAND (cond, 0);
6427 true_value = TREE_OPERAND (cond, 1);
6428 false_value = TREE_OPERAND (cond, 2);
6429 /* If this operand throws an expression, then it does not make
6430 sense to try to perform a logical or arithmetic operation
6432 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6434 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* Otherwise COND is a comparison: treat it as (cond ? true : false).  */
6439 tree testtype = TREE_TYPE (cond);
6441 true_value = constant_boolean_node (true, testtype);
6442 false_value = constant_boolean_node (false, testtype);
6445 arg = fold_convert (arg_type, arg);
/* Build each arm, respecting which side of CODE the condition was on.  */
6448 true_value = fold_convert (cond_type, true_value);
6450 lhs = fold_build2 (code, type, true_value, arg);
6452 lhs = fold_build2 (code, type, arg, true_value);
6456 false_value = fold_convert (cond_type, false_value);
6458 rhs = fold_build2 (code, type, false_value, arg);
6460 rhs = fold_build2 (code, type, arg, false_value);
6463 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6464 return fold_convert (type, test);
6468 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6470 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6471 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6472 ADDEND is the same as X.
6474 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6475 and finite. The problematic cases are when X is zero, and its mode
6476 has signed zeros. In the case of rounding towards -infinity,
6477 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6478 modes, X + 0 is not the same as X because -0 + 0 is 0. */
/* Return true when adding (or, if NEGATE, subtracting) ADDEND to any X of
   TYPE gives back X -- i.e. the +/-0.0 can be folded away.  See the
   comment block above for the rounding-mode caveats.
   NOTE(review): some return statements are elided in this excerpt.  */
6481 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6483 if (!real_zerop (addend))
6486 /* Don't allow the fold with -fsignaling-nans. */
6487 if (HONOR_SNANS (TYPE_MODE (type)))
6490 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6491 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6494 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6495 if (TREE_CODE (addend) == REAL_CST
6496 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6499 /* The mode has signed zeros, and we have to honor their sign.
6500 In this situation, there is only one case we can return true for.
6501 X - 0 is the same as X unless rounding towards -infinity is
6503 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6506 /* Subroutine of fold() that checks comparisons of built-in math
6507 functions against real constants.
6509 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6510 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6511 is the type of the result and ARG0 and ARG1 are the operands of the
6512 comparison. ARG1 must be a TREE_REAL_CST.
6514 The function returns the constant folded tree if a simplification
6515 can be made, and NULL_TREE otherwise. */
/* Fold comparisons of built-in math calls against real constants; only
   the sqrt cases are visible in this excerpt.  The transformations
   square the constant to compare against sqrt's argument directly,
   guarded by the NaN/Inf honoring flags of the operand's mode.
   NOTE(review): excerpt is elided -- some braces/returns are missing.  */
6518 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6519 tree type, tree arg0, tree arg1)
6523 if (BUILTIN_SQRT_P (fcode))
6525 tree arg = CALL_EXPR_ARG (arg0, 0);
6526 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6528 c = TREE_REAL_CST (arg1);
6529 if (REAL_VALUE_NEGATIVE (c))
6531 /* sqrt(x) < y is always false, if y is negative. */
6532 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6533 return omit_one_operand (type, integer_zero_node, arg);
6535 /* sqrt(x) > y is always true, if y is negative and we
6536 don't care about NaNs, i.e. negative values of x. */
6537 if (code == NE_EXPR || !HONOR_NANS (mode))
6538 return omit_one_operand (type, integer_one_node, arg);
6540 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6541 return fold_build2 (GE_EXPR, type, arg,
6542 build_real (TREE_TYPE (arg), dconst0));
6544 else if (code == GT_EXPR || code == GE_EXPR)
/* c2 = c*c in the operand's mode; overflow to +Inf is significant.  */
6548 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6549 real_convert (&c2, mode, &c2);
6551 if (REAL_VALUE_ISINF (c2))
6553 /* sqrt(x) > y is x == +Inf, when y is very large. */
6554 if (HONOR_INFINITIES (mode))
6555 return fold_build2 (EQ_EXPR, type, arg,
6556 build_real (TREE_TYPE (arg), c2));
6558 /* sqrt(x) > y is always false, when y is very large
6559 and we don't care about infinities. */
6560 return omit_one_operand (type, integer_zero_node, arg);
6563 /* sqrt(x) > c is the same as x > c*c. */
6564 return fold_build2 (code, type, arg,
6565 build_real (TREE_TYPE (arg), c2));
6567 else if (code == LT_EXPR || code == LE_EXPR)
6571 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6572 real_convert (&c2, mode, &c2);
6574 if (REAL_VALUE_ISINF (c2))
6576 /* sqrt(x) < y is always true, when y is a very large
6577 value and we don't care about NaNs or Infinities. */
6578 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6579 return omit_one_operand (type, integer_one_node, arg);
6581 /* sqrt(x) < y is x != +Inf when y is very large and we
6582 don't care about NaNs. */
6583 if (! HONOR_NANS (mode))
6584 return fold_build2 (NE_EXPR, type, arg,
6585 build_real (TREE_TYPE (arg), c2));
6587 /* sqrt(x) < y is x >= 0 when y is very large and we
6588 don't care about Infinities. */
6589 if (! HONOR_INFINITIES (mode))
6590 return fold_build2 (GE_EXPR, type, arg,
6591 build_real (TREE_TYPE (arg), dconst0));
6593 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6594 if (lang_hooks.decls.global_bindings_p () != 0
6595 || CONTAINS_PLACEHOLDER_P (arg))
/* save_expr so ARG is evaluated once across the two comparisons.  */
6598 arg = save_expr (arg);
6599 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6600 fold_build2 (GE_EXPR, type, arg,
6601 build_real (TREE_TYPE (arg),
6603 fold_build2 (NE_EXPR, type, arg,
6604 build_real (TREE_TYPE (arg),
6608 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6609 if (! HONOR_NANS (mode))
6610 return fold_build2 (code, type, arg,
6611 build_real (TREE_TYPE (arg), c2));
6613 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6614 if (lang_hooks.decls.global_bindings_p () == 0
6615 && ! CONTAINS_PLACEHOLDER_P (arg))
6617 arg = save_expr (arg);
6618 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6619 fold_build2 (GE_EXPR, type, arg,
6620 build_real (TREE_TYPE (arg),
6622 fold_build2 (code, type, arg,
6623 build_real (TREE_TYPE (arg),
6632 /* Subroutine of fold() that optimizes comparisons against Infinities,
6633 either +Inf or -Inf.
6635 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6636 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6637 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6639 The function returns the constant folded tree if a simplification
6640 can be made, and NULL_TREE otherwise. */
/* Fold comparisons of ARG0 against +/-Inf constant ARG1, rewriting them
   in terms of the mode's largest finite value (DBL_MAX analogue).  For
   -Inf the comparison sense is swapped first.
   NOTE(review): the switch head, several case labels, and the default
   return are elided in this excerpt.  */
6643 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6645 enum machine_mode mode;
6646 REAL_VALUE_TYPE max;
6650 mode = TYPE_MODE (TREE_TYPE (arg0));
6652 /* For negative infinity swap the sense of the comparison. */
6653 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6655 code = swap_tree_comparison (code);
6660 /* x > +Inf is always false, if with ignore sNANs. */
6661 if (HONOR_SNANS (mode))
6663 return omit_one_operand (type, integer_zero_node, arg0);
6666 /* x <= +Inf is always true, if we don't case about NaNs. */
6667 if (! HONOR_NANS (mode))
6668 return omit_one_operand (type, integer_one_node, arg0);
6670 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6671 if (lang_hooks.decls.global_bindings_p () == 0
6672 && ! CONTAINS_PLACEHOLDER_P (arg0))
6674 arg0 = save_expr (arg0);
6675 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6681 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6682 real_maxval (&max, neg, mode);
6683 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6684 arg0, build_real (TREE_TYPE (arg0), max));
6687 /* x < +Inf is always equal to x <= DBL_MAX. */
6688 real_maxval (&max, neg, mode);
6689 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6690 arg0, build_real (TREE_TYPE (arg0), max));
6693 /* x != +Inf is always equal to !(x > DBL_MAX). */
6694 real_maxval (&max, neg, mode);
6695 if (! HONOR_NANS (mode))
6696 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6697 arg0, build_real (TREE_TYPE (arg0), max));
/* With NaNs, express x != +Inf as the negation of x > DBL_MAX.  */
6699 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6700 arg0, build_real (TREE_TYPE (arg0), max));
6701 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6710 /* Subroutine of fold() that optimizes comparisons of a division by
6711 a nonzero integer constant against an integer constant, i.e.
6714 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6715 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6716 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6718 The function returns the constant folded tree if a simplification
6719 can be made, and NULL_TREE otherwise. */
/* Fold a comparison of (ARG00 / ARG01) against integer constant ARG1 by
   computing the range [LO, HI] of ARG00 values for which the division
   equals ARG1, then rewriting the comparison as a range check on ARG00.
   Overflow in the bound computation collapses branches to constants.
   NOTE(review): the switch heads, case labels, and some declarations
   are elided in this excerpt.  */
6722 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6724 tree prod, tmp, hi, lo;
6725 tree arg00 = TREE_OPERAND (arg0, 0);
6726 tree arg01 = TREE_OPERAND (arg0, 1);
6727 unsigned HOST_WIDE_INT lpart;
6728 HOST_WIDE_INT hpart;
6729 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6733 /* We have to do this the hard way to detect unsigned overflow.
6734 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6735 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6736 TREE_INT_CST_HIGH (arg01),
6737 TREE_INT_CST_LOW (arg1),
6738 TREE_INT_CST_HIGH (arg1),
6739 &lpart, &hpart, unsigned_p);
6740 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6742 neg_overflow = false;
/* Unsigned divisor branch (its guard is elided in this excerpt).  */
6746 tmp = int_const_binop (MINUS_EXPR, arg01,
6747 build_int_cst (TREE_TYPE (arg01), 1), 0);
6750 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6751 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6752 TREE_INT_CST_HIGH (prod),
6753 TREE_INT_CST_LOW (tmp),
6754 TREE_INT_CST_HIGH (tmp),
6755 &lpart, &hpart, unsigned_p);
6756 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6757 -1, overflow | TREE_OVERFLOW (prod));
6759 else if (tree_int_cst_sgn (arg01) >= 0)
6761 tmp = int_const_binop (MINUS_EXPR, arg01,
6762 build_int_cst (TREE_TYPE (arg01), 1), 0);
/* Bounds depend on the sign of the compared-against constant ARG1.  */
6763 switch (tree_int_cst_sgn (arg1))
6766 neg_overflow = true;
6767 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6772 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6777 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6787 /* A negative divisor reverses the relational operators. */
6788 code = swap_tree_comparison (code);
6790 tmp = int_const_binop (PLUS_EXPR, arg01,
6791 build_int_cst (TREE_TYPE (arg01), 1), 0);
6792 switch (tree_int_cst_sgn (arg1))
6795 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6800 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6805 neg_overflow = true;
6806 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Emit the final comparison per CODE (case labels elided below).  */
6818 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6819 return omit_one_operand (type, integer_zero_node, arg00);
6820 if (TREE_OVERFLOW (hi))
6821 return fold_build2 (GE_EXPR, type, arg00, lo);
6822 if (TREE_OVERFLOW (lo))
6823 return fold_build2 (LE_EXPR, type, arg00, hi);
6824 return build_range_check (type, arg00, 1, lo, hi);
6827 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6828 return omit_one_operand (type, integer_one_node, arg00);
6829 if (TREE_OVERFLOW (hi))
6830 return fold_build2 (LT_EXPR, type, arg00, lo);
6831 if (TREE_OVERFLOW (lo))
6832 return fold_build2 (GT_EXPR, type, arg00, hi);
6833 return build_range_check (type, arg00, 0, lo, hi);
6836 if (TREE_OVERFLOW (lo))
6838 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6839 return omit_one_operand (type, tmp, arg00);
6841 return fold_build2 (LT_EXPR, type, arg00, lo);
6844 if (TREE_OVERFLOW (hi))
6846 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6847 return omit_one_operand (type, tmp, arg00);
6849 return fold_build2 (LE_EXPR, type, arg00, hi);
6852 if (TREE_OVERFLOW (hi))
6854 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6855 return omit_one_operand (type, tmp, arg00);
6857 return fold_build2 (GT_EXPR, type, arg00, hi);
6860 if (TREE_OVERFLOW (lo))
6862 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6863 return omit_one_operand (type, tmp, arg00);
6865 return fold_build2 (GE_EXPR, type, arg00, lo);
6875 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6876 equality/inequality test, then return a simplified form of the test
6877 using a sign testing. Otherwise return NULL. TYPE is the desired
/* If (ARG0 & C) ==/!= 0 tests exactly the sign bit of ARG0's operand,
   rewrite it as a signed >= 0 / < 0 comparison in RESULT_TYPE; return
   NULL otherwise (the null return is elided in this excerpt).  */
6881 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6884 /* If this is testing a single bit, we can optimize the test. */
6885 if ((code == NE_EXPR || code == EQ_EXPR)
6886 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6887 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6889 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6890 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6891 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6893 if (arg00 != NULL_TREE
6894 /* This is only a win if casting to a signed type is cheap,
6895 i.e. when arg00's type is not a partial mode. */
6896 && TYPE_PRECISION (TREE_TYPE (arg00))
6897 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6899 tree stype = signed_type_for (TREE_TYPE (arg00));
6900 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6901 result_type, fold_convert (stype, arg00),
6902 build_int_cst (stype, 0));
6909 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6910 equality/inequality test, then return a simplified form of
6911 the test using shifts and logical operations. Otherwise return
6912 NULL. TYPE is the desired result type. */
/* If (ARG0 & C) ==/!= 0 tests a single bit C = 1<<C2, rewrite it using
   shifts and logical ops: ((A >> C2) & 1), XORed with 1 for ==.  First
   tries the cheaper sign-bit form.  Returns the rewritten expression in
   RESULT_TYPE, or NULL (the null return is elided in this excerpt).  */
6915 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6918 /* If this is testing a single bit, we can optimize the test. */
6919 if ((code == NE_EXPR || code == EQ_EXPR)
6920 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6921 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6923 tree inner = TREE_OPERAND (arg0, 0);
6924 tree type = TREE_TYPE (arg0);
6925 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6926 enum machine_mode operand_mode = TYPE_MODE (type);
6928 tree signed_type, unsigned_type, intermediate_type;
6931 /* First, see if we can fold the single bit test into a sign-bit
6933 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6938 /* Otherwise we have (A & C) != 0 where C is a single bit,
6939 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6940 Similarly for (A & C) == 0. */
6942 /* If INNER is a right shift of a constant and it plus BITNUM does
6943 not overflow, adjust BITNUM and INNER. */
6944 if (TREE_CODE (inner) == RSHIFT_EXPR
6945 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6946 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6947 && bitnum < TYPE_PRECISION (type)
6948 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6949 bitnum - TYPE_PRECISION (type)))
6951 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6952 inner = TREE_OPERAND (inner, 0);
6955 /* If we are going to be able to omit the AND below, we must do our
6956 operations as unsigned. If we must use the AND, we have a choice.
6957 Normally unsigned is faster, but for some machines signed is. */
6958 #ifdef LOAD_EXTEND_OP
6959 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6960 && !flag_syntax_only) ? 0 : 1;
/* (#else arm setting ops_unsigned is elided in this excerpt.)  */
6965 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6966 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6967 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6968 inner = fold_convert (intermediate_type, inner);
6971 inner = build2 (RSHIFT_EXPR, intermediate_type,
6972 inner, size_int (bitnum));
6974 one = build_int_cst (intermediate_type, 1);
6976 if (code == EQ_EXPR)
6977 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6979 /* Put the AND last so it can combine with more things. */
6980 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6982 /* Make sure to return the proper type. */
6983 inner = fold_convert (result_type, inner);
6990 /* Check whether we are allowed to reorder operands arg0 and arg1,
6991 such that the evaluation of arg1 occurs before arg0. */
/* Return nonzero if evaluating ARG1 before ARG0 is permissible: always
   when -fno-evaluation-order, when either operand is constant, or when
   neither has side effects.  (Early returns' values are elided here.)  */
6994 reorder_operands_p (const_tree arg0, const_tree arg1)
6996 if (! flag_evaluation_order)
6998 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7000 return ! TREE_SIDE_EFFECTS (arg0)
7001 && ! TREE_SIDE_EFFECTS (arg1);
7004 /* Test whether it is preferable two swap two operands, ARG0 and
7005 ARG1, for example because ARG0 is an integer constant and ARG1
7006 isn't. If REORDER is true, only recommend swapping if we can
7007 evaluate the operands in reverse order. */
/* Return whether ARG0 and ARG1 should be swapped to canonicalize a
   commutative/comparison expression: constants go last (checked from
   most to least specific constant kind), then SSA names are ordered by
   version, then SSA names and variables are pushed toward the end.
   NOTE(review): the return values of most branches are elided in this
   excerpt.  */
7010 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7012 STRIP_SIGN_NOPS (arg0);
7013 STRIP_SIGN_NOPS (arg1);
7015 if (TREE_CODE (arg1) == INTEGER_CST)
7017 if (TREE_CODE (arg0) == INTEGER_CST)
7020 if (TREE_CODE (arg1) == REAL_CST)
7022 if (TREE_CODE (arg0) == REAL_CST)
7025 if (TREE_CODE (arg1) == FIXED_CST)
7027 if (TREE_CODE (arg0) == FIXED_CST)
7030 if (TREE_CODE (arg1) == COMPLEX_CST)
7032 if (TREE_CODE (arg0) == COMPLEX_CST)
7035 if (TREE_CONSTANT (arg1))
7037 if (TREE_CONSTANT (arg0))
7040 if (optimize_function_for_size_p (cfun))
7043 if (reorder && flag_evaluation_order
7044 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7047 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7048 for commutative and comparison operators. Ensuring a canonical
7049 form allows the optimizers to find additional redundancies without
7050 having to explicitly check for both orderings. */
7051 if (TREE_CODE (arg0) == SSA_NAME
7052 && TREE_CODE (arg1) == SSA_NAME
7053 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7056 /* Put SSA_NAMEs last. */
7057 if (TREE_CODE (arg1) == SSA_NAME)
7059 if (TREE_CODE (arg0) == SSA_NAME)
7062 /* Put variables last. */
7071 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7072 ARG0 is extended to a wider type. */
/* NOTE(review): elided listing -- declarations of arg1_unw/min/max/above/
   below, braces and several returns are missing from this excerpt. */
7075 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7077 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7079 tree shorter_type, outer_type;
/* Nothing to do if ARG0 is not actually a widening conversion. */
7083 if (arg0_unw == arg0)
7085 shorter_type = TREE_TYPE (arg0_unw);
7087 #ifdef HAVE_canonicalize_funcptr_for_compare
7088 /* Disable this optimization if we're casting a function pointer
7089 type on targets that require function pointer canonicalization. */
7090 if (HAVE_canonicalize_funcptr_for_compare
7091 && TREE_CODE (shorter_type) == POINTER_TYPE
7092 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
/* Only profitable when the conversion actually widens. */
7096 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7099 arg1_unw = get_unwidened (arg1, NULL_TREE);
7101 /* If possible, express the comparison in the shorter mode. */
7102 if ((code == EQ_EXPR || code == NE_EXPR
7103 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7104 && (TREE_TYPE (arg1_unw) == shorter_type
7105 || ((TYPE_PRECISION (shorter_type)
7106 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7107 && (TYPE_UNSIGNED (shorter_type)
7108 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7109 || (TREE_CODE (arg1_unw) == INTEGER_CST
7110 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7111 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7112 && int_fits_type_p (arg1_unw, shorter_type))))
7113 return fold_build2 (code, type, arg0_unw,
7114 fold_convert (shorter_type, arg1_unw));
/* Past this point we only handle a constant ARG1 that fits the
   shorter integer type. */
7116 if (TREE_CODE (arg1_unw) != INTEGER_CST
7117 || TREE_CODE (shorter_type) != INTEGER_TYPE
7118 || !int_fits_type_p (arg1_unw, shorter_type))
7121 /* If we are comparing with the integer that does not fit into the range
7122 of the shorter type, the result is known. */
7123 outer_type = TREE_TYPE (arg1_unw);
7124 min = lower_bound_in_type (outer_type, shorter_type);
7125 max = upper_bound_in_type (outer_type, shorter_type);
7127 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7129 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
/* Elided switch on CODE: each branch replaces the comparison with a
   constant 0/1 while preserving ARG0's side effects. */
7136 return omit_one_operand (type, integer_zero_node, arg0);
7141 return omit_one_operand (type, integer_one_node, arg0);
7147 return omit_one_operand (type, integer_one_node, arg0);
7149 return omit_one_operand (type, integer_zero_node, arg0);
7154 return omit_one_operand (type, integer_zero_node, arg0);
7156 return omit_one_operand (type, integer_one_node, arg0);
7165 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7166 ARG0 just the signedness is changed. */
/* NOTE(review): elided listing -- the arg0_inner declaration, braces and
   early NULL_TREE returns between the visible lines are not shown. */
7169 fold_sign_changed_comparison (enum tree_code code, tree type,
7170 tree arg0, tree arg1)
7173 tree inner_type, outer_type;
7175 if (!CONVERT_EXPR_P (arg0))
7178 outer_type = TREE_TYPE (arg0);
7179 arg0_inner = TREE_OPERAND (arg0, 0);
7180 inner_type = TREE_TYPE (arg0_inner);
7182 #ifdef HAVE_canonicalize_funcptr_for_compare
7183 /* Disable this optimization if we're casting a function pointer
7184 type on targets that require function pointer canonicalization. */
7185 if (HAVE_canonicalize_funcptr_for_compare
7186 && TREE_CODE (inner_type) == POINTER_TYPE
7187 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
/* Only a pure sign change qualifies: precisions must match. */
7191 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7194 /* If the conversion is from an integral subtype to its basetype
7196 if (TREE_TYPE (inner_type) == outer_type)
/* ARG1 must be a constant, or itself a conversion from the inner type,
   so it can be re-expressed in INNER_TYPE. */
7199 if (TREE_CODE (arg1) != INTEGER_CST
7200 && !(CONVERT_EXPR_P (arg1)
7201 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7204 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7205 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
/* Re-express the constant in the inner type, keeping overflow state. */
7210 if (TREE_CODE (arg1) == INTEGER_CST)
7211 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7212 TREE_INT_CST_HIGH (arg1), 0,
7213 TREE_OVERFLOW (arg1));
7215 arg1 = fold_convert (inner_type, arg1);
7217 return fold_build2 (code, type, arg0_inner, arg1);
7220 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7221 step of the array. Reconstructs s and delta in the case of s * delta
7222 being an integer constant (and thus already folded).
7223 ADDR is the address. MULT is the multiplicative expression.
7224 If the function succeeds, the new address expression is returned. Otherwise
7225 NULL_TREE is returned. */
/* NOTE(review): elided listing -- declarations of ret/pos/itype and several
   braces, assignments and NULL_TREE returns are missing from this excerpt. */
7228 try_move_mult_to_index (tree addr, tree op1)
7230 tree s, delta, step;
7231 tree ref = TREE_OPERAND (addr, 0), pref;
7236 /* Strip the nops that might be added when converting op1 to sizetype. */
7239 /* Canonicalize op1 into a possibly non-constant delta
7240 and an INTEGER_CST s. */
7241 if (TREE_CODE (op1) == MULT_EXPR)
7243 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
/* Whichever MULT operand is constant becomes s; the other is delta
   (elided assignments). */
7248 if (TREE_CODE (arg0) == INTEGER_CST)
7253 else if (TREE_CODE (arg1) == INTEGER_CST)
7261 else if (TREE_CODE (op1) == INTEGER_CST)
7268 /* Simulate we are delta * 1. */
7270 s = integer_one_node;
/* Walk down the reference chain looking for an ARRAY_REF whose element
   step matches s (or divides op1 exactly). */
7273 for (;; ref = TREE_OPERAND (ref, 0))
7275 if (TREE_CODE (ref) == ARRAY_REF)
7277 /* Remember if this was a multi-dimensional array. */
7278 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7281 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7285 step = array_ref_element_size (ref);
7286 if (TREE_CODE (step) != INTEGER_CST)
7291 if (! tree_int_cst_equal (step, s))
7296 /* Try if delta is a multiple of step. */
7297 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7303 /* Only fold here if we can verify we do not overflow one
7304 dimension of a multi-dimensional array. */
/* Requires a constant existing index and a known constant upper bound
   of the array domain to prove the new index stays in range. */
7309 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7310 || !INTEGRAL_TYPE_P (itype)
7311 || !TYPE_MAX_VALUE (itype)
7312 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7315 tmp = fold_binary (PLUS_EXPR, itype,
7316 fold_convert (itype,
7317 TREE_OPERAND (ref, 1)),
7318 fold_convert (itype, delta));
7320 || TREE_CODE (tmp) != INTEGER_CST
7321 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7330 if (!handled_component_p (ref))
7334 /* We found the suitable array reference. So copy everything up to it,
7335 and replace the index. */
7337 pref = TREE_OPERAND (addr, 0);
7338 ret = copy_node (pref);
/* Elided loop: copy each component node down to the ARRAY_REF so the
   original tree is left unmodified. */
7343 pref = TREE_OPERAND (pref, 0);
7344 TREE_OPERAND (pos, 0) = copy_node (pref);
7345 pos = TREE_OPERAND (pos, 0);
7348 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7349 fold_convert (itype,
7350 TREE_OPERAND (pos, 1)),
7351 fold_convert (itype, delta));
7353 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7357 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7358 means A >= Y && A != MAX, but in this case we know that
7359 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
/* NOTE(review): elided listing -- braces and the NULL_TREE early returns
   between the visible lines are not shown. */
7362 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7364 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from BOUND, which must be A < X or X > A. */
7366 if (TREE_CODE (bound) == LT_EXPR)
7367 a = TREE_OPERAND (bound, 0);
7368 else if (TREE_CODE (bound) == GT_EXPR)
7369 a = TREE_OPERAND (bound, 1);
7373 typea = TREE_TYPE (a);
7374 if (!INTEGRAL_TYPE_P (typea)
7375 && !POINTER_TYPE_P (typea))
/* Extract A1 (expected to be A + 1) and Y from INEQ. */
7378 if (TREE_CODE (ineq) == LT_EXPR)
7380 a1 = TREE_OPERAND (ineq, 1);
7381 y = TREE_OPERAND (ineq, 0);
7383 else if (TREE_CODE (ineq) == GT_EXPR)
7385 a1 = TREE_OPERAND (ineq, 0);
7386 y = TREE_OPERAND (ineq, 1);
7391 if (TREE_TYPE (a1) != typea)
7394 if (POINTER_TYPE_P (typea))
7396 /* Convert the pointer types into integer before taking the difference. */
7397 tree ta = fold_convert (ssizetype, a);
7398 tree ta1 = fold_convert (ssizetype, a1);
7399 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7402 diff = fold_binary (MINUS_EXPR, typea, a1, a);
/* The transformation is only valid when A1 is exactly A + 1. */
7404 if (!diff || !integer_onep (diff))
7407 return fold_build2 (GE_EXPR, type, a, y);
7410 /* Fold a sum or difference of at least one multiplication.
7411 Returns the folded tree or NULL if no simplification could be made. */
/* NOTE(review): elided listing -- braces, a maybe_same variable, several
   NULL_TREE returns and the final "same" check are missing from this
   excerpt. */
7414 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7416 tree arg00, arg01, arg10, arg11;
7417 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7419 /* (A * C) +- (B * C) -> (A+-B) * C.
7420 (A * C) +- A -> A * (C+-1).
7421 We are most concerned about the case where C is a constant,
7422 but other combinations show up during loop reduction. Since
7423 it is not difficult, try all four possibilities. */
/* Decompose ARG0 into arg00 * arg01, treating a non-MULT operand as
   operand * 1. */
7425 if (TREE_CODE (arg0) == MULT_EXPR)
7427 arg00 = TREE_OPERAND (arg0, 0);
7428 arg01 = TREE_OPERAND (arg0, 1);
7430 else if (TREE_CODE (arg0) == INTEGER_CST)
7432 arg00 = build_one_cst (type);
7437 /* We cannot generate constant 1 for fract. */
7438 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7441 arg01 = build_one_cst (type);
/* Likewise decompose ARG1 into arg10 * arg11. */
7443 if (TREE_CODE (arg1) == MULT_EXPR)
7445 arg10 = TREE_OPERAND (arg1, 0);
7446 arg11 = TREE_OPERAND (arg1, 1);
7448 else if (TREE_CODE (arg1) == INTEGER_CST)
7450 arg10 = build_one_cst (type);
7451 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7452 the purpose of this canonicalization. */
7453 if (TREE_INT_CST_HIGH (arg1) == -1
7454 && negate_expr_p (arg1)
7455 && code == PLUS_EXPR)
7457 arg11 = negate_expr (arg1);
7465 /* We cannot generate constant 1 for fract. */
7466 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7469 arg11 = build_one_cst (type);
/* Look for a common factor among the four multiplicands. */
7473 if (operand_equal_p (arg01, arg11, 0))
7474 same = arg01, alt0 = arg00, alt1 = arg10;
7475 else if (operand_equal_p (arg00, arg10, 0))
7476 same = arg00, alt0 = arg01, alt1 = arg11;
7477 else if (operand_equal_p (arg00, arg11, 0))
7478 same = arg00, alt0 = arg01, alt1 = arg10;
7479 else if (operand_equal_p (arg01, arg10, 0))
7480 same = arg01, alt0 = arg00, alt1 = arg11;
7482 /* No identical multiplicands; see if we can find a common
7483 power-of-two factor in non-power-of-two multiplies. This
7484 can help in multi-dimensional array access. */
7485 else if (host_integerp (arg01, 0)
7486 && host_integerp (arg11, 0))
7488 HOST_WIDE_INT int01, int11, tmp;
7491 int01 = TREE_INT_CST_LOW (arg01);
7492 int11 = TREE_INT_CST_LOW (arg11);
7494 /* Move min of absolute values to int11. */
7495 if ((int01 >= 0 ? int01 : -int01)
7496 < (int11 >= 0 ? int11 : -int11))
7498 tmp = int01, int01 = int11, int11 = tmp;
7499 alt0 = arg00, arg00 = arg10, arg10 = alt0;
/* If int11 is a power of two dividing int01, factor it out:
   arg00*int01 +- arg10*int11 == (arg00*(int01/int11) +- arg10) * int11. */
7506 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7508 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7509 build_int_cst (TREE_TYPE (arg00),
7514 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7519 return fold_build2 (MULT_EXPR, type,
7520 fold_build2 (code, type,
7521 fold_convert (type, alt0),
7522 fold_convert (type, alt1)),
7523 fold_convert (type, same));
7528 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7529 specified by EXPR into the buffer PTR of length LEN bytes.
7530 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): elided listing -- braces, the "return 0" on overflow of
   LEN, and the final "return total_bytes" are not shown. */
7534 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7536 tree type = TREE_TYPE (expr);
7537 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7538 int byte, offset, word, words;
7539 unsigned char value;
7541 if (total_bytes > len)
7543 words = total_bytes / UNITS_PER_WORD;
/* Emit the constant one byte at a time in target byte/word order. */
7545 for (byte = 0; byte < total_bytes; byte++)
7547 int bitpos = byte * BITS_PER_UNIT;
/* The double-int value lives in a LOW/HIGH pair of host words. */
7548 if (bitpos < HOST_BITS_PER_WIDE_INT)
7549 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7551 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7552 >> (bitpos - HOST_BITS_PER_WIDE_INT));
/* Map the logical byte index to the target's byte/word endianness. */
7554 if (total_bytes > UNITS_PER_WORD)
7556 word = byte / UNITS_PER_WORD;
7557 if (WORDS_BIG_ENDIAN)
7558 word = (words - 1) - word;
7559 offset = word * UNITS_PER_WORD;
7560 if (BYTES_BIG_ENDIAN)
7561 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7563 offset += byte % UNITS_PER_WORD;
7566 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7567 ptr[offset] = value;
7573 /* Subroutine of native_encode_expr. Encode the REAL_CST
7574 specified by EXPR into the buffer PTR of length LEN bytes.
7575 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): elided listing -- the "long tmp[6]" buffer declaration,
   braces and return statements are not shown. */
7579 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7581 tree type = TREE_TYPE (expr);
7582 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7583 int byte, offset, word, words, bitpos;
7584 unsigned char value;
7586 /* There are always 32 bits in each long, no matter the size of
7587 the hosts long. We handle floating point representations with
7591 if (total_bytes > len)
7593 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
/* Get the target image of the float as 32-bit groups in tmp[]. */
7595 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7597 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7598 bitpos += BITS_PER_UNIT)
/* byte is the index within the current 32-bit group. */
7600 byte = (bitpos / BITS_PER_UNIT) & 3;
7601 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
/* Map that index to the target's byte/word endianness. */
7603 if (UNITS_PER_WORD < 4)
7605 word = byte / UNITS_PER_WORD;
7606 if (WORDS_BIG_ENDIAN)
7607 word = (words - 1) - word;
7608 offset = word * UNITS_PER_WORD;
7609 if (BYTES_BIG_ENDIAN)
7610 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7612 offset += byte % UNITS_PER_WORD;
7615 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7616 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7621 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7622 specified by EXPR into the buffer PTR of length LEN bytes.
7623 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): elided listing -- declarations of part/rsize/isize and the
   "return 0" failure checks after each encode are not shown. */
7627 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
/* Encode the real part first, then the imaginary part right after it. */
7632 part = TREE_REALPART (expr);
7633 rsize = native_encode_expr (part, ptr, len);
7636 part = TREE_IMAGPART (expr);
7637 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7640 return rsize + isize;
7644 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7645 specified by EXPR into the buffer PTR of length LEN bytes.
7646 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): elided listing -- the offset initialization, loop braces,
   failure returns and the final return are not shown. */
7650 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7652 int i, size, offset, count;
7653 tree itype, elem, elements;
7656 elements = TREE_VECTOR_CST_ELTS (expr);
7657 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7658 itype = TREE_TYPE (TREE_TYPE (expr));
7659 size = GET_MODE_SIZE (TYPE_MODE (itype));
/* Encode each element in order; elements is a TREE_LIST chain. */
7660 for (i = 0; i < count; i++)
7664 elem = TREE_VALUE (elements);
7665 elements = TREE_CHAIN (elements);
7672 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
/* Trailing elements missing from the chain are encoded as zeros. */
7677 if (offset + size > len)
7679 memset (ptr+offset, 0, size);
7687 /* Subroutine of native_encode_expr. Encode the STRING_CST
7688 specified by EXPR into the buffer PTR of length LEN bytes.
7689 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): elided listing -- braces, the failure "return 0" paths and
   the final "return total_bytes" are not shown. */
7693 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7695 tree type = TREE_TYPE (expr);
7696 HOST_WIDE_INT total_bytes;
/* Only plain byte arrays of known constant size are supported. */
7698 if (TREE_CODE (type) != ARRAY_TYPE
7699 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7700 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7701 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7703 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7704 if (total_bytes > len)
/* If the literal is shorter than the array, zero-fill the tail. */
7706 if (TREE_STRING_LENGTH (expr) < total_bytes)
7708 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7709 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7710 total_bytes - TREE_STRING_LENGTH (expr));
7713 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7718 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7719 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7720 buffer PTR of length LEN bytes. Return the number of bytes
7721 placed in the buffer, or zero upon failure. */
/* NOTE(review): elided listing -- the case labels (INTEGER_CST, REAL_CST,
   COMPLEX_CST, VECTOR_CST, STRING_CST) and the default "return 0" are not
   shown; only the dispatch calls are visible. */
7724 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7726 switch (TREE_CODE (expr))
7729 return native_encode_int (expr, ptr, len);
7732 return native_encode_real (expr, ptr, len);
7735 return native_encode_complex (expr, ptr, len);
7738 return native_encode_vector (expr, ptr, len);
7741 return native_encode_string (expr, ptr, len);
7749 /* Subroutine of native_interpret_expr. Interpret the contents of
7750 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7751 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): elided listing -- braces and the NULL_TREE failure returns
   are not shown.  This is the inverse of native_encode_int. */
7754 native_interpret_int (tree type, const unsigned char *ptr, int len)
7756 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7757 int byte, offset, word, words;
7758 unsigned char value;
7759 unsigned int HOST_WIDE_INT lo = 0;
7760 HOST_WIDE_INT hi = 0;
7762 if (total_bytes > len)
/* The result must fit in the LOW/HIGH double-int pair. */
7764 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7766 words = total_bytes / UNITS_PER_WORD;
7768 for (byte = 0; byte < total_bytes; byte++)
7770 int bitpos = byte * BITS_PER_UNIT;
/* Map the logical byte index to the target's byte/word endianness
   (mirrors the layout used by native_encode_int). */
7771 if (total_bytes > UNITS_PER_WORD)
7773 word = byte / UNITS_PER_WORD;
7774 if (WORDS_BIG_ENDIAN)
7775 word = (words - 1) - word;
7776 offset = word * UNITS_PER_WORD;
7777 if (BYTES_BIG_ENDIAN)
7778 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7780 offset += byte % UNITS_PER_WORD;
7783 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7784 value = ptr[offset];
7786 if (bitpos < HOST_BITS_PER_WIDE_INT)
7787 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7789 hi |= (unsigned HOST_WIDE_INT) value
7790 << (bitpos - HOST_BITS_PER_WIDE_INT);
7793 return build_int_cst_wide_type (type, lo, hi);
7797 /* Subroutine of native_interpret_expr. Interpret the contents of
7798 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7799 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): elided listing -- the REAL_VALUE_TYPE r and long tmp[]
   declarations, braces and the failure return are not shown.  Inverse of
   native_encode_real. */
7802 native_interpret_real (tree type, const unsigned char *ptr, int len)
7804 enum machine_mode mode = TYPE_MODE (type);
7805 int total_bytes = GET_MODE_SIZE (mode);
7806 int byte, offset, word, words, bitpos;
7807 unsigned char value;
7808 /* There are always 32 bits in each long, no matter the size of
7809 the hosts long. We handle floating point representations with
7814 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7815 if (total_bytes > len || total_bytes > 24)
7817 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7819 memset (tmp, 0, sizeof (tmp));
7820 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7821 bitpos += BITS_PER_UNIT)
/* byte is the index within the current 32-bit group; undo the same
   endianness mapping native_encode_real applied. */
7823 byte = (bitpos / BITS_PER_UNIT) & 3;
7824 if (UNITS_PER_WORD < 4)
7826 word = byte / UNITS_PER_WORD;
7827 if (WORDS_BIG_ENDIAN)
7828 word = (words - 1) - word;
7829 offset = word * UNITS_PER_WORD;
7830 if (BYTES_BIG_ENDIAN)
7831 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7833 offset += byte % UNITS_PER_WORD;
7836 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7837 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7839 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7842 real_from_target (&r, tmp, mode);
7843 return build_real (type, r);
7847 /* Subroutine of native_interpret_expr. Interpret the contents of
7848 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7849 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): elided listing -- the size declaration, the LEN check and
   the NULL_TREE failure returns after each interpret call are not shown. */
7852 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7854 tree etype, rpart, ipart;
7857 etype = TREE_TYPE (type);
7858 size = GET_MODE_SIZE (TYPE_MODE (etype));
/* Real part occupies the first SIZE bytes, imaginary part the next. */
7861 rpart = native_interpret_expr (etype, ptr, size);
7864 ipart = native_interpret_expr (etype, ptr+size, size);
7867 return build_complex (type, rpart, ipart);
7871 /* Subroutine of native_interpret_expr. Interpret the contents of
7872 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7873 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): elided listing -- the i/size/count declarations, braces and
   the NULL_TREE failure return inside the loop are not shown. */
7876 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7878 tree etype, elem, elements;
7881 etype = TREE_TYPE (type);
7882 size = GET_MODE_SIZE (TYPE_MODE (etype));
7883 count = TYPE_VECTOR_SUBPARTS (type);
7884 if (size * count > len)
7887 elements = NULL_TREE;
/* Build the element list back-to-front so it ends up in order. */
7888 for (i = count - 1; i >= 0; i--)
7890 elem = native_interpret_expr (etype, ptr+(i*size), size);
7893 elements = tree_cons (NULL_TREE, elem, elements);
7895 return build_vector (type, elements);
7899 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7900 the buffer PTR of length LEN as a constant of type TYPE. For
7901 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7902 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7903 return NULL_TREE. */
/* NOTE(review): elided listing -- the case labels and the default
   "return NULL_TREE" are not shown; only the dispatch calls are visible. */
7906 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7908 switch (TREE_CODE (type))
7913 return native_interpret_int (type, ptr, len);
7916 return native_interpret_real (type, ptr, len);
7919 return native_interpret_complex (type, ptr, len);
7922 return native_interpret_vector (type, ptr, len);
7930 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7931 TYPE at compile-time. If we're unable to perform the conversion
7932 return NULL_TREE. */
/* NOTE(review): elided listing -- the int len declaration, braces and the
   NULL_TREE returns on the failure paths are not shown. */
7935 fold_view_convert_expr (tree type, tree expr)
7937 /* We support up to 512-bit values (for V8DFmode). */
7938 unsigned char buffer[64];
7941 /* Check that the host and target are sane. */
7942 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
/* Round-trip: serialize EXPR to target bytes, then reinterpret those
   bytes as a constant of TYPE. */
7945 len = native_encode_expr (expr, buffer, sizeof (buffer));
7949 return native_interpret_expr (type, buffer, len);
7952 /* Build an expression for the address of T. Folds away INDIRECT_REF
7953 to avoid confusing the gimplify process. When IN_FOLD is true
7954 avoid modifications of T. */
/* NOTE(review): elided listing -- the base declaration, braces, the in_fold
   test separating the two branches, and the final return are not shown. */
7957 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7959 /* The size of the object is not relevant when talking about its address. */
7960 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7961 t = TREE_OPERAND (t, 0);
7963 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7964 if (TREE_CODE (t) == INDIRECT_REF
7965 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
/* &*p folds to p (possibly with a pointer cast). */
7967 t = TREE_OPERAND (t, 0);
7969 if (TREE_TYPE (t) != ptrtype)
7970 t = build1 (NOP_EXPR, ptrtype, t);
/* Non-fold path: taking the address makes the innermost base object
   addressable. */
7976 while (handled_component_p (base))
7977 base = TREE_OPERAND (base, 0);
7980 TREE_ADDRESSABLE (base) = 1;
7982 t = build1 (ADDR_EXPR, ptrtype, t);
7985 t = build1 (ADDR_EXPR, ptrtype, t);
7990 /* Build an expression for the address of T with type PTRTYPE. This
7991 function modifies the input parameter 'T' by sometimes setting the
7992 TREE_ADDRESSABLE flag. */
/* Thin wrapper: in_fold == false selects the mutating (non-fold) path. */
7995 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7997 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
8000 /* Build an expression for the address of T. This function modifies
8001 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
8002 flag. When called from fold functions, use fold_addr_expr instead. */
/* Convenience wrapper that derives the pointer type from T's own type.
   (The trailing in_fold argument, presumably false, is elided here.) */
8005 build_fold_addr_expr (tree t)
8007 return build_fold_addr_expr_with_type_1 (t,
8008 build_pointer_type (TREE_TYPE (t)),
8012 /* Same as build_fold_addr_expr, builds an expression for the address
8013 of T, but avoids touching the input node 't'. Fold functions
8014 should use this version. */
/* in_fold == true: the non-mutating variant safe inside the folder. */
8017 fold_addr_expr (tree t)
8019 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8021 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
8024 /* Fold a unary expression of code CODE and type TYPE with operand
8025 OP0. Return the folded expression if folding is successful.
8026 Otherwise, return NULL_TREE. */
8029 fold_unary (enum tree_code code, tree type, tree op0)
8033 enum tree_code_class kind = TREE_CODE_CLASS (code);
8035 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8036 && TREE_CODE_LENGTH (code) == 1);
8041 if (CONVERT_EXPR_CODE_P (code)
8042 || code == FLOAT_EXPR || code == ABS_EXPR)
8044 /* Don't use STRIP_NOPS, because signedness of argument type
8046 STRIP_SIGN_NOPS (arg0);
8050 /* Strip any conversions that don't change the mode. This
8051 is safe for every expression, except for a comparison
8052 expression because its signedness is derived from its
8055 Note that this is done as an internal manipulation within
8056 the constant folder, in order to find the simplest
8057 representation of the arguments so that their form can be
8058 studied. In any cases, the appropriate type conversions
8059 should be put back in the tree that will get out of the
8065 if (TREE_CODE_CLASS (code) == tcc_unary)
8067 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8068 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8069 fold_build1 (code, type,
8070 fold_convert (TREE_TYPE (op0),
8071 TREE_OPERAND (arg0, 1))));
8072 else if (TREE_CODE (arg0) == COND_EXPR)
8074 tree arg01 = TREE_OPERAND (arg0, 1);
8075 tree arg02 = TREE_OPERAND (arg0, 2);
8076 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8077 arg01 = fold_build1 (code, type,
8078 fold_convert (TREE_TYPE (op0), arg01));
8079 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8080 arg02 = fold_build1 (code, type,
8081 fold_convert (TREE_TYPE (op0), arg02));
8082 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
8085 /* If this was a conversion, and all we did was to move into
8086 inside the COND_EXPR, bring it back out. But leave it if
8087 it is a conversion from integer to integer and the
8088 result precision is no wider than a word since such a
8089 conversion is cheap and may be optimized away by combine,
8090 while it couldn't if it were outside the COND_EXPR. Then return
8091 so we don't get into an infinite recursion loop taking the
8092 conversion out and then back in. */
8094 if ((CONVERT_EXPR_CODE_P (code)
8095 || code == NON_LVALUE_EXPR)
8096 && TREE_CODE (tem) == COND_EXPR
8097 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8098 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8099 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8100 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8101 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8102 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8103 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8105 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8106 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8107 || flag_syntax_only))
8108 tem = build1 (code, type,
8110 TREE_TYPE (TREE_OPERAND
8111 (TREE_OPERAND (tem, 1), 0)),
8112 TREE_OPERAND (tem, 0),
8113 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8114 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8117 else if (COMPARISON_CLASS_P (arg0))
8119 if (TREE_CODE (type) == BOOLEAN_TYPE)
8121 arg0 = copy_node (arg0);
8122 TREE_TYPE (arg0) = type;
8125 else if (TREE_CODE (type) != INTEGER_TYPE)
8126 return fold_build3 (COND_EXPR, type, arg0,
8127 fold_build1 (code, type,
8129 fold_build1 (code, type,
8130 integer_zero_node));
8137 /* Re-association barriers around constants and other re-association
8138 barriers can be removed. */
8139 if (CONSTANT_CLASS_P (op0)
8140 || TREE_CODE (op0) == PAREN_EXPR)
8141 return fold_convert (type, op0);
8146 case FIX_TRUNC_EXPR:
8147 if (TREE_TYPE (op0) == type)
8150 /* If we have (type) (a CMP b) and type is an integral type, return
8151 new expression involving the new type. */
8152 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8153 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8154 TREE_OPERAND (op0, 1));
8156 /* Handle cases of two conversions in a row. */
8157 if (CONVERT_EXPR_P (op0))
8159 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8160 tree inter_type = TREE_TYPE (op0);
8161 int inside_int = INTEGRAL_TYPE_P (inside_type);
8162 int inside_ptr = POINTER_TYPE_P (inside_type);
8163 int inside_float = FLOAT_TYPE_P (inside_type);
8164 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8165 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8166 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8167 int inter_int = INTEGRAL_TYPE_P (inter_type);
8168 int inter_ptr = POINTER_TYPE_P (inter_type);
8169 int inter_float = FLOAT_TYPE_P (inter_type);
8170 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8171 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8172 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8173 int final_int = INTEGRAL_TYPE_P (type);
8174 int final_ptr = POINTER_TYPE_P (type);
8175 int final_float = FLOAT_TYPE_P (type);
8176 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8177 unsigned int final_prec = TYPE_PRECISION (type);
8178 int final_unsignedp = TYPE_UNSIGNED (type);
8180 /* In addition to the cases of two conversions in a row
8181 handled below, if we are converting something to its own
8182 type via an object of identical or wider precision, neither
8183 conversion is needed. */
8184 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8185 && (((inter_int || inter_ptr) && final_int)
8186 || (inter_float && final_float))
8187 && inter_prec >= final_prec)
8188 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8190 /* Likewise, if the intermediate and initial types are either both
8191 float or both integer, we don't need the middle conversion if the
8192 former is wider than the latter and doesn't change the signedness
8193 (for integers). Avoid this if the final type is a pointer since
8194 then we sometimes need the middle conversion. Likewise if the
8195 final type has a precision not equal to the size of its mode. */
8196 if (((inter_int && inside_int)
8197 || (inter_float && inside_float)
8198 || (inter_vec && inside_vec))
8199 && inter_prec >= inside_prec
8200 && (inter_float || inter_vec
8201 || inter_unsignedp == inside_unsignedp)
8202 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8203 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8205 && (! final_vec || inter_prec == inside_prec))
8206 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8208 /* If we have a sign-extension of a zero-extended value, we can
8209 replace that by a single zero-extension. */
8210 if (inside_int && inter_int && final_int
8211 && inside_prec < inter_prec && inter_prec < final_prec
8212 && inside_unsignedp && !inter_unsignedp)
8213 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8215 /* Two conversions in a row are not needed unless:
8216 - some conversion is floating-point (overstrict for now), or
8217 - some conversion is a vector (overstrict for now), or
8218 - the intermediate type is narrower than both initial and
8220 - the intermediate type and innermost type differ in signedness,
8221 and the outermost type is wider than the intermediate, or
8222 - the initial type is a pointer type and the precisions of the
8223 intermediate and final types differ, or
8224 - the final type is a pointer type and the precisions of the
8225 initial and intermediate types differ. */
8226 if (! inside_float && ! inter_float && ! final_float
8227 && ! inside_vec && ! inter_vec && ! final_vec
8228 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8229 && ! (inside_int && inter_int
8230 && inter_unsignedp != inside_unsignedp
8231 && inter_prec < final_prec)
8232 && ((inter_unsignedp && inter_prec > inside_prec)
8233 == (final_unsignedp && final_prec > inter_prec))
8234 && ! (inside_ptr && inter_prec != final_prec)
8235 && ! (final_ptr && inside_prec != inter_prec)
8236 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8237 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8238 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8241 /* Handle (T *)&A.B.C for A being of type T and B and C
8242 living at offset zero. This occurs frequently in
8243 C++ upcasting and then accessing the base. */
8244 if (TREE_CODE (op0) == ADDR_EXPR
8245 && POINTER_TYPE_P (type)
8246 && handled_component_p (TREE_OPERAND (op0, 0)))
8248 HOST_WIDE_INT bitsize, bitpos;
8250 enum machine_mode mode;
8251 int unsignedp, volatilep;
8252 tree base = TREE_OPERAND (op0, 0);
8253 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8254 &mode, &unsignedp, &volatilep, false);
8255 /* If the reference was to a (constant) zero offset, we can use
8256 the address of the base if it has the same base type
8257 as the result type. */
8258 if (! offset && bitpos == 0
8259 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8260 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8261 return fold_convert (type, fold_addr_expr (base));
8264 if (TREE_CODE (op0) == MODIFY_EXPR
8265 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8266 /* Detect assigning a bitfield. */
8267 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8269 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8271 /* Don't leave an assignment inside a conversion
8272 unless assigning a bitfield. */
8273 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
8274 /* First do the assignment, then return converted constant. */
8275 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8276 TREE_NO_WARNING (tem) = 1;
8277 TREE_USED (tem) = 1;
8281 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8282 constants (if x has signed type, the sign bit cannot be set
8283 in c). This folds extension into the BIT_AND_EXPR.
8284 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8285 very likely don't have maximal range for their precision and this
8286 transformation effectively doesn't preserve non-maximal ranges. */
8287 if (TREE_CODE (type) == INTEGER_TYPE
8288 && TREE_CODE (op0) == BIT_AND_EXPR
8289 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
8290 /* Not if the conversion is to the sub-type. */
8291 && TREE_TYPE (type) != TREE_TYPE (op0))
8294 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8297 if (TYPE_UNSIGNED (TREE_TYPE (and))
8298 || (TYPE_PRECISION (type)
8299 <= TYPE_PRECISION (TREE_TYPE (and))))
8301 else if (TYPE_PRECISION (TREE_TYPE (and1))
8302 <= HOST_BITS_PER_WIDE_INT
8303 && host_integerp (and1, 1))
8305 unsigned HOST_WIDE_INT cst;
8307 cst = tree_low_cst (and1, 1);
8308 cst &= (HOST_WIDE_INT) -1
8309 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8310 change = (cst == 0);
8311 #ifdef LOAD_EXTEND_OP
8313 && !flag_syntax_only
8314 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8317 tree uns = unsigned_type_for (TREE_TYPE (and0));
8318 and0 = fold_convert (uns, and0);
8319 and1 = fold_convert (uns, and1);
8325 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8326 TREE_INT_CST_HIGH (and1), 0,
8327 TREE_OVERFLOW (and1));
8328 return fold_build2 (BIT_AND_EXPR, type,
8329 fold_convert (type, and0), tem);
8333 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8334 when one of the new casts will fold away. Conservatively we assume
8335 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8336 if (POINTER_TYPE_P (type)
8337 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8338 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8339 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8340 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8342 tree arg00 = TREE_OPERAND (arg0, 0);
8343 tree arg01 = TREE_OPERAND (arg0, 1);
8345 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8346 fold_convert (sizetype, arg01));
8349 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8350 of the same precision, and X is an integer type not narrower than
8351 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8352 if (INTEGRAL_TYPE_P (type)
8353 && TREE_CODE (op0) == BIT_NOT_EXPR
8354 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8355 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8356 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8358 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8359 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8360 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8361 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8364 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8365 type of X and Y (integer types only). */
8366 if (INTEGRAL_TYPE_P (type)
8367 && TREE_CODE (op0) == MULT_EXPR
8368 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8369 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8371 /* Be careful not to introduce new overflows. */
8373 if (TYPE_OVERFLOW_WRAPS (type))
8376 mult_type = unsigned_type_for (type);
8378 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8380 tem = fold_build2 (MULT_EXPR, mult_type,
8381 fold_convert (mult_type,
8382 TREE_OPERAND (op0, 0)),
8383 fold_convert (mult_type,
8384 TREE_OPERAND (op0, 1)));
8385 return fold_convert (type, tem);
8389 tem = fold_convert_const (code, type, op0);
8390 return tem ? tem : NULL_TREE;
8392 case FIXED_CONVERT_EXPR:
8393 tem = fold_convert_const (code, type, arg0);
8394 return tem ? tem : NULL_TREE;
8396 case VIEW_CONVERT_EXPR:
8397 if (TREE_TYPE (op0) == type)
8399 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8400 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8402 /* For integral conversions with the same precision or pointer
8403 conversions use a NOP_EXPR instead. */
8404 if ((INTEGRAL_TYPE_P (type)
8405 || POINTER_TYPE_P (type))
8406 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8407 || POINTER_TYPE_P (TREE_TYPE (op0)))
8408 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
8409 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
8410 a sub-type to its base type as generated by the Ada FE. */
8411 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
8412 && TREE_TYPE (TREE_TYPE (op0))))
8413 return fold_convert (type, op0);
8415 /* Strip inner integral conversions that do not change the precision. */
8416 if (CONVERT_EXPR_P (op0)
8417 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8418 || POINTER_TYPE_P (TREE_TYPE (op0)))
8419 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8420 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8421 && (TYPE_PRECISION (TREE_TYPE (op0))
8422 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8423 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8425 return fold_view_convert_expr (type, op0);
8428 tem = fold_negate_expr (arg0);
8430 return fold_convert (type, tem);
8434 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8435 return fold_abs_const (arg0, type);
8436 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8437 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8438 /* Convert fabs((double)float) into (double)fabsf(float). */
8439 else if (TREE_CODE (arg0) == NOP_EXPR
8440 && TREE_CODE (type) == REAL_TYPE)
8442 tree targ0 = strip_float_extensions (arg0);
8444 return fold_convert (type, fold_build1 (ABS_EXPR,
8448 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8449 else if (TREE_CODE (arg0) == ABS_EXPR)
8451 else if (tree_expr_nonnegative_p (arg0))
8454 /* Strip sign ops from argument. */
8455 if (TREE_CODE (type) == REAL_TYPE)
8457 tem = fold_strip_sign_ops (arg0);
8459 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8464 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8465 return fold_convert (type, arg0);
8466 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8468 tree itype = TREE_TYPE (type);
8469 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8470 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8471 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8473 if (TREE_CODE (arg0) == COMPLEX_CST)
8475 tree itype = TREE_TYPE (type);
8476 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8477 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8478 return build_complex (type, rpart, negate_expr (ipart));
8480 if (TREE_CODE (arg0) == CONJ_EXPR)
8481 return fold_convert (type, TREE_OPERAND (arg0, 0));
8485 if (TREE_CODE (arg0) == INTEGER_CST)
8486 return fold_not_const (arg0, type);
8487 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8488 return fold_convert (type, TREE_OPERAND (arg0, 0));
8489 /* Convert ~ (-A) to A - 1. */
8490 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8491 return fold_build2 (MINUS_EXPR, type,
8492 fold_convert (type, TREE_OPERAND (arg0, 0)),
8493 build_int_cst (type, 1));
8494 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8495 else if (INTEGRAL_TYPE_P (type)
8496 && ((TREE_CODE (arg0) == MINUS_EXPR
8497 && integer_onep (TREE_OPERAND (arg0, 1)))
8498 || (TREE_CODE (arg0) == PLUS_EXPR
8499 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8500 return fold_build1 (NEGATE_EXPR, type,
8501 fold_convert (type, TREE_OPERAND (arg0, 0)));
8502 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8503 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8504 && (tem = fold_unary (BIT_NOT_EXPR, type,
8506 TREE_OPERAND (arg0, 0)))))
8507 return fold_build2 (BIT_XOR_EXPR, type, tem,
8508 fold_convert (type, TREE_OPERAND (arg0, 1)));
8509 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8510 && (tem = fold_unary (BIT_NOT_EXPR, type,
8512 TREE_OPERAND (arg0, 1)))))
8513 return fold_build2 (BIT_XOR_EXPR, type,
8514 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8515 /* Perform BIT_NOT_EXPR on each element individually. */
8516 else if (TREE_CODE (arg0) == VECTOR_CST)
8518 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8519 int count = TYPE_VECTOR_SUBPARTS (type), i;
8521 for (i = 0; i < count; i++)
8525 elem = TREE_VALUE (elements);
8526 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8527 if (elem == NULL_TREE)
8529 elements = TREE_CHAIN (elements);
8532 elem = build_int_cst (TREE_TYPE (type), -1);
8533 list = tree_cons (NULL_TREE, elem, list);
8536 return build_vector (type, nreverse (list));
8541 case TRUTH_NOT_EXPR:
8542 /* The argument to invert_truthvalue must have Boolean type. */
8543 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8544 arg0 = fold_convert (boolean_type_node, arg0);
8546 /* Note that the operand of this must be an int
8547 and its values must be 0 or 1.
8548 ("true" is a fixed value perhaps depending on the language,
8549 but we don't handle values other than 1 correctly yet.) */
8550 tem = fold_truth_not_expr (arg0);
8553 return fold_convert (type, tem);
8556 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8557 return fold_convert (type, arg0);
8558 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8559 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8560 TREE_OPERAND (arg0, 1));
8561 if (TREE_CODE (arg0) == COMPLEX_CST)
8562 return fold_convert (type, TREE_REALPART (arg0));
8563 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8565 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8566 tem = fold_build2 (TREE_CODE (arg0), itype,
8567 fold_build1 (REALPART_EXPR, itype,
8568 TREE_OPERAND (arg0, 0)),
8569 fold_build1 (REALPART_EXPR, itype,
8570 TREE_OPERAND (arg0, 1)));
8571 return fold_convert (type, tem);
8573 if (TREE_CODE (arg0) == CONJ_EXPR)
8575 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8576 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8577 return fold_convert (type, tem);
8579 if (TREE_CODE (arg0) == CALL_EXPR)
8581 tree fn = get_callee_fndecl (arg0);
8582 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8583 switch (DECL_FUNCTION_CODE (fn))
8585 CASE_FLT_FN (BUILT_IN_CEXPI):
8586 fn = mathfn_built_in (type, BUILT_IN_COS);
8588 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8598 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8599 return fold_convert (type, integer_zero_node);
8600 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8601 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8602 TREE_OPERAND (arg0, 0));
8603 if (TREE_CODE (arg0) == COMPLEX_CST)
8604 return fold_convert (type, TREE_IMAGPART (arg0));
8605 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8607 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8608 tem = fold_build2 (TREE_CODE (arg0), itype,
8609 fold_build1 (IMAGPART_EXPR, itype,
8610 TREE_OPERAND (arg0, 0)),
8611 fold_build1 (IMAGPART_EXPR, itype,
8612 TREE_OPERAND (arg0, 1)));
8613 return fold_convert (type, tem);
8615 if (TREE_CODE (arg0) == CONJ_EXPR)
8617 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8618 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8619 return fold_convert (type, negate_expr (tem));
8621 if (TREE_CODE (arg0) == CALL_EXPR)
8623 tree fn = get_callee_fndecl (arg0);
8624 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8625 switch (DECL_FUNCTION_CODE (fn))
8627 CASE_FLT_FN (BUILT_IN_CEXPI):
8628 fn = mathfn_built_in (type, BUILT_IN_SIN);
8630 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8641 } /* switch (code) */
8645 /* If the operation was a conversion do _not_ mark a resulting constant
8646 with TREE_OVERFLOW if the original constant was not. These conversions
8647 have implementation defined behavior and retaining the TREE_OVERFLOW
8648 flag here would confuse later passes such as VRP. */
/* Fold a unary expression CODE of type TYPE with operand OP0, then copy
   the operand's overflow flag onto a constant result of a conversion
   (instead of keeping the flag fold_unary may have set).  */
8650 fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0)
8652 tree res = fold_unary (code, type, op0);
/* Only adjust when both the result and the operand are INTEGER_CSTs and
   CODE is a conversion; NOTE(review): the leading null-check of RES is in
   an elided line here -- confirm against the full source.  */
8654 && TREE_CODE (res) == INTEGER_CST
8655 && TREE_CODE (op0) == INTEGER_CST
8656 && CONVERT_EXPR_CODE_P (code))
8657 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8662 /* Fold a binary expression of code CODE and type TYPE with operands
8663 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8664 Return the folded expression if folding is successful. Otherwise,
8665 return NULL_TREE. */
8668 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
/* COMPL_CODE is the code complementary to CODE: MAX_EXPR for MIN_EXPR
   and vice versa.  Any other CODE cannot form a min/max combination
   (the elided else-branch presumably bails out -- TODO confirm).  */
8670 enum tree_code compl_code;
8672 if (code == MIN_EXPR)
8673 compl_code = MAX_EXPR;
8674 else if (code == MAX_EXPR)
8675 compl_code = MIN_EXPR;
/* The four identities below are written for the MIN (MAX ...) shape;
   via COMPL_CODE they equally cover MAX (MIN ...).  omit_one_operand
   preserves any side effects of the operand that is dropped.  */
8679 /* MIN (MAX (a, b), b) == b. */
8680 if (TREE_CODE (op0) == compl_code
8681 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8682 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
/* The remaining three cases also require reorder_operands_p, since the
   matched operand is not in the same evaluation position.  */
8684 /* MIN (MAX (b, a), b) == b. */
8685 if (TREE_CODE (op0) == compl_code
8686 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8687 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8688 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8690 /* MIN (a, MAX (a, b)) == a. */
8691 if (TREE_CODE (op1) == compl_code
8692 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8693 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8694 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8696 /* MIN (a, MAX (b, a)) == a. */
8697 if (TREE_CODE (op1) == compl_code
8698 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8699 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8700 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8705 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8706 by changing CODE to reduce the magnitude of constants involved in
8707 ARG0 of the comparison.
8708 Returns a canonicalized comparison tree if a simplification was
8709 possible, otherwise returns NULL_TREE.
8710 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8711 valid if signed overflow is undefined. */
8714 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8715 tree arg0, tree arg1,
8716 bool *strict_overflow_p)
8718 enum tree_code code0 = TREE_CODE (arg0);
8719 tree t, cst0 = NULL_TREE;
8723 /* Match A +- CST code arg1 and CST code arg1. We can change the
8724 first form only if overflow is undefined. */
8725 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8726 /* In principle pointers also have undefined overflow behavior,
8727 but that causes problems elsewhere. */
8728 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8729 && (code0 == MINUS_EXPR
8730 || code0 == PLUS_EXPR)
8731 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8732 || code0 == INTEGER_CST))
8735 /* Identify the constant in arg0 and its sign. */
/* CST0 is ARG0 itself when ARG0 is a plain INTEGER_CST, otherwise the
   second operand of the PLUS/MINUS (the then-branch is elided here).  */
8736 if (code0 == INTEGER_CST)
8739 cst0 = TREE_OPERAND (arg0, 1);
8740 sgn0 = tree_int_cst_sgn (cst0);
8742 /* Overflowed constants and zero will cause problems. */
8743 if (integer_zerop (cst0)
8744 || TREE_OVERFLOW (cst0))
8747 /* See if we can reduce the magnitude of the constant in
8748 arg0 by changing the comparison code. */
/* Each accepted case adjusts CODE to its strict/non-strict partner so
   that |CST0| can shrink by one (the new codes are set in elided
   lines following each comment).  */
8749 if (code0 == INTEGER_CST)
8751 /* CST <= arg1 -> CST-1 < arg1. */
8752 if (code == LE_EXPR && sgn0 == 1)
8754 /* -CST < arg1 -> -CST-1 <= arg1. */
8755 else if (code == LT_EXPR && sgn0 == -1)
8757 /* CST > arg1 -> CST-1 >= arg1. */
8758 else if (code == GT_EXPR && sgn0 == 1)
8760 /* -CST >= arg1 -> -CST-1 > arg1. */
8761 else if (code == GE_EXPR && sgn0 == -1)
8765 /* arg1 code' CST' might be more canonical. */
8770 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8772 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8774 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8775 else if (code == GT_EXPR
8776 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8778 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8779 else if (code == LE_EXPR
8780 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8782 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8783 else if (code == GE_EXPR
8784 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
/* The A +- CST forms rely on signed overflow being undefined; tell the
   caller so it can emit a -Wstrict-overflow warning.  */
8788 *strict_overflow_p = true;
8791 /* Now build the constant reduced in magnitude. But not if that
8792 would produce one outside of its types range. */
8793 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8795 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8796 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8798 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8799 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8800 /* We cannot swap the comparison here as that would cause us to
8801 endlessly recurse. */
/* Reduce the magnitude: add one to a negative constant, subtract one
   from a positive one.  */
8804 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8805 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8806 if (code0 != INTEGER_CST)
8807 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8809 /* If swapping might yield to a more canonical form, do so. */
8811 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8813 return fold_build2 (code, type, t, arg1);
8816 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8817 overflow further. Try to decrease the magnitude of constants involved
8818 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8819 and put sole constants at the second argument position.
8820 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8823 maybe_canonicalize_comparison (enum tree_code code, tree type,
8824 tree arg0, tree arg1)
8827 bool strict_overflow_p;
8828 const char * const warnmsg = G_("assuming signed overflow does not occur "
8829 "when reducing constant in comparison");
8831 /* Try canonicalization by simplifying arg0. */
8832 strict_overflow_p = false;
8833 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8834 &strict_overflow_p);
/* On success, warn (if the transform assumed undefined signed overflow)
   and return the simplified tree; the success check/return lines are
   elided here.  */
8837 if (strict_overflow_p)
8838 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8842 /* Try canonicalization by simplifying arg1 using the swapped
/* Second attempt: swap the comparison so ARG1 plays the role of ARG0
   in the helper.  */
8844 code = swap_tree_comparison (code);
8845 strict_overflow_p = false;
8846 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8847 &strict_overflow_p);
8848 if (t && strict_overflow_p)
8849 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8853 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8854 space. This is used to avoid issuing overflow warnings for
8855 expressions like &p->x which can not wrap. */
8858 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8860 unsigned HOST_WIDE_INT offset_low, total_low;
8861 HOST_WIDE_INT size, offset_high, total_high;
/* Non-pointer bases cannot wrap the address space (the early return
   following this check is elided here).  */
8863 if (!POINTER_TYPE_P (TREE_TYPE (base)))
/* A missing offset contributes zero; a non-constant or overflowed
   offset means we must conservatively assume wrapping is possible.  */
8869 if (offset == NULL_TREE)
8874 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8878 offset_low = TREE_INT_CST_LOW (offset);
8879 offset_high = TREE_INT_CST_HIGH (offset);
/* Compute OFFSET + BITPOS/BITS_PER_UNIT in double-word arithmetic;
   NOTE(review): the handling of add_double_with_sign's overflow result
   is in elided lines -- confirm against the full source.  */
8882 if (add_double_with_sign (offset_low, offset_high,
8883 bitpos / BITS_PER_UNIT, 0,
8884 &total_low, &total_high,
8888 if (total_high != 0)
/* SIZE is the size of the pointed-to object in bytes.  */
8891 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8895 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
/* ... actual object: its declared size bounds the valid offset range.  */
8897 if (TREE_CODE (base) == ADDR_EXPR)
8899 HOST_WIDE_INT base_size;
8901 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8902 if (base_size > 0 && size < base_size)
/* Wrap is possible only when the total byte offset exceeds the object
   size.  */
8906 return total_low > (unsigned HOST_WIDE_INT) size;
8909 /* Subroutine of fold_binary. This routine performs all of the
8910 transformations that are common to the equality/inequality
8911 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8912 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8913 fold_binary should call fold_binary. Fold a comparison with
8914 tree code CODE and type TYPE with operands OP0 and OP1. Return
8915 the folded comparison or NULL_TREE. */
8918 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8920 tree arg0, arg1, tem;
8925 STRIP_SIGN_NOPS (arg0);
8926 STRIP_SIGN_NOPS (arg1);
8928 tem = fold_relational_const (code, type, arg0, arg1);
8929 if (tem != NULL_TREE)
8932 /* If one arg is a real or integer constant, put it last. */
8933 if (tree_swap_operands_p (arg0, arg1, true))
8934 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8936 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8937 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8938 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8939 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8940 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8941 && (TREE_CODE (arg1) == INTEGER_CST
8942 && !TREE_OVERFLOW (arg1)))
8944 tree const1 = TREE_OPERAND (arg0, 1);
8946 tree variable = TREE_OPERAND (arg0, 0);
8949 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8951 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8952 TREE_TYPE (arg1), const2, const1);
8954 /* If the constant operation overflowed this can be
8955 simplified as a comparison against INT_MAX/INT_MIN. */
8956 if (TREE_CODE (lhs) == INTEGER_CST
8957 && TREE_OVERFLOW (lhs))
8959 int const1_sgn = tree_int_cst_sgn (const1);
8960 enum tree_code code2 = code;
8962 /* Get the sign of the constant on the lhs if the
8963 operation were VARIABLE + CONST1. */
8964 if (TREE_CODE (arg0) == MINUS_EXPR)
8965 const1_sgn = -const1_sgn;
8967 /* The sign of the constant determines if we overflowed
8968 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8969 Canonicalize to the INT_MIN overflow by swapping the comparison
8971 if (const1_sgn == -1)
8972 code2 = swap_tree_comparison (code);
8974 /* We now can look at the canonicalized case
8975 VARIABLE + 1 CODE2 INT_MIN
8976 and decide on the result. */
8977 if (code2 == LT_EXPR
8979 || code2 == EQ_EXPR)
8980 return omit_one_operand (type, boolean_false_node, variable);
8981 else if (code2 == NE_EXPR
8983 || code2 == GT_EXPR)
8984 return omit_one_operand (type, boolean_true_node, variable);
8987 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8988 && (TREE_CODE (lhs) != INTEGER_CST
8989 || !TREE_OVERFLOW (lhs)))
8991 fold_overflow_warning (("assuming signed overflow does not occur "
8992 "when changing X +- C1 cmp C2 to "
8994 WARN_STRICT_OVERFLOW_COMPARISON);
8995 return fold_build2 (code, type, variable, lhs);
8999 /* For comparisons of pointers we can decompose it to a compile time
9000 comparison of the base objects and the offsets into the object.
9001 This requires at least one operand being an ADDR_EXPR or a
9002 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9003 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9004 && (TREE_CODE (arg0) == ADDR_EXPR
9005 || TREE_CODE (arg1) == ADDR_EXPR
9006 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9007 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9009 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9010 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9011 enum machine_mode mode;
9012 int volatilep, unsignedp;
9013 bool indirect_base0 = false, indirect_base1 = false;
9015 /* Get base and offset for the access. Strip ADDR_EXPR for
9016 get_inner_reference, but put it back by stripping INDIRECT_REF
9017 off the base object if possible. indirect_baseN will be true
9018 if baseN is not an address but refers to the object itself. */
9020 if (TREE_CODE (arg0) == ADDR_EXPR)
9022 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9023 &bitsize, &bitpos0, &offset0, &mode,
9024 &unsignedp, &volatilep, false);
9025 if (TREE_CODE (base0) == INDIRECT_REF)
9026 base0 = TREE_OPERAND (base0, 0);
9028 indirect_base0 = true;
9030 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9032 base0 = TREE_OPERAND (arg0, 0);
9033 offset0 = TREE_OPERAND (arg0, 1);
9037 if (TREE_CODE (arg1) == ADDR_EXPR)
9039 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9040 &bitsize, &bitpos1, &offset1, &mode,
9041 &unsignedp, &volatilep, false);
9042 if (TREE_CODE (base1) == INDIRECT_REF)
9043 base1 = TREE_OPERAND (base1, 0);
9045 indirect_base1 = true;
9047 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9049 base1 = TREE_OPERAND (arg1, 0);
9050 offset1 = TREE_OPERAND (arg1, 1);
9053 /* If we have equivalent bases we might be able to simplify. */
9054 if (indirect_base0 == indirect_base1
9055 && operand_equal_p (base0, base1, 0))
9057 /* We can fold this expression to a constant if the non-constant
9058 offset parts are equal. */
9059 if ((offset0 == offset1
9060 || (offset0 && offset1
9061 && operand_equal_p (offset0, offset1, 0)))
9064 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9069 && bitpos0 != bitpos1
9070 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9071 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9072 fold_overflow_warning (("assuming pointer wraparound does not "
9073 "occur when comparing P +- C1 with "
9075 WARN_STRICT_OVERFLOW_CONDITIONAL);
9080 return constant_boolean_node (bitpos0 == bitpos1, type);
9082 return constant_boolean_node (bitpos0 != bitpos1, type);
9084 return constant_boolean_node (bitpos0 < bitpos1, type);
9086 return constant_boolean_node (bitpos0 <= bitpos1, type);
9088 return constant_boolean_node (bitpos0 >= bitpos1, type);
9090 return constant_boolean_node (bitpos0 > bitpos1, type);
9094 /* We can simplify the comparison to a comparison of the variable
9095 offset parts if the constant offset parts are equal.
9096 Be careful to use signed size type here because otherwise we
9097 mess with array offsets in the wrong way. This is possible
9098 because pointer arithmetic is restricted to retain within an
9099 object and overflow on pointer differences is undefined as of
9100 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9101 else if (bitpos0 == bitpos1
9102 && ((code == EQ_EXPR || code == NE_EXPR)
9103 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9105 tree signed_size_type_node;
9106 signed_size_type_node = signed_type_for (size_type_node);
9108 /* By converting to signed size type we cover middle-end pointer
9109 arithmetic which operates on unsigned pointer types of size
9110 type size and ARRAY_REF offsets which are properly sign or
9111 zero extended from their type in case it is narrower than
9113 if (offset0 == NULL_TREE)
9114 offset0 = build_int_cst (signed_size_type_node, 0);
9116 offset0 = fold_convert (signed_size_type_node, offset0);
9117 if (offset1 == NULL_TREE)
9118 offset1 = build_int_cst (signed_size_type_node, 0);
9120 offset1 = fold_convert (signed_size_type_node, offset1);
9124 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9125 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9126 fold_overflow_warning (("assuming pointer wraparound does not "
9127 "occur when comparing P +- C1 with "
9129 WARN_STRICT_OVERFLOW_COMPARISON);
9131 return fold_build2 (code, type, offset0, offset1);
9134 /* For non-equal bases we can simplify if they are addresses
9135 of local binding decls or constants. */
9136 else if (indirect_base0 && indirect_base1
9137 /* We know that !operand_equal_p (base0, base1, 0)
9138 because the if condition was false. But make
9139 sure two decls are not the same. */
9141 && TREE_CODE (arg0) == ADDR_EXPR
9142 && TREE_CODE (arg1) == ADDR_EXPR
9143 && (((TREE_CODE (base0) == VAR_DECL
9144 || TREE_CODE (base0) == PARM_DECL)
9145 && (targetm.binds_local_p (base0)
9146 || CONSTANT_CLASS_P (base1)))
9147 || CONSTANT_CLASS_P (base0))
9148 && (((TREE_CODE (base1) == VAR_DECL
9149 || TREE_CODE (base1) == PARM_DECL)
9150 && (targetm.binds_local_p (base1)
9151 || CONSTANT_CLASS_P (base0)))
9152 || CONSTANT_CLASS_P (base1)))
9154 if (code == EQ_EXPR)
9155 return omit_two_operands (type, boolean_false_node, arg0, arg1);
9156 else if (code == NE_EXPR)
9157 return omit_two_operands (type, boolean_true_node, arg0, arg1);
9159 /* For equal offsets we can simplify to a comparison of the
9161 else if (bitpos0 == bitpos1
9163 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9165 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9166 && ((offset0 == offset1)
9167 || (offset0 && offset1
9168 && operand_equal_p (offset0, offset1, 0))))
9171 base0 = fold_addr_expr (base0);
9173 base1 = fold_addr_expr (base1);
9174 return fold_build2 (code, type, base0, base1);
9178 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9179 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9180 the resulting offset is smaller in absolute value than the
9182 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9183 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9184 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9185 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9186 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9187 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9188 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9190 tree const1 = TREE_OPERAND (arg0, 1);
9191 tree const2 = TREE_OPERAND (arg1, 1);
9192 tree variable1 = TREE_OPERAND (arg0, 0);
9193 tree variable2 = TREE_OPERAND (arg1, 0);
9195 const char * const warnmsg = G_("assuming signed overflow does not "
9196 "occur when combining constants around "
9199 /* Put the constant on the side where it doesn't overflow and is
9200 of lower absolute value than before. */
9201 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9202 ? MINUS_EXPR : PLUS_EXPR,
9204 if (!TREE_OVERFLOW (cst)
9205 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9207 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9208 return fold_build2 (code, type,
9210 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
9214 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9215 ? MINUS_EXPR : PLUS_EXPR,
9217 if (!TREE_OVERFLOW (cst)
9218 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9220 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9221 return fold_build2 (code, type,
9222 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
9228 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9229 signed arithmetic case. That form is created by the compiler
9230 often enough for folding it to be of value. One example is in
9231 computing loop trip counts after Operator Strength Reduction. */
9232 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9233 && TREE_CODE (arg0) == MULT_EXPR
9234 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9235 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9236 && integer_zerop (arg1))
9238 tree const1 = TREE_OPERAND (arg0, 1);
9239 tree const2 = arg1; /* zero */
9240 tree variable1 = TREE_OPERAND (arg0, 0);
9241 enum tree_code cmp_code = code;
9243 gcc_assert (!integer_zerop (const1));
9245 fold_overflow_warning (("assuming signed overflow does not occur when "
9246 "eliminating multiplication in comparison "
9248 WARN_STRICT_OVERFLOW_COMPARISON);
9250 /* If const1 is negative we swap the sense of the comparison. */
9251 if (tree_int_cst_sgn (const1) < 0)
9252 cmp_code = swap_tree_comparison (cmp_code);
9254 return fold_build2 (cmp_code, type, variable1, const2);
9257 tem = maybe_canonicalize_comparison (code, type, op0, op1);
9261 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9263 tree targ0 = strip_float_extensions (arg0);
9264 tree targ1 = strip_float_extensions (arg1);
9265 tree newtype = TREE_TYPE (targ0);
9267 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9268 newtype = TREE_TYPE (targ1);
9270 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9271 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9272 return fold_build2 (code, type, fold_convert (newtype, targ0),
9273 fold_convert (newtype, targ1));
9275 /* (-a) CMP (-b) -> b CMP a */
9276 if (TREE_CODE (arg0) == NEGATE_EXPR
9277 && TREE_CODE (arg1) == NEGATE_EXPR)
9278 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9279 TREE_OPERAND (arg0, 0));
9281 if (TREE_CODE (arg1) == REAL_CST)
9283 REAL_VALUE_TYPE cst;
9284 cst = TREE_REAL_CST (arg1);
9286 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9287 if (TREE_CODE (arg0) == NEGATE_EXPR)
9288 return fold_build2 (swap_tree_comparison (code), type,
9289 TREE_OPERAND (arg0, 0),
9290 build_real (TREE_TYPE (arg1),
9291 REAL_VALUE_NEGATE (cst)));
9293 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9294 /* a CMP (-0) -> a CMP 0 */
9295 if (REAL_VALUE_MINUS_ZERO (cst))
9296 return fold_build2 (code, type, arg0,
9297 build_real (TREE_TYPE (arg1), dconst0));
9299 /* x != NaN is always true, other ops are always false. */
9300 if (REAL_VALUE_ISNAN (cst)
9301 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9303 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9304 return omit_one_operand (type, tem, arg0);
9307 /* Fold comparisons against infinity. */
9308 if (REAL_VALUE_ISINF (cst)
9309 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9311 tem = fold_inf_compare (code, type, arg0, arg1);
9312 if (tem != NULL_TREE)
9317 /* If this is a comparison of a real constant with a PLUS_EXPR
9318 or a MINUS_EXPR of a real constant, we can convert it into a
9319 comparison with a revised real constant as long as no overflow
9320 occurs when unsafe_math_optimizations are enabled. */
9321 if (flag_unsafe_math_optimizations
9322 && TREE_CODE (arg1) == REAL_CST
9323 && (TREE_CODE (arg0) == PLUS_EXPR
9324 || TREE_CODE (arg0) == MINUS_EXPR)
9325 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9326 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9327 ? MINUS_EXPR : PLUS_EXPR,
9328 arg1, TREE_OPERAND (arg0, 1), 0))
9329 && !TREE_OVERFLOW (tem))
9330 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9332 /* Likewise, we can simplify a comparison of a real constant with
9333 a MINUS_EXPR whose first operand is also a real constant, i.e.
9334 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9335 floating-point types only if -fassociative-math is set. */
9336 if (flag_associative_math
9337 && TREE_CODE (arg1) == REAL_CST
9338 && TREE_CODE (arg0) == MINUS_EXPR
9339 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9340 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9342 && !TREE_OVERFLOW (tem))
9343 return fold_build2 (swap_tree_comparison (code), type,
9344 TREE_OPERAND (arg0, 1), tem);
9346 /* Fold comparisons against built-in math functions. */
9347 if (TREE_CODE (arg1) == REAL_CST
9348 && flag_unsafe_math_optimizations
9349 && ! flag_errno_math)
9351 enum built_in_function fcode = builtin_mathfn_code (arg0);
9353 if (fcode != END_BUILTINS)
9355 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9356 if (tem != NULL_TREE)
9362 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9363 && CONVERT_EXPR_P (arg0))
9365 /* If we are widening one operand of an integer comparison,
9366 see if the other operand is similarly being widened. Perhaps we
9367 can do the comparison in the narrower type. */
9368 tem = fold_widened_comparison (code, type, arg0, arg1);
9372 /* Or if we are changing signedness. */
9373 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9378 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9379 constant, we can simplify it. */
9380 if (TREE_CODE (arg1) == INTEGER_CST
9381 && (TREE_CODE (arg0) == MIN_EXPR
9382 || TREE_CODE (arg0) == MAX_EXPR)
9383 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9385 tem = optimize_minmax_comparison (code, type, op0, op1);
9390 /* Simplify comparison of something with itself. (For IEEE
9391 floating-point, we can only do some of these simplifications.) */
9392 if (operand_equal_p (arg0, arg1, 0))
9397 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9398 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9399 return constant_boolean_node (1, type);
9404 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9405 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9406 return constant_boolean_node (1, type);
9407 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9410 /* For NE, we can only do this simplification if integer
9411 or we don't honor IEEE floating point NaNs. */
9412 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9413 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9415 /* ... fall through ... */
9418 return constant_boolean_node (0, type);
9424 /* If we are comparing an expression that just has comparisons
9425 of two integer values, arithmetic expressions of those comparisons,
9426 and constants, we can simplify it. There are only three cases
9427 to check: the two values can either be equal, the first can be
9428 greater, or the second can be greater. Fold the expression for
9429 those three values. Since each value must be 0 or 1, we have
9430 eight possibilities, each of which corresponds to the constant 0
9431 or 1 or one of the six possible comparisons.
9433 This handles common cases like (a > b) == 0 but also handles
9434 expressions like ((x > y) - (y > x)) > 0, which supposedly
9435 occur in macroized code. */
9437 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9439 tree cval1 = 0, cval2 = 0;
9442 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9443 /* Don't handle degenerate cases here; they should already
9444 have been handled anyway. */
9445 && cval1 != 0 && cval2 != 0
9446 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9447 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9448 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9449 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9450 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9451 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9452 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9454 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9455 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9457 /* We can't just pass T to eval_subst in case cval1 or cval2
9458 was the same as ARG1. */
9461 = fold_build2 (code, type,
9462 eval_subst (arg0, cval1, maxval,
9466 = fold_build2 (code, type,
9467 eval_subst (arg0, cval1, maxval,
9471 = fold_build2 (code, type,
9472 eval_subst (arg0, cval1, minval,
9476 /* All three of these results should be 0 or 1. Confirm they are.
9477 Then use those values to select the proper code to use. */
9479 if (TREE_CODE (high_result) == INTEGER_CST
9480 && TREE_CODE (equal_result) == INTEGER_CST
9481 && TREE_CODE (low_result) == INTEGER_CST)
9483 /* Make a 3-bit mask with the high-order bit being the
9484 value for `>', the next for '=', and the low for '<'. */
9485 switch ((integer_onep (high_result) * 4)
9486 + (integer_onep (equal_result) * 2)
9487 + integer_onep (low_result))
9491 return omit_one_operand (type, integer_zero_node, arg0);
9512 return omit_one_operand (type, integer_one_node, arg0);
9516 return save_expr (build2 (code, type, cval1, cval2));
9517 return fold_build2 (code, type, cval1, cval2);
9522 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9523 into a single range test. */
9524 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9525 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9526 && TREE_CODE (arg1) == INTEGER_CST
9527 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9528 && !integer_zerop (TREE_OPERAND (arg0, 1))
9529 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9530 && !TREE_OVERFLOW (arg1))
9532 tem = fold_div_compare (code, type, arg0, arg1);
9533 if (tem != NULL_TREE)
9537 /* Fold ~X op ~Y as Y op X. */
9538 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9539 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9541 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9542 return fold_build2 (code, type,
9543 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9544 TREE_OPERAND (arg0, 0));
9547 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9548 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9549 && TREE_CODE (arg1) == INTEGER_CST)
9551 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9552 return fold_build2 (swap_tree_comparison (code), type,
9553 TREE_OPERAND (arg0, 0),
9554 fold_build1 (BIT_NOT_EXPR, cmp_type,
9555 fold_convert (cmp_type, arg1)));
9562 /* Subroutine of fold_binary. Optimize complex multiplications of the
9563 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9564 argument EXPR represents the expression "z" of type TYPE. */
9567 fold_mult_zconjz (tree type, tree expr)
9569 tree itype = TREE_TYPE (type);
9570 tree rpart, ipart, tem;
9572 if (TREE_CODE (expr) == COMPLEX_EXPR)
9574 rpart = TREE_OPERAND (expr, 0);
9575 ipart = TREE_OPERAND (expr, 1);
9577 else if (TREE_CODE (expr) == COMPLEX_CST)
9579 rpart = TREE_REALPART (expr);
9580 ipart = TREE_IMAGPART (expr);
9584 expr = save_expr (expr);
9585 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9586 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9589 rpart = save_expr (rpart);
9590 ipart = save_expr (ipart);
9591 tem = fold_build2 (PLUS_EXPR, itype,
9592 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9593 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9594 return fold_build2 (COMPLEX_EXPR, type, tem,
9595 fold_convert (itype, integer_zero_node));
9599 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9600 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9601 guarantees that P and N have the same least significant log2(M) bits.
9602 N is not otherwise constrained. In particular, N is not normalized to
9603 0 <= N < M as is common. In general, the precise value of P is unknown.
9604 M is chosen as large as possible such that constant N can be determined.
9606 Returns M and sets *RESIDUE to N. */
9608 static unsigned HOST_WIDE_INT
9609 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9611 enum tree_code code;
9615 code = TREE_CODE (expr);
9616 if (code == ADDR_EXPR)
9618 expr = TREE_OPERAND (expr, 0);
9619 if (handled_component_p (expr))
9621 HOST_WIDE_INT bitsize, bitpos;
9623 enum machine_mode mode;
9624 int unsignedp, volatilep;
9626 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9627 &mode, &unsignedp, &volatilep, false);
9628 *residue = bitpos / BITS_PER_UNIT;
9631 if (TREE_CODE (offset) == INTEGER_CST)
9632 *residue += TREE_INT_CST_LOW (offset);
9634 /* We don't handle more complicated offset expressions. */
9639 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9640 return DECL_ALIGN_UNIT (expr);
9642 else if (code == POINTER_PLUS_EXPR)
9645 unsigned HOST_WIDE_INT modulus;
9646 enum tree_code inner_code;
9648 op0 = TREE_OPERAND (expr, 0);
9650 modulus = get_pointer_modulus_and_residue (op0, residue);
9652 op1 = TREE_OPERAND (expr, 1);
9654 inner_code = TREE_CODE (op1);
9655 if (inner_code == INTEGER_CST)
9657 *residue += TREE_INT_CST_LOW (op1);
9660 else if (inner_code == MULT_EXPR)
9662 op1 = TREE_OPERAND (op1, 1);
9663 if (TREE_CODE (op1) == INTEGER_CST)
9665 unsigned HOST_WIDE_INT align;
9667 /* Compute the greatest power-of-2 divisor of op1. */
9668 align = TREE_INT_CST_LOW (op1);
9671 /* If align is non-zero and less than *modulus, replace
9672 *modulus with align., If align is 0, then either op1 is 0
9673 or the greatest power-of-2 divisor of op1 doesn't fit in an
9674 unsigned HOST_WIDE_INT. In either case, no additional
9675 constraint is imposed. */
9677 modulus = MIN (modulus, align);
9684 /* If we get here, we were unable to determine anything useful about the
9690 /* Fold a binary expression of code CODE and type TYPE with operands
9691 OP0 and OP1. Return the folded expression if folding is
9692 successful. Otherwise, return NULL_TREE. */
9695 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9697 enum tree_code_class kind = TREE_CODE_CLASS (code);
9698 tree arg0, arg1, tem;
9699 tree t1 = NULL_TREE;
9700 bool strict_overflow_p;
9702 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9703 && TREE_CODE_LENGTH (code) == 2
9705 && op1 != NULL_TREE);
9710 /* Strip any conversions that don't change the mode. This is
9711 safe for every expression, except for a comparison expression
9712 because its signedness is derived from its operands. So, in
9713 the latter case, only strip conversions that don't change the
9714 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9717 Note that this is done as an internal manipulation within the
9718 constant folder, in order to find the simplest representation
9719 of the arguments so that their form can be studied. In any
9720 cases, the appropriate type conversions should be put back in
9721 the tree that will get out of the constant folder. */
9723 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9725 STRIP_SIGN_NOPS (arg0);
9726 STRIP_SIGN_NOPS (arg1);
9734 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9735 constant but we can't do arithmetic on them. */
9736 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9737 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9738 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9739 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9740 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9741 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9743 if (kind == tcc_binary)
9745 /* Make sure type and arg0 have the same saturating flag. */
9746 gcc_assert (TYPE_SATURATING (type)
9747 == TYPE_SATURATING (TREE_TYPE (arg0)));
9748 tem = const_binop (code, arg0, arg1, 0);
9750 else if (kind == tcc_comparison)
9751 tem = fold_relational_const (code, type, arg0, arg1);
9755 if (tem != NULL_TREE)
9757 if (TREE_TYPE (tem) != type)
9758 tem = fold_convert (type, tem);
9763 /* If this is a commutative operation, and ARG0 is a constant, move it
9764 to ARG1 to reduce the number of tests below. */
9765 if (commutative_tree_code (code)
9766 && tree_swap_operands_p (arg0, arg1, true))
9767 return fold_build2 (code, type, op1, op0);
9769 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9771 First check for cases where an arithmetic operation is applied to a
9772 compound, conditional, or comparison operation. Push the arithmetic
9773 operation inside the compound or conditional to see if any folding
9774 can then be done. Convert comparison to conditional for this purpose.
9775 The also optimizes non-constant cases that used to be done in
9778 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9779 one of the operands is a comparison and the other is a comparison, a
9780 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9781 code below would make the expression more complex. Change it to a
9782 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9783 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9785 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9786 || code == EQ_EXPR || code == NE_EXPR)
9787 && ((truth_value_p (TREE_CODE (arg0))
9788 && (truth_value_p (TREE_CODE (arg1))
9789 || (TREE_CODE (arg1) == BIT_AND_EXPR
9790 && integer_onep (TREE_OPERAND (arg1, 1)))))
9791 || (truth_value_p (TREE_CODE (arg1))
9792 && (truth_value_p (TREE_CODE (arg0))
9793 || (TREE_CODE (arg0) == BIT_AND_EXPR
9794 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9796 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9797 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9800 fold_convert (boolean_type_node, arg0),
9801 fold_convert (boolean_type_node, arg1));
9803 if (code == EQ_EXPR)
9804 tem = invert_truthvalue (tem);
9806 return fold_convert (type, tem);
9809 if (TREE_CODE_CLASS (code) == tcc_binary
9810 || TREE_CODE_CLASS (code) == tcc_comparison)
9812 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9813 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9814 fold_build2 (code, type,
9815 fold_convert (TREE_TYPE (op0),
9816 TREE_OPERAND (arg0, 1)),
9818 if (TREE_CODE (arg1) == COMPOUND_EXPR
9819 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9820 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9821 fold_build2 (code, type, op0,
9822 fold_convert (TREE_TYPE (op1),
9823 TREE_OPERAND (arg1, 1))));
9825 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9827 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9829 /*cond_first_p=*/1);
9830 if (tem != NULL_TREE)
9834 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9836 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9838 /*cond_first_p=*/0);
9839 if (tem != NULL_TREE)
9846 case POINTER_PLUS_EXPR:
9847 /* 0 +p index -> (type)index */
9848 if (integer_zerop (arg0))
9849 return non_lvalue (fold_convert (type, arg1));
9851 /* PTR +p 0 -> PTR */
9852 if (integer_zerop (arg1))
9853 return non_lvalue (fold_convert (type, arg0));
9855 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9856 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9857 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9858 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9859 fold_convert (sizetype, arg1),
9860 fold_convert (sizetype, arg0)));
9862 /* index +p PTR -> PTR +p index */
9863 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9864 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9865 return fold_build2 (POINTER_PLUS_EXPR, type,
9866 fold_convert (type, arg1),
9867 fold_convert (sizetype, arg0));
9869 /* (PTR +p B) +p A -> PTR +p (B + A) */
9870 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9873 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9874 tree arg00 = TREE_OPERAND (arg0, 0);
9875 inner = fold_build2 (PLUS_EXPR, sizetype,
9876 arg01, fold_convert (sizetype, arg1));
9877 return fold_convert (type,
9878 fold_build2 (POINTER_PLUS_EXPR,
9879 TREE_TYPE (arg00), arg00, inner));
9882 /* PTR_CST +p CST -> CST1 */
9883 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9884 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9886 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9887 of the array. Loop optimizer sometimes produce this type of
9889 if (TREE_CODE (arg0) == ADDR_EXPR)
9891 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9893 return fold_convert (type, tem);
9899 /* A + (-B) -> A - B */
9900 if (TREE_CODE (arg1) == NEGATE_EXPR)
9901 return fold_build2 (MINUS_EXPR, type,
9902 fold_convert (type, arg0),
9903 fold_convert (type, TREE_OPERAND (arg1, 0)));
9904 /* (-A) + B -> B - A */
9905 if (TREE_CODE (arg0) == NEGATE_EXPR
9906 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9907 return fold_build2 (MINUS_EXPR, type,
9908 fold_convert (type, arg1),
9909 fold_convert (type, TREE_OPERAND (arg0, 0)));
9911 if (INTEGRAL_TYPE_P (type))
9913 /* Convert ~A + 1 to -A. */
9914 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9915 && integer_onep (arg1))
9916 return fold_build1 (NEGATE_EXPR, type,
9917 fold_convert (type, TREE_OPERAND (arg0, 0)));
9920 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9921 && !TYPE_OVERFLOW_TRAPS (type))
9923 tree tem = TREE_OPERAND (arg0, 0);
9926 if (operand_equal_p (tem, arg1, 0))
9928 t1 = build_int_cst_type (type, -1);
9929 return omit_one_operand (type, t1, arg1);
9934 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9935 && !TYPE_OVERFLOW_TRAPS (type))
9937 tree tem = TREE_OPERAND (arg1, 0);
9940 if (operand_equal_p (arg0, tem, 0))
9942 t1 = build_int_cst_type (type, -1);
9943 return omit_one_operand (type, t1, arg0);
9947 /* X + (X / CST) * -CST is X % CST. */
9948 if (TREE_CODE (arg1) == MULT_EXPR
9949 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9950 && operand_equal_p (arg0,
9951 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9953 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9954 tree cst1 = TREE_OPERAND (arg1, 1);
9955 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9956 if (sum && integer_zerop (sum))
9957 return fold_convert (type,
9958 fold_build2 (TRUNC_MOD_EXPR,
9959 TREE_TYPE (arg0), arg0, cst0));
9963 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9964 same or one. Make sure type is not saturating.
9965 fold_plusminus_mult_expr will re-associate. */
9966 if ((TREE_CODE (arg0) == MULT_EXPR
9967 || TREE_CODE (arg1) == MULT_EXPR)
9968 && !TYPE_SATURATING (type)
9969 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9971 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9976 if (! FLOAT_TYPE_P (type))
9978 if (integer_zerop (arg1))
9979 return non_lvalue (fold_convert (type, arg0));
9981 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9982 with a constant, and the two constants have no bits in common,
9983 we should treat this as a BIT_IOR_EXPR since this may produce more
9985 if (TREE_CODE (arg0) == BIT_AND_EXPR
9986 && TREE_CODE (arg1) == BIT_AND_EXPR
9987 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9988 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9989 && integer_zerop (const_binop (BIT_AND_EXPR,
9990 TREE_OPERAND (arg0, 1),
9991 TREE_OPERAND (arg1, 1), 0)))
9993 code = BIT_IOR_EXPR;
9997 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9998 (plus (plus (mult) (mult)) (foo)) so that we can
9999 take advantage of the factoring cases below. */
10000 if (((TREE_CODE (arg0) == PLUS_EXPR
10001 || TREE_CODE (arg0) == MINUS_EXPR)
10002 && TREE_CODE (arg1) == MULT_EXPR)
10003 || ((TREE_CODE (arg1) == PLUS_EXPR
10004 || TREE_CODE (arg1) == MINUS_EXPR)
10005 && TREE_CODE (arg0) == MULT_EXPR))
10007 tree parg0, parg1, parg, marg;
10008 enum tree_code pcode;
10010 if (TREE_CODE (arg1) == MULT_EXPR)
10011 parg = arg0, marg = arg1;
10013 parg = arg1, marg = arg0;
10014 pcode = TREE_CODE (parg);
10015 parg0 = TREE_OPERAND (parg, 0);
10016 parg1 = TREE_OPERAND (parg, 1);
10017 STRIP_NOPS (parg0);
10018 STRIP_NOPS (parg1);
10020 if (TREE_CODE (parg0) == MULT_EXPR
10021 && TREE_CODE (parg1) != MULT_EXPR)
10022 return fold_build2 (pcode, type,
10023 fold_build2 (PLUS_EXPR, type,
10024 fold_convert (type, parg0),
10025 fold_convert (type, marg)),
10026 fold_convert (type, parg1));
10027 if (TREE_CODE (parg0) != MULT_EXPR
10028 && TREE_CODE (parg1) == MULT_EXPR)
10029 return fold_build2 (PLUS_EXPR, type,
10030 fold_convert (type, parg0),
10031 fold_build2 (pcode, type,
10032 fold_convert (type, marg),
10033 fold_convert (type,
10039 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10040 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10041 return non_lvalue (fold_convert (type, arg0));
10043 /* Likewise if the operands are reversed. */
10044 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10045 return non_lvalue (fold_convert (type, arg1));
10047 /* Convert X + -C into X - C. */
10048 if (TREE_CODE (arg1) == REAL_CST
10049 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10051 tem = fold_negate_const (arg1, type);
10052 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10053 return fold_build2 (MINUS_EXPR, type,
10054 fold_convert (type, arg0),
10055 fold_convert (type, tem));
10058 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10059 to __complex__ ( x, y ). This is not the same for SNaNs or
10060 if signed zeros are involved. */
10061 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10062 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10063 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10065 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10066 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10067 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10068 bool arg0rz = false, arg0iz = false;
10069 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10070 || (arg0i && (arg0iz = real_zerop (arg0i))))
10072 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10073 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10074 if (arg0rz && arg1i && real_zerop (arg1i))
10076 tree rp = arg1r ? arg1r
10077 : build1 (REALPART_EXPR, rtype, arg1);
10078 tree ip = arg0i ? arg0i
10079 : build1 (IMAGPART_EXPR, rtype, arg0);
10080 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10082 else if (arg0iz && arg1r && real_zerop (arg1r))
10084 tree rp = arg0r ? arg0r
10085 : build1 (REALPART_EXPR, rtype, arg0);
10086 tree ip = arg1i ? arg1i
10087 : build1 (IMAGPART_EXPR, rtype, arg1);
10088 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10093 if (flag_unsafe_math_optimizations
10094 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10095 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10096 && (tem = distribute_real_division (code, type, arg0, arg1)))
10099 /* Convert x+x into x*2.0. */
10100 if (operand_equal_p (arg0, arg1, 0)
10101 && SCALAR_FLOAT_TYPE_P (type))
10102 return fold_build2 (MULT_EXPR, type, arg0,
10103 build_real (type, dconst2));
10105 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10106 We associate floats only if the user has specified
10107 -fassociative-math. */
10108 if (flag_associative_math
10109 && TREE_CODE (arg1) == PLUS_EXPR
10110 && TREE_CODE (arg0) != MULT_EXPR)
10112 tree tree10 = TREE_OPERAND (arg1, 0);
10113 tree tree11 = TREE_OPERAND (arg1, 1);
10114 if (TREE_CODE (tree11) == MULT_EXPR
10115 && TREE_CODE (tree10) == MULT_EXPR)
10118 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
10119 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
10122 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10123 We associate floats only if the user has specified
10124 -fassociative-math. */
10125 if (flag_associative_math
10126 && TREE_CODE (arg0) == PLUS_EXPR
10127 && TREE_CODE (arg1) != MULT_EXPR)
10129 tree tree00 = TREE_OPERAND (arg0, 0);
10130 tree tree01 = TREE_OPERAND (arg0, 1);
10131 if (TREE_CODE (tree01) == MULT_EXPR
10132 && TREE_CODE (tree00) == MULT_EXPR)
10135 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
10136 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
10142 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10143 is a rotate of A by C1 bits. */
10144 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10145 is a rotate of A by B bits. */
10147 enum tree_code code0, code1;
10149 code0 = TREE_CODE (arg0);
10150 code1 = TREE_CODE (arg1);
10151 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10152 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10153 && operand_equal_p (TREE_OPERAND (arg0, 0),
10154 TREE_OPERAND (arg1, 0), 0)
10155 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10156 TYPE_UNSIGNED (rtype))
10157 /* Only create rotates in complete modes. Other cases are not
10158 expanded properly. */
10159 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10161 tree tree01, tree11;
10162 enum tree_code code01, code11;
10164 tree01 = TREE_OPERAND (arg0, 1);
10165 tree11 = TREE_OPERAND (arg1, 1);
10166 STRIP_NOPS (tree01);
10167 STRIP_NOPS (tree11);
10168 code01 = TREE_CODE (tree01);
10169 code11 = TREE_CODE (tree11);
10170 if (code01 == INTEGER_CST
10171 && code11 == INTEGER_CST
10172 && TREE_INT_CST_HIGH (tree01) == 0
10173 && TREE_INT_CST_HIGH (tree11) == 0
10174 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10175 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10176 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
10177 code0 == LSHIFT_EXPR ? tree01 : tree11);
10178 else if (code11 == MINUS_EXPR)
10180 tree tree110, tree111;
10181 tree110 = TREE_OPERAND (tree11, 0);
10182 tree111 = TREE_OPERAND (tree11, 1);
10183 STRIP_NOPS (tree110);
10184 STRIP_NOPS (tree111);
10185 if (TREE_CODE (tree110) == INTEGER_CST
10186 && 0 == compare_tree_int (tree110,
10188 (TREE_TYPE (TREE_OPERAND
10190 && operand_equal_p (tree01, tree111, 0))
10191 return build2 ((code0 == LSHIFT_EXPR
10194 type, TREE_OPERAND (arg0, 0), tree01);
10196 else if (code01 == MINUS_EXPR)
10198 tree tree010, tree011;
10199 tree010 = TREE_OPERAND (tree01, 0);
10200 tree011 = TREE_OPERAND (tree01, 1);
10201 STRIP_NOPS (tree010);
10202 STRIP_NOPS (tree011);
10203 if (TREE_CODE (tree010) == INTEGER_CST
10204 && 0 == compare_tree_int (tree010,
10206 (TREE_TYPE (TREE_OPERAND
10208 && operand_equal_p (tree11, tree011, 0))
10209 return build2 ((code0 != LSHIFT_EXPR
10212 type, TREE_OPERAND (arg0, 0), tree11);
10218 /* In most languages, can't associate operations on floats through
10219 parentheses. Rather than remember where the parentheses were, we
10220 don't associate floats at all, unless the user has specified
10221 -fassociative-math.
10222 And, we need to make sure type is not saturating. */
10224 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10225 && !TYPE_SATURATING (type))
10227 tree var0, con0, lit0, minus_lit0;
10228 tree var1, con1, lit1, minus_lit1;
10231 /* Split both trees into variables, constants, and literals. Then
10232 associate each group together, the constants with literals,
10233 then the result with variables. This increases the chances of
10234 literals being recombined later and of generating relocatable
10235 expressions for the sum of a constant and literal. */
10236 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10237 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10238 code == MINUS_EXPR);
10240 /* With undefined overflow we can only associate constants
10241 with one variable. */
10242 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10243 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10249 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10250 tmp0 = TREE_OPERAND (tmp0, 0);
10251 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10252 tmp1 = TREE_OPERAND (tmp1, 0);
10253 /* The only case we can still associate with two variables
10254 is if they are the same, modulo negation. */
10255 if (!operand_equal_p (tmp0, tmp1, 0))
10259 /* Only do something if we found more than two objects. Otherwise,
10260 nothing has changed and we risk infinite recursion. */
10262 && (2 < ((var0 != 0) + (var1 != 0)
10263 + (con0 != 0) + (con1 != 0)
10264 + (lit0 != 0) + (lit1 != 0)
10265 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10267 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10268 if (code == MINUS_EXPR)
10271 var0 = associate_trees (var0, var1, code, type);
10272 con0 = associate_trees (con0, con1, code, type);
10273 lit0 = associate_trees (lit0, lit1, code, type);
10274 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10276 /* Preserve the MINUS_EXPR if the negative part of the literal is
10277 greater than the positive part. Otherwise, the multiplicative
10278 folding code (i.e extract_muldiv) may be fooled in case
10279 unsigned constants are subtracted, like in the following
10280 example: ((X*2 + 4) - 8U)/2. */
10281 if (minus_lit0 && lit0)
10283 if (TREE_CODE (lit0) == INTEGER_CST
10284 && TREE_CODE (minus_lit0) == INTEGER_CST
10285 && tree_int_cst_lt (lit0, minus_lit0))
10287 minus_lit0 = associate_trees (minus_lit0, lit0,
10293 lit0 = associate_trees (lit0, minus_lit0,
10301 return fold_convert (type,
10302 associate_trees (var0, minus_lit0,
10303 MINUS_EXPR, type));
10306 con0 = associate_trees (con0, minus_lit0,
10308 return fold_convert (type,
10309 associate_trees (var0, con0,
10314 con0 = associate_trees (con0, lit0, code, type);
10315 return fold_convert (type, associate_trees (var0, con0,
10323 /* Pointer simplifications for subtraction, simple reassociations. */
10324 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10326 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10327 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10328 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10330 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10331 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10332 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10333 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10334 return fold_build2 (PLUS_EXPR, type,
10335 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10336 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10338 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10339 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10341 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10342 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10343 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10345 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10348 /* A - (-B) -> A + B */
10349 if (TREE_CODE (arg1) == NEGATE_EXPR)
10350 return fold_build2 (PLUS_EXPR, type, op0,
10351 fold_convert (type, TREE_OPERAND (arg1, 0)));
10352 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10353 if (TREE_CODE (arg0) == NEGATE_EXPR
10354 && (FLOAT_TYPE_P (type)
10355 || INTEGRAL_TYPE_P (type))
10356 && negate_expr_p (arg1)
10357 && reorder_operands_p (arg0, arg1))
10358 return fold_build2 (MINUS_EXPR, type,
10359 fold_convert (type, negate_expr (arg1)),
10360 fold_convert (type, TREE_OPERAND (arg0, 0)));
10361 /* Convert -A - 1 to ~A. */
10362 if (INTEGRAL_TYPE_P (type)
10363 && TREE_CODE (arg0) == NEGATE_EXPR
10364 && integer_onep (arg1)
10365 && !TYPE_OVERFLOW_TRAPS (type))
10366 return fold_build1 (BIT_NOT_EXPR, type,
10367 fold_convert (type, TREE_OPERAND (arg0, 0)));
10369 /* Convert -1 - A to ~A. */
10370 if (INTEGRAL_TYPE_P (type)
10371 && integer_all_onesp (arg0))
10372 return fold_build1 (BIT_NOT_EXPR, type, op1);
10375 /* X - (X / CST) * CST is X % CST. */
10376 if (INTEGRAL_TYPE_P (type)
10377 && TREE_CODE (arg1) == MULT_EXPR
10378 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10379 && operand_equal_p (arg0,
10380 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10381 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10382 TREE_OPERAND (arg1, 1), 0))
10383 return fold_convert (type,
10384 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10385 arg0, TREE_OPERAND (arg1, 1)));
10387 if (! FLOAT_TYPE_P (type))
10389 if (integer_zerop (arg0))
10390 return negate_expr (fold_convert (type, arg1));
10391 if (integer_zerop (arg1))
10392 return non_lvalue (fold_convert (type, arg0));
10394 /* Fold A - (A & B) into ~B & A. */
10395 if (!TREE_SIDE_EFFECTS (arg0)
10396 && TREE_CODE (arg1) == BIT_AND_EXPR)
10398 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10400 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10401 return fold_build2 (BIT_AND_EXPR, type,
10402 fold_build1 (BIT_NOT_EXPR, type, arg10),
10403 fold_convert (type, arg0));
10405 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10407 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10408 return fold_build2 (BIT_AND_EXPR, type,
10409 fold_build1 (BIT_NOT_EXPR, type, arg11),
10410 fold_convert (type, arg0));
10414 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10415 any power of 2 minus 1. */
10416 if (TREE_CODE (arg0) == BIT_AND_EXPR
10417 && TREE_CODE (arg1) == BIT_AND_EXPR
10418 && operand_equal_p (TREE_OPERAND (arg0, 0),
10419 TREE_OPERAND (arg1, 0), 0))
10421 tree mask0 = TREE_OPERAND (arg0, 1);
10422 tree mask1 = TREE_OPERAND (arg1, 1);
10423 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10425 if (operand_equal_p (tem, mask1, 0))
10427 tem = fold_build2 (BIT_XOR_EXPR, type,
10428 TREE_OPERAND (arg0, 0), mask1);
10429 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10434 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10435 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10436 return non_lvalue (fold_convert (type, arg0));
10438 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10439 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10440 (-ARG1 + ARG0) reduces to -ARG1. */
10441 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10442 return negate_expr (fold_convert (type, arg1));
10444 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10445 __complex__ ( x, -y ). This is not the same for SNaNs or if
10446 signed zeros are involved. */
10447 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10448 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10449 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10451 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10452 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10453 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10454 bool arg0rz = false, arg0iz = false;
10455 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10456 || (arg0i && (arg0iz = real_zerop (arg0i))))
10458 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10459 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10460 if (arg0rz && arg1i && real_zerop (arg1i))
10462 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10464 : build1 (REALPART_EXPR, rtype, arg1));
10465 tree ip = arg0i ? arg0i
10466 : build1 (IMAGPART_EXPR, rtype, arg0);
10467 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10469 else if (arg0iz && arg1r && real_zerop (arg1r))
10471 tree rp = arg0r ? arg0r
10472 : build1 (REALPART_EXPR, rtype, arg0);
10473 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10475 : build1 (IMAGPART_EXPR, rtype, arg1));
10476 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10481 /* Fold &x - &x. This can happen from &x.foo - &x.
10482 This is unsafe for certain floats even in non-IEEE formats.
10483 In IEEE, it is unsafe because it does wrong for NaNs.
10484 Also note that operand_equal_p is always false if an operand
10487 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10488 && operand_equal_p (arg0, arg1, 0))
10489 return fold_convert (type, integer_zero_node);
10491 /* A - B -> A + (-B) if B is easily negatable. */
10492 if (negate_expr_p (arg1)
10493 && ((FLOAT_TYPE_P (type)
10494 /* Avoid this transformation if B is a positive REAL_CST. */
10495 && (TREE_CODE (arg1) != REAL_CST
10496 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10497 || INTEGRAL_TYPE_P (type)))
10498 return fold_build2 (PLUS_EXPR, type,
10499 fold_convert (type, arg0),
10500 fold_convert (type, negate_expr (arg1)));
10502 /* Try folding difference of addresses. */
10504 HOST_WIDE_INT diff;
10506 if ((TREE_CODE (arg0) == ADDR_EXPR
10507 || TREE_CODE (arg1) == ADDR_EXPR)
10508 && ptr_difference_const (arg0, arg1, &diff))
10509 return build_int_cst_type (type, diff);
10512 /* Fold &a[i] - &a[j] to i-j. */
10513 if (TREE_CODE (arg0) == ADDR_EXPR
10514 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10515 && TREE_CODE (arg1) == ADDR_EXPR
10516 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10518 tree aref0 = TREE_OPERAND (arg0, 0);
10519 tree aref1 = TREE_OPERAND (arg1, 0);
10520 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10521 TREE_OPERAND (aref1, 0), 0))
10523 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10524 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10525 tree esz = array_ref_element_size (aref0);
10526 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10527 return fold_build2 (MULT_EXPR, type, diff,
10528 fold_convert (type, esz));
10533 if (flag_unsafe_math_optimizations
10534 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10535 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10536 && (tem = distribute_real_division (code, type, arg0, arg1)))
10539 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10540 same or one. Make sure type is not saturating.
10541 fold_plusminus_mult_expr will re-associate. */
10542 if ((TREE_CODE (arg0) == MULT_EXPR
10543 || TREE_CODE (arg1) == MULT_EXPR)
10544 && !TYPE_SATURATING (type)
10545 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10547 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10555 /* (-A) * (-B) -> A * B */
10556 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10557 return fold_build2 (MULT_EXPR, type,
10558 fold_convert (type, TREE_OPERAND (arg0, 0)),
10559 fold_convert (type, negate_expr (arg1)));
10560 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10561 return fold_build2 (MULT_EXPR, type,
10562 fold_convert (type, negate_expr (arg0)),
10563 fold_convert (type, TREE_OPERAND (arg1, 0)));
10565 if (! FLOAT_TYPE_P (type))
10567 if (integer_zerop (arg1))
10568 return omit_one_operand (type, arg1, arg0);
10569 if (integer_onep (arg1))
10570 return non_lvalue (fold_convert (type, arg0));
10571 /* Transform x * -1 into -x. Make sure to do the negation
10572 on the original operand with conversions not stripped
10573 because we can only strip non-sign-changing conversions. */
10574 if (integer_all_onesp (arg1))
10575 return fold_convert (type, negate_expr (op0));
10576 /* Transform x * -C into -x * C if x is easily negatable. */
10577 if (TREE_CODE (arg1) == INTEGER_CST
10578 && tree_int_cst_sgn (arg1) == -1
10579 && negate_expr_p (arg0)
10580 && (tem = negate_expr (arg1)) != arg1
10581 && !TREE_OVERFLOW (tem))
10582 return fold_build2 (MULT_EXPR, type,
10583 fold_convert (type, negate_expr (arg0)), tem);
10585 /* (a * (1 << b)) is (a << b) */
10586 if (TREE_CODE (arg1) == LSHIFT_EXPR
10587 && integer_onep (TREE_OPERAND (arg1, 0)))
10588 return fold_build2 (LSHIFT_EXPR, type, op0,
10589 TREE_OPERAND (arg1, 1));
10590 if (TREE_CODE (arg0) == LSHIFT_EXPR
10591 && integer_onep (TREE_OPERAND (arg0, 0)))
10592 return fold_build2 (LSHIFT_EXPR, type, op1,
10593 TREE_OPERAND (arg0, 1));
10595 /* (A + A) * C -> A * 2 * C */
10596 if (TREE_CODE (arg0) == PLUS_EXPR
10597 && TREE_CODE (arg1) == INTEGER_CST
10598 && operand_equal_p (TREE_OPERAND (arg0, 0),
10599 TREE_OPERAND (arg0, 1), 0))
10600 return fold_build2 (MULT_EXPR, type,
10601 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10602 TREE_OPERAND (arg0, 1)),
10603 fold_build2 (MULT_EXPR, type,
10604 build_int_cst (type, 2) , arg1));
10606 strict_overflow_p = false;
10607 if (TREE_CODE (arg1) == INTEGER_CST
10608 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10609 &strict_overflow_p)))
10611 if (strict_overflow_p)
10612 fold_overflow_warning (("assuming signed overflow does not "
10613 "occur when simplifying "
10615 WARN_STRICT_OVERFLOW_MISC);
10616 return fold_convert (type, tem);
10619 /* Optimize z * conj(z) for integer complex numbers. */
10620 if (TREE_CODE (arg0) == CONJ_EXPR
10621 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10622 return fold_mult_zconjz (type, arg1);
10623 if (TREE_CODE (arg1) == CONJ_EXPR
10624 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10625 return fold_mult_zconjz (type, arg0);
10629 /* Maybe fold x * 0 to 0. The expressions aren't the same
10630 when x is NaN, since x * 0 is also NaN. Nor are they the
10631 same in modes with signed zeros, since multiplying a
10632 negative value by 0 gives -0, not +0. */
10633 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10634 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10635 && real_zerop (arg1))
10636 return omit_one_operand (type, arg1, arg0);
10637 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10638 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10639 && real_onep (arg1))
10640 return non_lvalue (fold_convert (type, arg0));
10642 /* Transform x * -1.0 into -x. */
10643 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10644 && real_minus_onep (arg1))
10645 return fold_convert (type, negate_expr (arg0));
10647 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10648 the result for floating point types due to rounding so it is applied
10649 only if -fassociative-math was specified. */
10650 if (flag_associative_math
10651 && TREE_CODE (arg0) == RDIV_EXPR
10652 && TREE_CODE (arg1) == REAL_CST
10653 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10655 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10658 return fold_build2 (RDIV_EXPR, type, tem,
10659 TREE_OPERAND (arg0, 1));
10662 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10663 if (operand_equal_p (arg0, arg1, 0))
10665 tree tem = fold_strip_sign_ops (arg0);
10666 if (tem != NULL_TREE)
10668 tem = fold_convert (type, tem);
10669 return fold_build2 (MULT_EXPR, type, tem, tem);
10673 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10674 This is not the same for NaNs or if signed zeros are
10676 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10677 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10678 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10679 && TREE_CODE (arg1) == COMPLEX_CST
10680 && real_zerop (TREE_REALPART (arg1)))
10682 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10683 if (real_onep (TREE_IMAGPART (arg1)))
10684 return fold_build2 (COMPLEX_EXPR, type,
10685 negate_expr (fold_build1 (IMAGPART_EXPR,
10687 fold_build1 (REALPART_EXPR, rtype, arg0));
10688 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10689 return fold_build2 (COMPLEX_EXPR, type,
10690 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10691 negate_expr (fold_build1 (REALPART_EXPR,
10695 /* Optimize z * conj(z) for floating point complex numbers.
10696 Guarded by flag_unsafe_math_optimizations as non-finite
10697 imaginary components don't produce scalar results. */
10698 if (flag_unsafe_math_optimizations
10699 && TREE_CODE (arg0) == CONJ_EXPR
10700 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10701 return fold_mult_zconjz (type, arg1);
10702 if (flag_unsafe_math_optimizations
10703 && TREE_CODE (arg1) == CONJ_EXPR
10704 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10705 return fold_mult_zconjz (type, arg0);
10707 if (flag_unsafe_math_optimizations)
10709 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10710 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10712 /* Optimizations of root(...)*root(...). */
10713 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10716 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10717 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10719 /* Optimize sqrt(x)*sqrt(x) as x. */
10720 if (BUILTIN_SQRT_P (fcode0)
10721 && operand_equal_p (arg00, arg10, 0)
10722 && ! HONOR_SNANS (TYPE_MODE (type)))
10725 /* Optimize root(x)*root(y) as root(x*y). */
10726 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10727 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10728 return build_call_expr (rootfn, 1, arg);
10731 /* Optimize expN(x)*expN(y) as expN(x+y). */
10732 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10734 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10735 tree arg = fold_build2 (PLUS_EXPR, type,
10736 CALL_EXPR_ARG (arg0, 0),
10737 CALL_EXPR_ARG (arg1, 0));
10738 return build_call_expr (expfn, 1, arg);
10741 /* Optimizations of pow(...)*pow(...). */
10742 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10743 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10744 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10746 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10747 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10748 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10749 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10751 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10752 if (operand_equal_p (arg01, arg11, 0))
10754 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10755 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10756 return build_call_expr (powfn, 2, arg, arg01);
10759 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10760 if (operand_equal_p (arg00, arg10, 0))
10762 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10763 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10764 return build_call_expr (powfn, 2, arg00, arg);
10768 /* Optimize tan(x)*cos(x) as sin(x). */
10769 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10770 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10771 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10772 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10773 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10774 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10775 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10776 CALL_EXPR_ARG (arg1, 0), 0))
10778 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10780 if (sinfn != NULL_TREE)
10781 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10784 /* Optimize x*pow(x,c) as pow(x,c+1). */
10785 if (fcode1 == BUILT_IN_POW
10786 || fcode1 == BUILT_IN_POWF
10787 || fcode1 == BUILT_IN_POWL)
10789 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10790 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10791 if (TREE_CODE (arg11) == REAL_CST
10792 && !TREE_OVERFLOW (arg11)
10793 && operand_equal_p (arg0, arg10, 0))
10795 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10799 c = TREE_REAL_CST (arg11);
10800 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10801 arg = build_real (type, c);
10802 return build_call_expr (powfn, 2, arg0, arg);
10806 /* Optimize pow(x,c)*x as pow(x,c+1). */
10807 if (fcode0 == BUILT_IN_POW
10808 || fcode0 == BUILT_IN_POWF
10809 || fcode0 == BUILT_IN_POWL)
10811 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10812 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10813 if (TREE_CODE (arg01) == REAL_CST
10814 && !TREE_OVERFLOW (arg01)
10815 && operand_equal_p (arg1, arg00, 0))
10817 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10821 c = TREE_REAL_CST (arg01);
10822 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10823 arg = build_real (type, c);
10824 return build_call_expr (powfn, 2, arg1, arg);
10828 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10829 if (optimize_function_for_speed_p (cfun)
10830 && operand_equal_p (arg0, arg1, 0))
10832 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10836 tree arg = build_real (type, dconst2);
10837 return build_call_expr (powfn, 2, arg0, arg);
10846 if (integer_all_onesp (arg1))
10847 return omit_one_operand (type, arg1, arg0);
10848 if (integer_zerop (arg1))
10849 return non_lvalue (fold_convert (type, arg0));
10850 if (operand_equal_p (arg0, arg1, 0))
10851 return non_lvalue (fold_convert (type, arg0));
10853 /* ~X | X is -1. */
10854 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10855 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10857 t1 = fold_convert (type, integer_zero_node);
10858 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10859 return omit_one_operand (type, t1, arg1);
10862 /* X | ~X is -1. */
10863 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10864 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10866 t1 = fold_convert (type, integer_zero_node);
10867 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10868 return omit_one_operand (type, t1, arg0);
10871 /* Canonicalize (X & C1) | C2. */
10872 if (TREE_CODE (arg0) == BIT_AND_EXPR
10873 && TREE_CODE (arg1) == INTEGER_CST
10874 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10876 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10877 int width = TYPE_PRECISION (type), w;
10878 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10879 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10880 hi2 = TREE_INT_CST_HIGH (arg1);
10881 lo2 = TREE_INT_CST_LOW (arg1);
10883 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10884 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10885 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10887 if (width > HOST_BITS_PER_WIDE_INT)
10889 mhi = (unsigned HOST_WIDE_INT) -1
10890 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10896 mlo = (unsigned HOST_WIDE_INT) -1
10897 >> (HOST_BITS_PER_WIDE_INT - width);
10900 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10901 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10902 return fold_build2 (BIT_IOR_EXPR, type,
10903 TREE_OPERAND (arg0, 0), arg1);
10905 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10906 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10907 mode which allows further optimizations. */
10914 for (w = BITS_PER_UNIT;
10915 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10918 unsigned HOST_WIDE_INT mask
10919 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10920 if (((lo1 | lo2) & mask) == mask
10921 && (lo1 & ~mask) == 0 && hi1 == 0)
10928 if (hi3 != hi1 || lo3 != lo1)
10929 return fold_build2 (BIT_IOR_EXPR, type,
10930 fold_build2 (BIT_AND_EXPR, type,
10931 TREE_OPERAND (arg0, 0),
10932 build_int_cst_wide (type,
10937 /* (X & Y) | Y is (X, Y). */
10938 if (TREE_CODE (arg0) == BIT_AND_EXPR
10939 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10940 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10941 /* (X & Y) | X is (Y, X). */
10942 if (TREE_CODE (arg0) == BIT_AND_EXPR
10943 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10944 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10945 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10946 /* X | (X & Y) is (Y, X). */
10947 if (TREE_CODE (arg1) == BIT_AND_EXPR
10948 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10949 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10950 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10951 /* X | (Y & X) is (Y, X). */
10952 if (TREE_CODE (arg1) == BIT_AND_EXPR
10953 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10954 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10955 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10957 t1 = distribute_bit_expr (code, type, arg0, arg1);
10958 if (t1 != NULL_TREE)
10961 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10963 This results in more efficient code for machines without a NAND
10964 instruction. Combine will canonicalize to the first form
10965 which will allow use of NAND instructions provided by the
10966 backend if they exist. */
10967 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10968 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10970 return fold_build1 (BIT_NOT_EXPR, type,
10971 build2 (BIT_AND_EXPR, type,
10972 fold_convert (type,
10973 TREE_OPERAND (arg0, 0)),
10974 fold_convert (type,
10975 TREE_OPERAND (arg1, 0))));
10978 /* See if this can be simplified into a rotate first. If that
10979 is unsuccessful continue in the association code. */
10983 if (integer_zerop (arg1))
10984 return non_lvalue (fold_convert (type, arg0));
10985 if (integer_all_onesp (arg1))
10986 return fold_build1 (BIT_NOT_EXPR, type, op0);
10987 if (operand_equal_p (arg0, arg1, 0))
10988 return omit_one_operand (type, integer_zero_node, arg0);
10990 /* ~X ^ X is -1. */
10991 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10992 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10994 t1 = fold_convert (type, integer_zero_node);
10995 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10996 return omit_one_operand (type, t1, arg1);
10999 /* X ^ ~X is -1. */
11000 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11001 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11003 t1 = fold_convert (type, integer_zero_node);
11004 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
11005 return omit_one_operand (type, t1, arg0);
11008 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11009 with a constant, and the two constants have no bits in common,
11010 we should treat this as a BIT_IOR_EXPR since this may produce more
11011 simplifications. */
11012 if (TREE_CODE (arg0) == BIT_AND_EXPR
11013 && TREE_CODE (arg1) == BIT_AND_EXPR
11014 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11015 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11016 && integer_zerop (const_binop (BIT_AND_EXPR,
11017 TREE_OPERAND (arg0, 1),
11018 TREE_OPERAND (arg1, 1), 0)))
11020 code = BIT_IOR_EXPR;
11024 /* (X | Y) ^ X -> Y & ~ X*/
11025 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11026 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11028 tree t2 = TREE_OPERAND (arg0, 1);
11029 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11031 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11032 fold_convert (type, t1));
11036 /* (Y | X) ^ X -> Y & ~ X*/
11037 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11038 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11040 tree t2 = TREE_OPERAND (arg0, 0);
11041 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11043 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11044 fold_convert (type, t1));
11048 /* X ^ (X | Y) -> Y & ~ X*/
11049 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11050 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11052 tree t2 = TREE_OPERAND (arg1, 1);
11053 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11055 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11056 fold_convert (type, t1));
11060 /* X ^ (Y | X) -> Y & ~ X*/
11061 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11062 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11064 tree t2 = TREE_OPERAND (arg1, 0);
11065 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11067 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11068 fold_convert (type, t1));
11072 /* Convert ~X ^ ~Y to X ^ Y. */
11073 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11074 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11075 return fold_build2 (code, type,
11076 fold_convert (type, TREE_OPERAND (arg0, 0)),
11077 fold_convert (type, TREE_OPERAND (arg1, 0)));
11079 /* Convert ~X ^ C to X ^ ~C. */
11080 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11081 && TREE_CODE (arg1) == INTEGER_CST)
11082 return fold_build2 (code, type,
11083 fold_convert (type, TREE_OPERAND (arg0, 0)),
11084 fold_build1 (BIT_NOT_EXPR, type, arg1));
11086 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11087 if (TREE_CODE (arg0) == BIT_AND_EXPR
11088 && integer_onep (TREE_OPERAND (arg0, 1))
11089 && integer_onep (arg1))
11090 return fold_build2 (EQ_EXPR, type, arg0,
11091 build_int_cst (TREE_TYPE (arg0), 0));
11093 /* Fold (X & Y) ^ Y as ~X & Y. */
11094 if (TREE_CODE (arg0) == BIT_AND_EXPR
11095 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11097 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11098 return fold_build2 (BIT_AND_EXPR, type,
11099 fold_build1 (BIT_NOT_EXPR, type, tem),
11100 fold_convert (type, arg1));
11102 /* Fold (X & Y) ^ X as ~Y & X. */
11103 if (TREE_CODE (arg0) == BIT_AND_EXPR
11104 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11105 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11107 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11108 return fold_build2 (BIT_AND_EXPR, type,
11109 fold_build1 (BIT_NOT_EXPR, type, tem),
11110 fold_convert (type, arg1));
11112 /* Fold X ^ (X & Y) as X & ~Y. */
11113 if (TREE_CODE (arg1) == BIT_AND_EXPR
11114 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11116 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11117 return fold_build2 (BIT_AND_EXPR, type,
11118 fold_convert (type, arg0),
11119 fold_build1 (BIT_NOT_EXPR, type, tem));
11121 /* Fold X ^ (Y & X) as ~Y & X. */
11122 if (TREE_CODE (arg1) == BIT_AND_EXPR
11123 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11124 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11126 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11127 return fold_build2 (BIT_AND_EXPR, type,
11128 fold_build1 (BIT_NOT_EXPR, type, tem),
11129 fold_convert (type, arg0));
11132 /* See if this can be simplified into a rotate first. If that
11133 is unsuccessful continue in the association code. */
11137 if (integer_all_onesp (arg1))
11138 return non_lvalue (fold_convert (type, arg0));
11139 if (integer_zerop (arg1))
11140 return omit_one_operand (type, arg1, arg0);
11141 if (operand_equal_p (arg0, arg1, 0))
11142 return non_lvalue (fold_convert (type, arg0));
11144 /* ~X & X is always zero. */
11145 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11146 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11147 return omit_one_operand (type, integer_zero_node, arg1);
11149 /* X & ~X is always zero. */
11150 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11151 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11152 return omit_one_operand (type, integer_zero_node, arg0);
11154 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11155 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11156 && TREE_CODE (arg1) == INTEGER_CST
11157 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11159 tree tmp1 = fold_convert (type, arg1);
11160 tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
11161 tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
11162 tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
11163 tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
11164 return fold_convert (type,
11165 fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
11168 /* (X | Y) & Y is (X, Y). */
11169 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11170 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11171 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
11172 /* (X | Y) & X is (Y, X). */
11173 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11174 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11175 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11176 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
11177 /* X & (X | Y) is (Y, X). */
11178 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11179 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11180 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11181 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
11182 /* X & (Y | X) is (Y, X). */
11183 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11184 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11185 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11186 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
11188 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11189 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11190 && integer_onep (TREE_OPERAND (arg0, 1))
11191 && integer_onep (arg1))
11193 tem = TREE_OPERAND (arg0, 0);
11194 return fold_build2 (EQ_EXPR, type,
11195 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11196 build_int_cst (TREE_TYPE (tem), 1)),
11197 build_int_cst (TREE_TYPE (tem), 0));
11199 /* Fold ~X & 1 as (X & 1) == 0. */
11200 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11201 && integer_onep (arg1))
11203 tem = TREE_OPERAND (arg0, 0);
11204 return fold_build2 (EQ_EXPR, type,
11205 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11206 build_int_cst (TREE_TYPE (tem), 1)),
11207 build_int_cst (TREE_TYPE (tem), 0));
11210 /* Fold (X ^ Y) & Y as ~X & Y. */
11211 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11212 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11214 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11215 return fold_build2 (BIT_AND_EXPR, type,
11216 fold_build1 (BIT_NOT_EXPR, type, tem),
11217 fold_convert (type, arg1));
11219 /* Fold (X ^ Y) & X as ~Y & X. */
11220 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11221 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11222 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11224 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11225 return fold_build2 (BIT_AND_EXPR, type,
11226 fold_build1 (BIT_NOT_EXPR, type, tem),
11227 fold_convert (type, arg1));
11229 /* Fold X & (X ^ Y) as X & ~Y. */
11230 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11231 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11233 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11234 return fold_build2 (BIT_AND_EXPR, type,
11235 fold_convert (type, arg0),
11236 fold_build1 (BIT_NOT_EXPR, type, tem));
11238 /* Fold X & (Y ^ X) as ~Y & X. */
11239 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11240 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11241 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11243 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11244 return fold_build2 (BIT_AND_EXPR, type,
11245 fold_build1 (BIT_NOT_EXPR, type, tem),
11246 fold_convert (type, arg0));
11249 t1 = distribute_bit_expr (code, type, arg0, arg1);
11250 if (t1 != NULL_TREE)
11252 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11253 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11254 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11257 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11259 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11260 && (~TREE_INT_CST_LOW (arg1)
11261 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11262 return fold_convert (type, TREE_OPERAND (arg0, 0));
11265 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11267 This results in more efficient code for machines without a NOR
11268 instruction. Combine will canonicalize to the first form
11269 which will allow use of NOR instructions provided by the
11270 backend if they exist. */
11271 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11272 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11274 return fold_build1 (BIT_NOT_EXPR, type,
11275 build2 (BIT_IOR_EXPR, type,
11276 fold_convert (type,
11277 TREE_OPERAND (arg0, 0)),
11278 fold_convert (type,
11279 TREE_OPERAND (arg1, 0))));
11282 /* If arg0 is derived from the address of an object or function, we may
11283 be able to fold this expression using the object or function's
11285 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11287 unsigned HOST_WIDE_INT modulus, residue;
11288 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11290 modulus = get_pointer_modulus_and_residue (arg0, &residue);
11292 /* This works because modulus is a power of 2. If this weren't the
11293 case, we'd have to replace it by its greatest power-of-2
11294 divisor: modulus & -modulus. */
11296 return build_int_cst (type, residue & low);
11299 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11300 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11301 if the new mask might be further optimized. */
11302 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11303 || TREE_CODE (arg0) == RSHIFT_EXPR)
11304 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11305 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11306 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11307 < TYPE_PRECISION (TREE_TYPE (arg0))
11308 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11309 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11311 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11312 unsigned HOST_WIDE_INT mask
11313 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11314 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11315 tree shift_type = TREE_TYPE (arg0);
11317 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11318 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11319 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11320 && TYPE_PRECISION (TREE_TYPE (arg0))
11321 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11323 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11324 tree arg00 = TREE_OPERAND (arg0, 0);
11325 /* See if more bits can be proven as zero because of
11327 if (TREE_CODE (arg00) == NOP_EXPR
11328 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11330 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11331 if (TYPE_PRECISION (inner_type)
11332 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11333 && TYPE_PRECISION (inner_type) < prec)
11335 prec = TYPE_PRECISION (inner_type);
11336 /* See if we can shorten the right shift. */
11338 shift_type = inner_type;
11341 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11342 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11343 zerobits <<= prec - shiftc;
11344 /* For arithmetic shift if sign bit could be set, zerobits
11345 can contain actually sign bits, so no transformation is
11346 possible, unless MASK masks them all away. In that
11347 case the shift needs to be converted into logical shift. */
11348 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11349 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11351 if ((mask & zerobits) == 0)
11352 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11358 /* ((X << 16) & 0xff00) is (X, 0). */
11359 if ((mask & zerobits) == mask)
11360 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11362 newmask = mask | zerobits;
11363 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11367 /* Only do the transformation if NEWMASK is some integer
11369 for (prec = BITS_PER_UNIT;
11370 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11371 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11373 if (prec < HOST_BITS_PER_WIDE_INT
11374 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11378 if (shift_type != TREE_TYPE (arg0))
11380 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11381 fold_convert (shift_type,
11382 TREE_OPERAND (arg0, 0)),
11383 TREE_OPERAND (arg0, 1));
11384 tem = fold_convert (type, tem);
11388 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11389 if (!tree_int_cst_equal (newmaskt, arg1))
11390 return fold_build2 (BIT_AND_EXPR, type, tem, newmaskt);
11398 /* Don't touch a floating-point divide by zero unless the mode
11399 of the constant can represent infinity. */
11400 if (TREE_CODE (arg1) == REAL_CST
11401 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11402 && real_zerop (arg1))
11405 /* Optimize A / A to 1.0 if we don't care about
11406 NaNs or Infinities. Skip the transformation
11407 for non-real operands. */
11408 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11409 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11410 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11411 && operand_equal_p (arg0, arg1, 0))
11413 tree r = build_real (TREE_TYPE (arg0), dconst1);
11415 return omit_two_operands (type, r, arg0, arg1);
11418 /* The complex version of the above A / A optimization. */
11419 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11420 && operand_equal_p (arg0, arg1, 0))
11422 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11423 if (! HONOR_NANS (TYPE_MODE (elem_type))
11424 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11426 tree r = build_real (elem_type, dconst1);
11427 /* omit_two_operands will call fold_convert for us. */
11428 return omit_two_operands (type, r, arg0, arg1);
11432 /* (-A) / (-B) -> A / B */
11433 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11434 return fold_build2 (RDIV_EXPR, type,
11435 TREE_OPERAND (arg0, 0),
11436 negate_expr (arg1));
11437 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11438 return fold_build2 (RDIV_EXPR, type,
11439 negate_expr (arg0),
11440 TREE_OPERAND (arg1, 0));
11442 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11443 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11444 && real_onep (arg1))
11445 return non_lvalue (fold_convert (type, arg0));
11447 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11448 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11449 && real_minus_onep (arg1))
11450 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11452 /* If ARG1 is a constant, we can convert this to a multiply by the
11453 reciprocal. This does not have the same rounding properties,
11454 so only do this if -freciprocal-math. We can actually
11455 always safely do it if ARG1 is a power of two, but it's hard to
11456 tell if it is or not in a portable manner. */
11457 if (TREE_CODE (arg1) == REAL_CST)
11459 if (flag_reciprocal_math
11460 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11462 return fold_build2 (MULT_EXPR, type, arg0, tem);
11463 /* Find the reciprocal if optimizing and the result is exact. */
11467 r = TREE_REAL_CST (arg1);
11468 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11470 tem = build_real (type, r);
11471 return fold_build2 (MULT_EXPR, type,
11472 fold_convert (type, arg0), tem);
11476 /* Convert A/B/C to A/(B*C). */
11477 if (flag_reciprocal_math
11478 && TREE_CODE (arg0) == RDIV_EXPR)
11479 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11480 fold_build2 (MULT_EXPR, type,
11481 TREE_OPERAND (arg0, 1), arg1));
11483 /* Convert A/(B/C) to (A/B)*C. */
11484 if (flag_reciprocal_math
11485 && TREE_CODE (arg1) == RDIV_EXPR)
11486 return fold_build2 (MULT_EXPR, type,
11487 fold_build2 (RDIV_EXPR, type, arg0,
11488 TREE_OPERAND (arg1, 0)),
11489 TREE_OPERAND (arg1, 1));
11491 /* Convert C1/(X*C2) into (C1/C2)/X. */
11492 if (flag_reciprocal_math
11493 && TREE_CODE (arg1) == MULT_EXPR
11494 && TREE_CODE (arg0) == REAL_CST
11495 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11497 tree tem = const_binop (RDIV_EXPR, arg0,
11498 TREE_OPERAND (arg1, 1), 0);
11500 return fold_build2 (RDIV_EXPR, type, tem,
11501 TREE_OPERAND (arg1, 0));
11504 if (flag_unsafe_math_optimizations)
11506 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11507 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11509 /* Optimize sin(x)/cos(x) as tan(x). */
11510 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11511 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11512 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11513 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11514 CALL_EXPR_ARG (arg1, 0), 0))
11516 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11518 if (tanfn != NULL_TREE)
11519 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11522 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11523 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11524 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11525 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11526 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11527 CALL_EXPR_ARG (arg1, 0), 0))
11529 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11531 if (tanfn != NULL_TREE)
11533 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11534 return fold_build2 (RDIV_EXPR, type,
11535 build_real (type, dconst1), tmp);
11539 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11540 NaNs or Infinities. */
11541 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11542 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11543 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11545 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11546 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11548 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11549 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11550 && operand_equal_p (arg00, arg01, 0))
11552 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11554 if (cosfn != NULL_TREE)
11555 return build_call_expr (cosfn, 1, arg00);
11559 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11560 NaNs or Infinities. */
11561 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11562 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11563 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11565 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11566 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11568 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11569 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11570 && operand_equal_p (arg00, arg01, 0))
11572 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11574 if (cosfn != NULL_TREE)
11576 tree tmp = build_call_expr (cosfn, 1, arg00);
11577 return fold_build2 (RDIV_EXPR, type,
11578 build_real (type, dconst1),
11584 /* Optimize pow(x,c)/x as pow(x,c-1). */
11585 if (fcode0 == BUILT_IN_POW
11586 || fcode0 == BUILT_IN_POWF
11587 || fcode0 == BUILT_IN_POWL)
11589 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11590 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11591 if (TREE_CODE (arg01) == REAL_CST
11592 && !TREE_OVERFLOW (arg01)
11593 && operand_equal_p (arg1, arg00, 0))
11595 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11599 c = TREE_REAL_CST (arg01);
11600 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11601 arg = build_real (type, c);
11602 return build_call_expr (powfn, 2, arg1, arg);
11606 /* Optimize a/root(b/c) into a*root(c/b). */
11607 if (BUILTIN_ROOT_P (fcode1))
11609 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11611 if (TREE_CODE (rootarg) == RDIV_EXPR)
11613 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11614 tree b = TREE_OPERAND (rootarg, 0);
11615 tree c = TREE_OPERAND (rootarg, 1);
11617 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11619 tmp = build_call_expr (rootfn, 1, tmp);
11620 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11624 /* Optimize x/expN(y) into x*expN(-y). */
11625 if (BUILTIN_EXPONENT_P (fcode1))
11627 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11628 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11629 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11630 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11633 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11634 if (fcode1 == BUILT_IN_POW
11635 || fcode1 == BUILT_IN_POWF
11636 || fcode1 == BUILT_IN_POWL)
11638 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11639 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11640 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11641 tree neg11 = fold_convert (type, negate_expr (arg11));
11642 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11643 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11648 case TRUNC_DIV_EXPR:
11649 case FLOOR_DIV_EXPR:
11650 /* Simplify A / (B << N) where A and B are positive and B is
11651 a power of 2, to A >> (N + log2(B)). */
11652 strict_overflow_p = false;
11653 if (TREE_CODE (arg1) == LSHIFT_EXPR
11654 && (TYPE_UNSIGNED (type)
11655 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11657 tree sval = TREE_OPERAND (arg1, 0);
11658 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11660 tree sh_cnt = TREE_OPERAND (arg1, 1);
11661 unsigned long pow2;
11663 if (TREE_INT_CST_LOW (sval))
11664 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11666 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11667 + HOST_BITS_PER_WIDE_INT;
11669 if (strict_overflow_p)
11670 fold_overflow_warning (("assuming signed overflow does not "
11671 "occur when simplifying A / (B << N)"),
11672 WARN_STRICT_OVERFLOW_MISC);
11674 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11675 sh_cnt, build_int_cst (NULL_TREE, pow2));
11676 return fold_build2 (RSHIFT_EXPR, type,
11677 fold_convert (type, arg0), sh_cnt);
11681 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11682 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11683 if (INTEGRAL_TYPE_P (type)
11684 && TYPE_UNSIGNED (type)
11685 && code == FLOOR_DIV_EXPR)
11686 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11690 case ROUND_DIV_EXPR:
11691 case CEIL_DIV_EXPR:
11692 case EXACT_DIV_EXPR:
11693 if (integer_onep (arg1))
11694 return non_lvalue (fold_convert (type, arg0));
11695 if (integer_zerop (arg1))
11697 /* X / -1 is -X. */
11698 if (!TYPE_UNSIGNED (type)
11699 && TREE_CODE (arg1) == INTEGER_CST
11700 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11701 && TREE_INT_CST_HIGH (arg1) == -1)
11702 return fold_convert (type, negate_expr (arg0));
11704 /* Convert -A / -B to A / B when the type is signed and overflow is
11706 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11707 && TREE_CODE (arg0) == NEGATE_EXPR
11708 && negate_expr_p (arg1))
11710 if (INTEGRAL_TYPE_P (type))
11711 fold_overflow_warning (("assuming signed overflow does not occur "
11712 "when distributing negation across "
11714 WARN_STRICT_OVERFLOW_MISC);
11715 return fold_build2 (code, type,
11716 fold_convert (type, TREE_OPERAND (arg0, 0)),
11717 fold_convert (type, negate_expr (arg1)));
11719 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11720 && TREE_CODE (arg1) == NEGATE_EXPR
11721 && negate_expr_p (arg0))
11723 if (INTEGRAL_TYPE_P (type))
11724 fold_overflow_warning (("assuming signed overflow does not occur "
11725 "when distributing negation across "
11727 WARN_STRICT_OVERFLOW_MISC);
11728 return fold_build2 (code, type,
11729 fold_convert (type, negate_expr (arg0)),
11730 fold_convert (type, TREE_OPERAND (arg1, 0)));
11733 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11734 operation, EXACT_DIV_EXPR.
11736 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11737 At one time others generated faster code, it's not clear if they do
11738 after the last round to changes to the DIV code in expmed.c. */
11739 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11740 && multiple_of_p (type, arg0, arg1))
11741 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11743 strict_overflow_p = false;
11744 if (TREE_CODE (arg1) == INTEGER_CST
11745 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11746 &strict_overflow_p)))
11748 if (strict_overflow_p)
11749 fold_overflow_warning (("assuming signed overflow does not occur "
11750 "when simplifying division"),
11751 WARN_STRICT_OVERFLOW_MISC);
11752 return fold_convert (type, tem);
11757 case CEIL_MOD_EXPR:
11758 case FLOOR_MOD_EXPR:
11759 case ROUND_MOD_EXPR:
11760 case TRUNC_MOD_EXPR:
11761 /* X % 1 is always zero, but be sure to preserve any side
11763 if (integer_onep (arg1))
11764 return omit_one_operand (type, integer_zero_node, arg0);
11766 /* X % 0, return X % 0 unchanged so that we can get the
11767 proper warnings and errors. */
11768 if (integer_zerop (arg1))
11771 /* 0 % X is always zero, but be sure to preserve any side
11772 effects in X. Place this after checking for X == 0. */
11773 if (integer_zerop (arg0))
11774 return omit_one_operand (type, integer_zero_node, arg1);
11776 /* X % -1 is zero. */
11777 if (!TYPE_UNSIGNED (type)
11778 && TREE_CODE (arg1) == INTEGER_CST
11779 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11780 && TREE_INT_CST_HIGH (arg1) == -1)
11781 return omit_one_operand (type, integer_zero_node, arg0);
11783 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11784 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11785 strict_overflow_p = false;
11786 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11787 && (TYPE_UNSIGNED (type)
11788 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11791 /* Also optimize A % (C << N) where C is a power of 2,
11792 to A & ((C << N) - 1). */
11793 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11794 c = TREE_OPERAND (arg1, 0);
11796 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11798 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11799 build_int_cst (TREE_TYPE (arg1), 1));
11800 if (strict_overflow_p)
11801 fold_overflow_warning (("assuming signed overflow does not "
11802 "occur when simplifying "
11803 "X % (power of two)"),
11804 WARN_STRICT_OVERFLOW_MISC);
11805 return fold_build2 (BIT_AND_EXPR, type,
11806 fold_convert (type, arg0),
11807 fold_convert (type, mask));
11811 /* X % -C is the same as X % C. */
11812 if (code == TRUNC_MOD_EXPR
11813 && !TYPE_UNSIGNED (type)
11814 && TREE_CODE (arg1) == INTEGER_CST
11815 && !TREE_OVERFLOW (arg1)
11816 && TREE_INT_CST_HIGH (arg1) < 0
11817 && !TYPE_OVERFLOW_TRAPS (type)
11818 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11819 && !sign_bit_p (arg1, arg1))
11820 return fold_build2 (code, type, fold_convert (type, arg0),
11821 fold_convert (type, negate_expr (arg1)));
11823 /* X % -Y is the same as X % Y. */
11824 if (code == TRUNC_MOD_EXPR
11825 && !TYPE_UNSIGNED (type)
11826 && TREE_CODE (arg1) == NEGATE_EXPR
11827 && !TYPE_OVERFLOW_TRAPS (type))
11828 return fold_build2 (code, type, fold_convert (type, arg0),
11829 fold_convert (type, TREE_OPERAND (arg1, 0)));
11831 if (TREE_CODE (arg1) == INTEGER_CST
11832 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11833 &strict_overflow_p)))
11835 if (strict_overflow_p)
11836 fold_overflow_warning (("assuming signed overflow does not occur "
11837 "when simplifying modulus"),
11838 WARN_STRICT_OVERFLOW_MISC);
11839 return fold_convert (type, tem);
11846 if (integer_all_onesp (arg0))
11847 return omit_one_operand (type, arg0, arg1);
11851 /* Optimize -1 >> x for arithmetic right shifts. */
11852 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11853 && tree_expr_nonnegative_p (arg1))
11854 return omit_one_operand (type, arg0, arg1);
11855 /* ... fall through ... */
11859 if (integer_zerop (arg1))
11860 return non_lvalue (fold_convert (type, arg0));
11861 if (integer_zerop (arg0))
11862 return omit_one_operand (type, arg0, arg1);
11864 /* Since negative shift count is not well-defined,
11865 don't try to compute it in the compiler. */
11866 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11869 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11870 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11871 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11872 && host_integerp (TREE_OPERAND (arg0, 1), false)
11873 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11875 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11876 + TREE_INT_CST_LOW (arg1));
11878 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11879 being well defined. */
11880 if (low >= TYPE_PRECISION (type))
11882 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11883 low = low % TYPE_PRECISION (type);
11884 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11885 return omit_one_operand (type, build_int_cst (type, 0),
11886 TREE_OPERAND (arg0, 0));
11888 low = TYPE_PRECISION (type) - 1;
11891 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11892 build_int_cst (type, low));
11895 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11896 into x & ((unsigned)-1 >> c) for unsigned types. */
11897 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11898 || (TYPE_UNSIGNED (type)
11899 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11900 && host_integerp (arg1, false)
11901 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11902 && host_integerp (TREE_OPERAND (arg0, 1), false)
11903 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11905 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11906 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11912 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11914 lshift = build_int_cst (type, -1);
11915 lshift = int_const_binop (code, lshift, arg1, 0);
11917 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11921 /* Rewrite an LROTATE_EXPR by a constant into an
11922 RROTATE_EXPR by a new constant. */
11923 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11925 tree tem = build_int_cst (TREE_TYPE (arg1),
11926 TYPE_PRECISION (type));
11927 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11928 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11931 /* If we have a rotate of a bit operation with the rotate count and
11932 the second operand of the bit operation both constant,
11933 permute the two operations. */
11934 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11935 && (TREE_CODE (arg0) == BIT_AND_EXPR
11936 || TREE_CODE (arg0) == BIT_IOR_EXPR
11937 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11938 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11939 return fold_build2 (TREE_CODE (arg0), type,
11940 fold_build2 (code, type,
11941 TREE_OPERAND (arg0, 0), arg1),
11942 fold_build2 (code, type,
11943 TREE_OPERAND (arg0, 1), arg1));
11945 /* Two consecutive rotates adding up to the precision of the
11946 type can be ignored. */
11947 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11948 && TREE_CODE (arg0) == RROTATE_EXPR
11949 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11950 && TREE_INT_CST_HIGH (arg1) == 0
11951 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11952 && ((TREE_INT_CST_LOW (arg1)
11953 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11954 == (unsigned int) TYPE_PRECISION (type)))
11955 return TREE_OPERAND (arg0, 0);
11957 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11958 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11959 if the latter can be further optimized. */
11960 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11961 && TREE_CODE (arg0) == BIT_AND_EXPR
11962 && TREE_CODE (arg1) == INTEGER_CST
11963 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11965 tree mask = fold_build2 (code, type,
11966 fold_convert (type, TREE_OPERAND (arg0, 1)),
11968 tree shift = fold_build2 (code, type,
11969 fold_convert (type, TREE_OPERAND (arg0, 0)),
11971 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11979 if (operand_equal_p (arg0, arg1, 0))
11980 return omit_one_operand (type, arg0, arg1);
11981 if (INTEGRAL_TYPE_P (type)
11982 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11983 return omit_one_operand (type, arg1, arg0);
11984 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11990 if (operand_equal_p (arg0, arg1, 0))
11991 return omit_one_operand (type, arg0, arg1);
11992 if (INTEGRAL_TYPE_P (type)
11993 && TYPE_MAX_VALUE (type)
11994 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11995 return omit_one_operand (type, arg1, arg0);
11996 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
12001 case TRUTH_ANDIF_EXPR:
12002 /* Note that the operands of this must be ints
12003 and their values must be 0 or 1.
12004 ("true" is a fixed value perhaps depending on the language.) */
12005 /* If first arg is constant zero, return it. */
12006 if (integer_zerop (arg0))
12007 return fold_convert (type, arg0);
12008 case TRUTH_AND_EXPR:
12009 /* If either arg is constant true, drop it. */
12010 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12011 return non_lvalue (fold_convert (type, arg1));
12012 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12013 /* Preserve sequence points. */
12014 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12015 return non_lvalue (fold_convert (type, arg0));
12016 /* If second arg is constant zero, result is zero, but first arg
12017 must be evaluated. */
12018 if (integer_zerop (arg1))
12019 return omit_one_operand (type, arg1, arg0);
12020 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12021 case will be handled here. */
12022 if (integer_zerop (arg0))
12023 return omit_one_operand (type, arg0, arg1);
12025 /* !X && X is always false. */
12026 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12027 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12028 return omit_one_operand (type, integer_zero_node, arg1);
12029 /* X && !X is always false. */
12030 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12031 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12032 return omit_one_operand (type, integer_zero_node, arg0);
12034 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12035 means A >= Y && A != MAX, but in this case we know that
12038 if (!TREE_SIDE_EFFECTS (arg0)
12039 && !TREE_SIDE_EFFECTS (arg1))
12041 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
12042 if (tem && !operand_equal_p (tem, arg0, 0))
12043 return fold_build2 (code, type, tem, arg1);
12045 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
12046 if (tem && !operand_equal_p (tem, arg1, 0))
12047 return fold_build2 (code, type, arg0, tem);
12051 /* We only do these simplifications if we are optimizing. */
12055 /* Check for things like (A || B) && (A || C). We can convert this
12056 to A || (B && C). Note that either operator can be any of the four
12057 truth and/or operations and the transformation will still be
12058 valid. Also note that we only care about order for the
12059 ANDIF and ORIF operators. If B contains side effects, this
12060 might change the truth-value of A. */
12061 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12062 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12063 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12064 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12065 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12066 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12068 tree a00 = TREE_OPERAND (arg0, 0);
12069 tree a01 = TREE_OPERAND (arg0, 1);
12070 tree a10 = TREE_OPERAND (arg1, 0);
12071 tree a11 = TREE_OPERAND (arg1, 1);
12072 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12073 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12074 && (code == TRUTH_AND_EXPR
12075 || code == TRUTH_OR_EXPR));
12077 if (operand_equal_p (a00, a10, 0))
12078 return fold_build2 (TREE_CODE (arg0), type, a00,
12079 fold_build2 (code, type, a01, a11));
12080 else if (commutative && operand_equal_p (a00, a11, 0))
12081 return fold_build2 (TREE_CODE (arg0), type, a00,
12082 fold_build2 (code, type, a01, a10));
12083 else if (commutative && operand_equal_p (a01, a10, 0))
12084 return fold_build2 (TREE_CODE (arg0), type, a01,
12085 fold_build2 (code, type, a00, a11));
12087 /* This case if tricky because we must either have commutative
12088 operators or else A10 must not have side-effects. */
12090 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12091 && operand_equal_p (a01, a11, 0))
12092 return fold_build2 (TREE_CODE (arg0), type,
12093 fold_build2 (code, type, a00, a10),
12097 /* See if we can build a range comparison. */
12098 if (0 != (tem = fold_range_test (code, type, op0, op1)))
12101 /* Check for the possibility of merging component references. If our
12102 lhs is another similar operation, try to merge its rhs with our
12103 rhs. Then try to merge our lhs and rhs. */
12104 if (TREE_CODE (arg0) == code
12105 && 0 != (tem = fold_truthop (code, type,
12106 TREE_OPERAND (arg0, 1), arg1)))
12107 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12109 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
12114 case TRUTH_ORIF_EXPR:
12115 /* Note that the operands of this must be ints
12116 and their values must be 0 or true.
12117 ("true" is a fixed value perhaps depending on the language.) */
12118 /* If first arg is constant true, return it. */
12119 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12120 return fold_convert (type, arg0);
12121 case TRUTH_OR_EXPR:
12122 /* If either arg is constant zero, drop it. */
12123 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12124 return non_lvalue (fold_convert (type, arg1));
12125 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12126 /* Preserve sequence points. */
12127 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12128 return non_lvalue (fold_convert (type, arg0));
12129 /* If second arg is constant true, result is true, but we must
12130 evaluate first arg. */
12131 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12132 return omit_one_operand (type, arg1, arg0);
12133 /* Likewise for first arg, but note this only occurs here for
12135 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12136 return omit_one_operand (type, arg0, arg1);
12138 /* !X || X is always true. */
12139 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12140 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12141 return omit_one_operand (type, integer_one_node, arg1);
12142 /* X || !X is always true. */
12143 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12144 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12145 return omit_one_operand (type, integer_one_node, arg0);
12149 case TRUTH_XOR_EXPR:
12150 /* If the second arg is constant zero, drop it. */
12151 if (integer_zerop (arg1))
12152 return non_lvalue (fold_convert (type, arg0));
12153 /* If the second arg is constant true, this is a logical inversion. */
12154 if (integer_onep (arg1))
12156 /* Only call invert_truthvalue if operand is a truth value. */
12157 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12158 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12160 tem = invert_truthvalue (arg0);
12161 return non_lvalue (fold_convert (type, tem));
12163 /* Identical arguments cancel to zero. */
12164 if (operand_equal_p (arg0, arg1, 0))
12165 return omit_one_operand (type, integer_zero_node, arg0);
12167 /* !X ^ X is always true. */
12168 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12169 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12170 return omit_one_operand (type, integer_one_node, arg1);
12172 /* X ^ !X is always true. */
12173 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12174 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12175 return omit_one_operand (type, integer_one_node, arg0);
12181 tem = fold_comparison (code, type, op0, op1);
12182 if (tem != NULL_TREE)
12185 /* bool_var != 0 becomes bool_var. */
12186 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12187 && code == NE_EXPR)
12188 return non_lvalue (fold_convert (type, arg0));
12190 /* bool_var == 1 becomes bool_var. */
12191 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12192 && code == EQ_EXPR)
12193 return non_lvalue (fold_convert (type, arg0));
12195 /* bool_var != 1 becomes !bool_var. */
12196 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12197 && code == NE_EXPR)
12198 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12200 /* bool_var == 0 becomes !bool_var. */
12201 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12202 && code == EQ_EXPR)
12203 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12205 /* If this is an equality comparison of the address of two non-weak,
12206 unaliased symbols neither of which are extern (since we do not
12207 have access to attributes for externs), then we know the result. */
12208 if (TREE_CODE (arg0) == ADDR_EXPR
12209 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12210 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12211 && ! lookup_attribute ("alias",
12212 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12213 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12214 && TREE_CODE (arg1) == ADDR_EXPR
12215 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12216 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12217 && ! lookup_attribute ("alias",
12218 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12219 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12221 /* We know that we're looking at the address of two
12222 non-weak, unaliased, static _DECL nodes.
12224 It is both wasteful and incorrect to call operand_equal_p
12225 to compare the two ADDR_EXPR nodes. It is wasteful in that
12226 all we need to do is test pointer equality for the arguments
12227 to the two ADDR_EXPR nodes. It is incorrect to use
12228 operand_equal_p as that function is NOT equivalent to a
12229 C equality test. It can in fact return false for two
12230 objects which would test as equal using the C equality
12232 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12233 return constant_boolean_node (equal
12234 ? code == EQ_EXPR : code != EQ_EXPR,
12238 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12239 a MINUS_EXPR of a constant, we can convert it into a comparison with
12240 a revised constant as long as no overflow occurs. */
12241 if (TREE_CODE (arg1) == INTEGER_CST
12242 && (TREE_CODE (arg0) == PLUS_EXPR
12243 || TREE_CODE (arg0) == MINUS_EXPR)
12244 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12245 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12246 ? MINUS_EXPR : PLUS_EXPR,
12247 fold_convert (TREE_TYPE (arg0), arg1),
12248 TREE_OPERAND (arg0, 1), 0))
12249 && !TREE_OVERFLOW (tem))
12250 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12252 /* Similarly for a NEGATE_EXPR. */
12253 if (TREE_CODE (arg0) == NEGATE_EXPR
12254 && TREE_CODE (arg1) == INTEGER_CST
12255 && 0 != (tem = negate_expr (arg1))
12256 && TREE_CODE (tem) == INTEGER_CST
12257 && !TREE_OVERFLOW (tem))
12258 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12260 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12261 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12262 && TREE_CODE (arg1) == INTEGER_CST
12263 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12264 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12265 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
12266 fold_convert (TREE_TYPE (arg0), arg1),
12267 TREE_OPERAND (arg0, 1)));
12269 /* Transform comparisons of the form X +- C CMP X. */
12270 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12271 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12272 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12273 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12274 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12276 tree cst = TREE_OPERAND (arg0, 1);
12278 if (code == EQ_EXPR
12279 && !integer_zerop (cst))
12280 return omit_two_operands (type, boolean_false_node,
12281 TREE_OPERAND (arg0, 0), arg1);
12283 return omit_two_operands (type, boolean_true_node,
12284 TREE_OPERAND (arg0, 0), arg1);
12287 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12288 for !=. Don't do this for ordered comparisons due to overflow. */
12289 if (TREE_CODE (arg0) == MINUS_EXPR
12290 && integer_zerop (arg1))
12291 return fold_build2 (code, type,
12292 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12294 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12295 if (TREE_CODE (arg0) == ABS_EXPR
12296 && (integer_zerop (arg1) || real_zerop (arg1)))
12297 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12299 /* If this is an EQ or NE comparison with zero and ARG0 is
12300 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12301 two operations, but the latter can be done in one less insn
12302 on machines that have only two-operand insns or on which a
12303 constant cannot be the first operand. */
12304 if (TREE_CODE (arg0) == BIT_AND_EXPR
12305 && integer_zerop (arg1))
12307 tree arg00 = TREE_OPERAND (arg0, 0);
12308 tree arg01 = TREE_OPERAND (arg0, 1);
12309 if (TREE_CODE (arg00) == LSHIFT_EXPR
12310 && integer_onep (TREE_OPERAND (arg00, 0)))
12312 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12313 arg01, TREE_OPERAND (arg00, 1));
12314 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12315 build_int_cst (TREE_TYPE (arg0), 1));
12316 return fold_build2 (code, type,
12317 fold_convert (TREE_TYPE (arg1), tem), arg1);
12319 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12320 && integer_onep (TREE_OPERAND (arg01, 0)))
12322 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12323 arg00, TREE_OPERAND (arg01, 1));
12324 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12325 build_int_cst (TREE_TYPE (arg0), 1));
12326 return fold_build2 (code, type,
12327 fold_convert (TREE_TYPE (arg1), tem), arg1);
12331 /* If this is an NE or EQ comparison of zero against the result of a
12332 signed MOD operation whose second operand is a power of 2, make
12333 the MOD operation unsigned since it is simpler and equivalent. */
12334 if (integer_zerop (arg1)
12335 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12336 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12337 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12338 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12339 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12340 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12342 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12343 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12344 fold_convert (newtype,
12345 TREE_OPERAND (arg0, 0)),
12346 fold_convert (newtype,
12347 TREE_OPERAND (arg0, 1)));
12349 return fold_build2 (code, type, newmod,
12350 fold_convert (newtype, arg1));
12353 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12354 C1 is a valid shift constant, and C2 is a power of two, i.e.
12356 if (TREE_CODE (arg0) == BIT_AND_EXPR
12357 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12358 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12360 && integer_pow2p (TREE_OPERAND (arg0, 1))
12361 && integer_zerop (arg1))
12363 tree itype = TREE_TYPE (arg0);
12364 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12365 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12367 /* Check for a valid shift count. */
12368 if (TREE_INT_CST_HIGH (arg001) == 0
12369 && TREE_INT_CST_LOW (arg001) < prec)
12371 tree arg01 = TREE_OPERAND (arg0, 1);
12372 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12373 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12374 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12375 can be rewritten as (X & (C2 << C1)) != 0. */
12376 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12378 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12379 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12380 return fold_build2 (code, type, tem, arg1);
12382 /* Otherwise, for signed (arithmetic) shifts,
12383 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12384 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12385 else if (!TYPE_UNSIGNED (itype))
12386 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12387 arg000, build_int_cst (itype, 0));
12388 /* Otherwise, for unsigned (logical) shifts,
12389 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12390 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12392 return omit_one_operand (type,
12393 code == EQ_EXPR ? integer_one_node
12394 : integer_zero_node,
12399 /* If this is an NE comparison of zero with an AND of one, remove the
12400 comparison since the AND will give the correct value. */
12401 if (code == NE_EXPR
12402 && integer_zerop (arg1)
12403 && TREE_CODE (arg0) == BIT_AND_EXPR
12404 && integer_onep (TREE_OPERAND (arg0, 1)))
12405 return fold_convert (type, arg0);
12407 /* If we have (A & C) == C where C is a power of 2, convert this into
12408 (A & C) != 0. Similarly for NE_EXPR. */
12409 if (TREE_CODE (arg0) == BIT_AND_EXPR
12410 && integer_pow2p (TREE_OPERAND (arg0, 1))
12411 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12412 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12413 arg0, fold_convert (TREE_TYPE (arg0),
12414 integer_zero_node));
12416 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12417 bit, then fold the expression into A < 0 or A >= 0. */
12418 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12422 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12423 Similarly for NE_EXPR. */
12424 if (TREE_CODE (arg0) == BIT_AND_EXPR
12425 && TREE_CODE (arg1) == INTEGER_CST
12426 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12428 tree notc = fold_build1 (BIT_NOT_EXPR,
12429 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12430 TREE_OPERAND (arg0, 1));
12431 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12433 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12434 if (integer_nonzerop (dandnotc))
12435 return omit_one_operand (type, rslt, arg0);
12438 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12439 Similarly for NE_EXPR. */
12440 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12441 && TREE_CODE (arg1) == INTEGER_CST
12442 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12444 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12445 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12446 TREE_OPERAND (arg0, 1), notd);
12447 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12448 if (integer_nonzerop (candnotd))
12449 return omit_one_operand (type, rslt, arg0);
12452 /* If this is a comparison of a field, we may be able to simplify it. */
12453 if ((TREE_CODE (arg0) == COMPONENT_REF
12454 || TREE_CODE (arg0) == BIT_FIELD_REF)
12455 /* Handle the constant case even without -O
12456 to make sure the warnings are given. */
12457 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12459 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12464 /* Optimize comparisons of strlen vs zero to a compare of the
12465 first character of the string vs zero. To wit,
12466 strlen(ptr) == 0 => *ptr == 0
12467 strlen(ptr) != 0 => *ptr != 0
12468 Other cases should reduce to one of these two (or a constant)
12469 due to the return value of strlen being unsigned. */
12470 if (TREE_CODE (arg0) == CALL_EXPR
12471 && integer_zerop (arg1))
12473 tree fndecl = get_callee_fndecl (arg0);
12476 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12477 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12478 && call_expr_nargs (arg0) == 1
12479 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12481 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12482 return fold_build2 (code, type, iref,
12483 build_int_cst (TREE_TYPE (iref), 0));
12487 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12488 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12489 if (TREE_CODE (arg0) == RSHIFT_EXPR
12490 && integer_zerop (arg1)
12491 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12493 tree arg00 = TREE_OPERAND (arg0, 0);
12494 tree arg01 = TREE_OPERAND (arg0, 1);
12495 tree itype = TREE_TYPE (arg00);
12496 if (TREE_INT_CST_HIGH (arg01) == 0
12497 && TREE_INT_CST_LOW (arg01)
12498 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12500 if (TYPE_UNSIGNED (itype))
12502 itype = signed_type_for (itype);
12503 arg00 = fold_convert (itype, arg00);
12505 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12506 type, arg00, build_int_cst (itype, 0));
12510 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12511 if (integer_zerop (arg1)
12512 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12513 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12514 TREE_OPERAND (arg0, 1));
12516 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12517 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12518 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12519 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12520 build_int_cst (TREE_TYPE (arg1), 0));
12521 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12522 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12523 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12524 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12525 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12526 build_int_cst (TREE_TYPE (arg1), 0));
12528 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12529 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12530 && TREE_CODE (arg1) == INTEGER_CST
12531 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12532 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12533 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12534 TREE_OPERAND (arg0, 1), arg1));
12536 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12537 (X & C) == 0 when C is a single bit. */
12538 if (TREE_CODE (arg0) == BIT_AND_EXPR
12539 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12540 && integer_zerop (arg1)
12541 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12543 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12544 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12545 TREE_OPERAND (arg0, 1));
12546 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12550 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12551 constant C is a power of two, i.e. a single bit. */
12552 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12553 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12554 && integer_zerop (arg1)
12555 && integer_pow2p (TREE_OPERAND (arg0, 1))
12556 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12557 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12559 tree arg00 = TREE_OPERAND (arg0, 0);
12560 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12561 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12564 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12565 when C is a power of two, i.e. a single bit. */
12566 if (TREE_CODE (arg0) == BIT_AND_EXPR
12567 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12568 && integer_zerop (arg1)
12569 && integer_pow2p (TREE_OPERAND (arg0, 1))
12570 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12571 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12573 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12574 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12575 arg000, TREE_OPERAND (arg0, 1));
12576 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12577 tem, build_int_cst (TREE_TYPE (tem), 0));
12580 if (integer_zerop (arg1)
12581 && tree_expr_nonzero_p (arg0))
12583 tree res = constant_boolean_node (code==NE_EXPR, type);
12584 return omit_one_operand (type, res, arg0);
12587 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12588 if (TREE_CODE (arg0) == NEGATE_EXPR
12589 && TREE_CODE (arg1) == NEGATE_EXPR)
12590 return fold_build2 (code, type,
12591 TREE_OPERAND (arg0, 0),
12592 TREE_OPERAND (arg1, 0));
12594 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12595 if (TREE_CODE (arg0) == BIT_AND_EXPR
12596 && TREE_CODE (arg1) == BIT_AND_EXPR)
12598 tree arg00 = TREE_OPERAND (arg0, 0);
12599 tree arg01 = TREE_OPERAND (arg0, 1);
12600 tree arg10 = TREE_OPERAND (arg1, 0);
12601 tree arg11 = TREE_OPERAND (arg1, 1);
12602 tree itype = TREE_TYPE (arg0);
12604 if (operand_equal_p (arg01, arg11, 0))
12605 return fold_build2 (code, type,
12606 fold_build2 (BIT_AND_EXPR, itype,
12607 fold_build2 (BIT_XOR_EXPR, itype,
12610 build_int_cst (itype, 0));
12612 if (operand_equal_p (arg01, arg10, 0))
12613 return fold_build2 (code, type,
12614 fold_build2 (BIT_AND_EXPR, itype,
12615 fold_build2 (BIT_XOR_EXPR, itype,
12618 build_int_cst (itype, 0));
12620 if (operand_equal_p (arg00, arg11, 0))
12621 return fold_build2 (code, type,
12622 fold_build2 (BIT_AND_EXPR, itype,
12623 fold_build2 (BIT_XOR_EXPR, itype,
12626 build_int_cst (itype, 0));
12628 if (operand_equal_p (arg00, arg10, 0))
12629 return fold_build2 (code, type,
12630 fold_build2 (BIT_AND_EXPR, itype,
12631 fold_build2 (BIT_XOR_EXPR, itype,
12634 build_int_cst (itype, 0));
12637 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12638 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12640 tree arg00 = TREE_OPERAND (arg0, 0);
12641 tree arg01 = TREE_OPERAND (arg0, 1);
12642 tree arg10 = TREE_OPERAND (arg1, 0);
12643 tree arg11 = TREE_OPERAND (arg1, 1);
12644 tree itype = TREE_TYPE (arg0);
12646 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12647 operand_equal_p guarantees no side-effects so we don't need
12648 to use omit_one_operand on Z. */
12649 if (operand_equal_p (arg01, arg11, 0))
12650 return fold_build2 (code, type, arg00, arg10);
12651 if (operand_equal_p (arg01, arg10, 0))
12652 return fold_build2 (code, type, arg00, arg11);
12653 if (operand_equal_p (arg00, arg11, 0))
12654 return fold_build2 (code, type, arg01, arg10);
12655 if (operand_equal_p (arg00, arg10, 0))
12656 return fold_build2 (code, type, arg01, arg11);
12658 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12659 if (TREE_CODE (arg01) == INTEGER_CST
12660 && TREE_CODE (arg11) == INTEGER_CST)
12661 return fold_build2 (code, type,
12662 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12663 fold_build2 (BIT_XOR_EXPR, itype,
12668 /* Attempt to simplify equality/inequality comparisons of complex
12669 values. Only lower the comparison if the result is known or
12670 can be simplified to a single scalar comparison. */
12671 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12672 || TREE_CODE (arg0) == COMPLEX_CST)
12673 && (TREE_CODE (arg1) == COMPLEX_EXPR
12674 || TREE_CODE (arg1) == COMPLEX_CST))
12676 tree real0, imag0, real1, imag1;
12679 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12681 real0 = TREE_OPERAND (arg0, 0);
12682 imag0 = TREE_OPERAND (arg0, 1);
12686 real0 = TREE_REALPART (arg0);
12687 imag0 = TREE_IMAGPART (arg0);
12690 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12692 real1 = TREE_OPERAND (arg1, 0);
12693 imag1 = TREE_OPERAND (arg1, 1);
12697 real1 = TREE_REALPART (arg1);
12698 imag1 = TREE_IMAGPART (arg1);
12701 rcond = fold_binary (code, type, real0, real1);
12702 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12704 if (integer_zerop (rcond))
12706 if (code == EQ_EXPR)
12707 return omit_two_operands (type, boolean_false_node,
12709 return fold_build2 (NE_EXPR, type, imag0, imag1);
12713 if (code == NE_EXPR)
12714 return omit_two_operands (type, boolean_true_node,
12716 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12720 icond = fold_binary (code, type, imag0, imag1);
12721 if (icond && TREE_CODE (icond) == INTEGER_CST)
12723 if (integer_zerop (icond))
12725 if (code == EQ_EXPR)
12726 return omit_two_operands (type, boolean_false_node,
12728 return fold_build2 (NE_EXPR, type, real0, real1);
12732 if (code == NE_EXPR)
12733 return omit_two_operands (type, boolean_true_node,
12735 return fold_build2 (EQ_EXPR, type, real0, real1);
12746 tem = fold_comparison (code, type, op0, op1);
12747 if (tem != NULL_TREE)
12750 /* Transform comparisons of the form X +- C CMP X. */
12751 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12752 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12753 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12754 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12755 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12756 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12758 tree arg01 = TREE_OPERAND (arg0, 1);
12759 enum tree_code code0 = TREE_CODE (arg0);
12762 if (TREE_CODE (arg01) == REAL_CST)
12763 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12765 is_positive = tree_int_cst_sgn (arg01);
12767 /* (X - c) > X becomes false. */
12768 if (code == GT_EXPR
12769 && ((code0 == MINUS_EXPR && is_positive >= 0)
12770 || (code0 == PLUS_EXPR && is_positive <= 0)))
12772 if (TREE_CODE (arg01) == INTEGER_CST
12773 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12774 fold_overflow_warning (("assuming signed overflow does not "
12775 "occur when assuming that (X - c) > X "
12776 "is always false"),
12777 WARN_STRICT_OVERFLOW_ALL);
12778 return constant_boolean_node (0, type);
12781 /* Likewise (X + c) < X becomes false. */
12782 if (code == LT_EXPR
12783 && ((code0 == PLUS_EXPR && is_positive >= 0)
12784 || (code0 == MINUS_EXPR && is_positive <= 0)))
12786 if (TREE_CODE (arg01) == INTEGER_CST
12787 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12788 fold_overflow_warning (("assuming signed overflow does not "
12789 "occur when assuming that "
12790 "(X + c) < X is always false"),
12791 WARN_STRICT_OVERFLOW_ALL);
12792 return constant_boolean_node (0, type);
12795 /* Convert (X - c) <= X to true. */
12796 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12798 && ((code0 == MINUS_EXPR && is_positive >= 0)
12799 || (code0 == PLUS_EXPR && is_positive <= 0)))
12801 if (TREE_CODE (arg01) == INTEGER_CST
12802 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12803 fold_overflow_warning (("assuming signed overflow does not "
12804 "occur when assuming that "
12805 "(X - c) <= X is always true"),
12806 WARN_STRICT_OVERFLOW_ALL);
12807 return constant_boolean_node (1, type);
12810 /* Convert (X + c) >= X to true. */
12811 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12813 && ((code0 == PLUS_EXPR && is_positive >= 0)
12814 || (code0 == MINUS_EXPR && is_positive <= 0)))
12816 if (TREE_CODE (arg01) == INTEGER_CST
12817 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12818 fold_overflow_warning (("assuming signed overflow does not "
12819 "occur when assuming that "
12820 "(X + c) >= X is always true"),
12821 WARN_STRICT_OVERFLOW_ALL);
12822 return constant_boolean_node (1, type);
12825 if (TREE_CODE (arg01) == INTEGER_CST)
12827 /* Convert X + c > X and X - c < X to true for integers. */
12828 if (code == GT_EXPR
12829 && ((code0 == PLUS_EXPR && is_positive > 0)
12830 || (code0 == MINUS_EXPR && is_positive < 0)))
12832 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12833 fold_overflow_warning (("assuming signed overflow does "
12834 "not occur when assuming that "
12835 "(X + c) > X is always true"),
12836 WARN_STRICT_OVERFLOW_ALL);
12837 return constant_boolean_node (1, type);
12840 if (code == LT_EXPR
12841 && ((code0 == MINUS_EXPR && is_positive > 0)
12842 || (code0 == PLUS_EXPR && is_positive < 0)))
12844 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12845 fold_overflow_warning (("assuming signed overflow does "
12846 "not occur when assuming that "
12847 "(X - c) < X is always true"),
12848 WARN_STRICT_OVERFLOW_ALL);
12849 return constant_boolean_node (1, type);
12852 /* Convert X + c <= X and X - c >= X to false for integers. */
12853 if (code == LE_EXPR
12854 && ((code0 == PLUS_EXPR && is_positive > 0)
12855 || (code0 == MINUS_EXPR && is_positive < 0)))
12857 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12858 fold_overflow_warning (("assuming signed overflow does "
12859 "not occur when assuming that "
12860 "(X + c) <= X is always false"),
12861 WARN_STRICT_OVERFLOW_ALL);
12862 return constant_boolean_node (0, type);
12865 if (code == GE_EXPR
12866 && ((code0 == MINUS_EXPR && is_positive > 0)
12867 || (code0 == PLUS_EXPR && is_positive < 0)))
12869 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12870 fold_overflow_warning (("assuming signed overflow does "
12871 "not occur when assuming that "
12872 "(X - c) >= X is always false"),
12873 WARN_STRICT_OVERFLOW_ALL);
12874 return constant_boolean_node (0, type);
12879 /* Comparisons with the highest or lowest possible integer of
12880 the specified precision will have known values. */
12882 tree arg1_type = TREE_TYPE (arg1);
12883 unsigned int width = TYPE_PRECISION (arg1_type);
12885 if (TREE_CODE (arg1) == INTEGER_CST
12886 && width <= 2 * HOST_BITS_PER_WIDE_INT
12887 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12889 HOST_WIDE_INT signed_max_hi;
12890 unsigned HOST_WIDE_INT signed_max_lo;
12891 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12893 if (width <= HOST_BITS_PER_WIDE_INT)
12895 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12900 if (TYPE_UNSIGNED (arg1_type))
12902 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12908 max_lo = signed_max_lo;
12909 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12915 width -= HOST_BITS_PER_WIDE_INT;
12916 signed_max_lo = -1;
12917 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12922 if (TYPE_UNSIGNED (arg1_type))
12924 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12929 max_hi = signed_max_hi;
12930 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12934 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12935 && TREE_INT_CST_LOW (arg1) == max_lo)
12939 return omit_one_operand (type, integer_zero_node, arg0);
12942 return fold_build2 (EQ_EXPR, type, op0, op1);
12945 return omit_one_operand (type, integer_one_node, arg0);
12948 return fold_build2 (NE_EXPR, type, op0, op1);
12950 /* The GE_EXPR and LT_EXPR cases above are not normally
12951 reached because of previous transformations. */
12956 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12958 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12962 arg1 = const_binop (PLUS_EXPR, arg1,
12963 build_int_cst (TREE_TYPE (arg1), 1), 0);
12964 return fold_build2 (EQ_EXPR, type,
12965 fold_convert (TREE_TYPE (arg1), arg0),
12968 arg1 = const_binop (PLUS_EXPR, arg1,
12969 build_int_cst (TREE_TYPE (arg1), 1), 0);
12970 return fold_build2 (NE_EXPR, type,
12971 fold_convert (TREE_TYPE (arg1), arg0),
12976 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12978 && TREE_INT_CST_LOW (arg1) == min_lo)
12982 return omit_one_operand (type, integer_zero_node, arg0);
12985 return fold_build2 (EQ_EXPR, type, op0, op1);
12988 return omit_one_operand (type, integer_one_node, arg0);
12991 return fold_build2 (NE_EXPR, type, op0, op1);
12996 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12998 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13002 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13003 return fold_build2 (NE_EXPR, type,
13004 fold_convert (TREE_TYPE (arg1), arg0),
13007 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13008 return fold_build2 (EQ_EXPR, type,
13009 fold_convert (TREE_TYPE (arg1), arg0),
13015 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13016 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13017 && TYPE_UNSIGNED (arg1_type)
13018 /* We will flip the signedness of the comparison operator
13019 associated with the mode of arg1, so the sign bit is
13020 specified by this mode. Check that arg1 is the signed
13021 max associated with this sign bit. */
13022 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13023 /* signed_type does not work on pointer types. */
13024 && INTEGRAL_TYPE_P (arg1_type))
13026 /* The following case also applies to X < signed_max+1
13027 and X >= signed_max+1 because of previous transformations. */
13028 if (code == LE_EXPR || code == GT_EXPR)
13031 st = signed_type_for (TREE_TYPE (arg1));
13032 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
13033 type, fold_convert (st, arg0),
13034 build_int_cst (st, 0));
13040 /* If we are comparing an ABS_EXPR with a constant, we can
13041 convert all the cases into explicit comparisons, but they may
13042 well not be faster than doing the ABS and one comparison.
13043 But ABS (X) <= C is a range comparison, which becomes a subtraction
13044 and a comparison, and is probably faster. */
13045 if (code == LE_EXPR
13046 && TREE_CODE (arg1) == INTEGER_CST
13047 && TREE_CODE (arg0) == ABS_EXPR
13048 && ! TREE_SIDE_EFFECTS (arg0)
13049 && (0 != (tem = negate_expr (arg1)))
13050 && TREE_CODE (tem) == INTEGER_CST
13051 && !TREE_OVERFLOW (tem))
13052 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13053 build2 (GE_EXPR, type,
13054 TREE_OPERAND (arg0, 0), tem),
13055 build2 (LE_EXPR, type,
13056 TREE_OPERAND (arg0, 0), arg1));
13058 /* Convert ABS_EXPR<x> >= 0 to true. */
13059 strict_overflow_p = false;
13060 if (code == GE_EXPR
13061 && (integer_zerop (arg1)
13062 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13063 && real_zerop (arg1)))
13064 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13066 if (strict_overflow_p)
13067 fold_overflow_warning (("assuming signed overflow does not occur "
13068 "when simplifying comparison of "
13069 "absolute value and zero"),
13070 WARN_STRICT_OVERFLOW_CONDITIONAL);
13071 return omit_one_operand (type, integer_one_node, arg0);
13074 /* Convert ABS_EXPR<x> < 0 to false. */
13075 strict_overflow_p = false;
13076 if (code == LT_EXPR
13077 && (integer_zerop (arg1) || real_zerop (arg1))
13078 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13080 if (strict_overflow_p)
13081 fold_overflow_warning (("assuming signed overflow does not occur "
13082 "when simplifying comparison of "
13083 "absolute value and zero"),
13084 WARN_STRICT_OVERFLOW_CONDITIONAL);
13085 return omit_one_operand (type, integer_zero_node, arg0);
13088 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13089 and similarly for >= into !=. */
13090 if ((code == LT_EXPR || code == GE_EXPR)
13091 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13092 && TREE_CODE (arg1) == LSHIFT_EXPR
13093 && integer_onep (TREE_OPERAND (arg1, 0)))
13094 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13095 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13096 TREE_OPERAND (arg1, 1)),
13097 build_int_cst (TREE_TYPE (arg0), 0));
13099 if ((code == LT_EXPR || code == GE_EXPR)
13100 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13101 && CONVERT_EXPR_P (arg1)
13102 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13103 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13105 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13106 fold_convert (TREE_TYPE (arg0),
13107 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13108 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13110 build_int_cst (TREE_TYPE (arg0), 0));
13114 case UNORDERED_EXPR:
13122 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13124 t1 = fold_relational_const (code, type, arg0, arg1);
13125 if (t1 != NULL_TREE)
13129 /* If the first operand is NaN, the result is constant. */
13130 if (TREE_CODE (arg0) == REAL_CST
13131 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13132 && (code != LTGT_EXPR || ! flag_trapping_math))
13134 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13135 ? integer_zero_node
13136 : integer_one_node;
13137 return omit_one_operand (type, t1, arg1);
13140 /* If the second operand is NaN, the result is constant. */
13141 if (TREE_CODE (arg1) == REAL_CST
13142 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13143 && (code != LTGT_EXPR || ! flag_trapping_math))
13145 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13146 ? integer_zero_node
13147 : integer_one_node;
13148 return omit_one_operand (type, t1, arg0);
13151 /* Simplify unordered comparison of something with itself. */
13152 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13153 && operand_equal_p (arg0, arg1, 0))
13154 return constant_boolean_node (1, type);
13156 if (code == LTGT_EXPR
13157 && !flag_trapping_math
13158 && operand_equal_p (arg0, arg1, 0))
13159 return constant_boolean_node (0, type);
13161 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13163 tree targ0 = strip_float_extensions (arg0);
13164 tree targ1 = strip_float_extensions (arg1);
13165 tree newtype = TREE_TYPE (targ0);
13167 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13168 newtype = TREE_TYPE (targ1);
13170 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13171 return fold_build2 (code, type, fold_convert (newtype, targ0),
13172 fold_convert (newtype, targ1));
13177 case COMPOUND_EXPR:
13178 /* When pedantic, a compound expression can be neither an lvalue
13179 nor an integer constant expression. */
13180 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13182 /* Don't let (0, 0) be null pointer constant. */
13183 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13184 : fold_convert (type, arg1);
13185 return pedantic_non_lvalue (tem);
13188 if ((TREE_CODE (arg0) == REAL_CST
13189 && TREE_CODE (arg1) == REAL_CST)
13190 || (TREE_CODE (arg0) == INTEGER_CST
13191 && TREE_CODE (arg1) == INTEGER_CST))
13192 return build_complex (type, arg0, arg1);
13196 /* An ASSERT_EXPR should never be passed to fold_binary. */
13197 gcc_unreachable ();
13201 } /* switch (code) */
13204 /* Callback for walk_tree, looking for LABEL_EXPR.
13205 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
13206 Do not check the sub-tree of GOTO_EXPR. */
/* walk_tree callback for contains_label_p below.  Per the comment
   above: returns *TP when it is a LABEL_EXPR, NULL_TREE otherwise,
   and does not descend into GOTO_EXPR sub-trees.
   NOTE(review): extraction dropped several original lines here
   (return type, braces, case labels, return statements).  */
13209 contains_label_1 (tree *tp,
13210 int *walk_subtrees,
13211 void *data ATTRIBUTE_UNUSED)
13213 switch (TREE_CODE (*tp))
/* Prune this sub-tree from the walk (GOTO_EXPR operands are skipped).  */
13218 *walk_subtrees = 0;
13225 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
13226 accessible from outside the sub-tree. Returns NULL_TREE if no
13227 addressable label is found. */
/* Wrapper: walk ST with the contains_label_1 callback; nonzero result
   means a reachable LABEL_EXPR was found somewhere in ST.
   NOTE(review): the return-type line and braces were dropped by
   extraction.  */
13230 contains_label_p (tree st)
13232 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
13235 /* Fold a ternary expression of code CODE and type TYPE with operands
13236 OP0, OP1, and OP2. Return the folded expression if folding is
13237 successful. Otherwise, return NULL_TREE. */
/* Fold a ternary expression (see comment above): CODE of type TYPE with
   operands OP0/OP1/OP2; returns the folded tree or NULL_TREE.
   NOTE(review): extraction dropped many original lines throughout this
   function (case labels, braces, several sub-expressions); the text
   below is kept byte-identical, with review comments interleaved.  */
13240 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
13243 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13244 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* fold_ternary only handles true 3-operand expression codes.  */
13246 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13247 && TREE_CODE_LENGTH (code) == 3);
13249 /* Strip any conversions that don't change the mode. This is safe
13250 for every expression, except for a comparison expression because
13251 its signedness is derived from its operands. So, in the latter
13252 case, only strip conversions that don't change the signedness.
13254 Note that this is done as an internal manipulation within the
13255 constant folder, in order to find the simplest representation of
13256 the arguments so that their form can be studied. In any cases,
13257 the appropriate type conversions should be put back in the tree
13258 that will get out of the constant folder. */
/* COMPONENT_REF of a constant CONSTRUCTOR: look the field up directly.  */
13273 case COMPONENT_REF:
13274 if (TREE_CODE (arg0) == CONSTRUCTOR
13275 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13277 unsigned HOST_WIDE_INT idx;
13279 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13286 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13287 so all simple results must be passed through pedantic_non_lvalue. */
/* COND_EXPR with a constant condition: select the live branch.  */
13288 if (TREE_CODE (arg0) == INTEGER_CST)
13290 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13291 tem = integer_zerop (arg0) ? op2 : op1;
13292 /* Only optimize constant conditions when the selected branch
13293 has the same type as the COND_EXPR. This avoids optimizing
13294 away "c ? x : throw", where the throw has a void type.
13295 Avoid throwing away that operand which contains label. */
13296 if ((!TREE_SIDE_EFFECTS (unused_op)
13297 || !contains_label_p (unused_op))
13298 && (! VOID_TYPE_P (TREE_TYPE (tem))
13299 || VOID_TYPE_P (type)))
13300 return pedantic_non_lvalue (tem);
13303 if (operand_equal_p (arg1, op2, 0))
13304 return pedantic_omit_one_operand (type, arg1, arg0);
13306 /* If we have A op B ? A : C, we may be able to convert this to a
13307 simpler expression, depending on the operation and the values
13308 of B and C. Signed zeros prevent all of these transformations,
13309 for reasons given above each one.
13311 Also try swapping the arguments and inverting the conditional. */
13312 if (COMPARISON_CLASS_P (arg0)
13313 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13314 arg1, TREE_OPERAND (arg0, 1))
13315 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13317 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
/* Same transform with the condition inverted and arms swapped.  */
13322 if (COMPARISON_CLASS_P (arg0)
13323 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13325 TREE_OPERAND (arg0, 1))
13326 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13328 tem = fold_truth_not_expr (arg0);
13329 if (tem && COMPARISON_CLASS_P (tem))
13331 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13337 /* If the second operand is simpler than the third, swap them
13338 since that produces better jump optimization results. */
13339 if (truth_value_p (TREE_CODE (arg0))
13340 && tree_swap_operands_p (op1, op2, false))
13342 /* See if this can be inverted. If it can't, possibly because
13343 it was a floating-point inequality comparison, don't do
13345 tem = fold_truth_not_expr (arg0);
13347 return fold_build3 (code, type, tem, op2, op1);
13350 /* Convert A ? 1 : 0 to simply A. */
13351 if (integer_onep (op1)
13352 && integer_zerop (op2)
13353 /* If we try to convert OP0 to our type, the
13354 call to fold will try to move the conversion inside
13355 a COND, which will recurse. In that case, the COND_EXPR
13356 is probably the best choice, so leave it alone. */
13357 && type == TREE_TYPE (arg0))
13358 return pedantic_non_lvalue (arg0);
13360 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13361 over COND_EXPR in cases such as floating point comparisons. */
13362 if (integer_zerop (op1)
13363 && integer_onep (op2)
13364 && truth_value_p (TREE_CODE (arg0)))
13365 return pedantic_non_lvalue (fold_convert (type,
13366 invert_truthvalue (arg0)));
13368 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13369 if (TREE_CODE (arg0) == LT_EXPR
13370 && integer_zerop (TREE_OPERAND (arg0, 1))
13371 && integer_zerop (op2)
13372 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13374 /* sign_bit_p only checks ARG1 bits within A's precision.
13375 If <sign bit of A> has wider type than A, bits outside
13376 of A's precision in <sign bit of A> need to be checked.
13377 If they are all 0, this optimization needs to be done
13378 in unsigned A's type, if they are all 1 in signed A's type,
13379 otherwise this can't be done. */
13380 if (TYPE_PRECISION (TREE_TYPE (tem))
13381 < TYPE_PRECISION (TREE_TYPE (arg1))
13382 && TYPE_PRECISION (TREE_TYPE (tem))
13383 < TYPE_PRECISION (type))
13385 unsigned HOST_WIDE_INT mask_lo;
13386 HOST_WIDE_INT mask_hi;
13387 int inner_width, outer_width;
13390 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13391 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13392 if (outer_width > TYPE_PRECISION (type))
13393 outer_width = TYPE_PRECISION (type);
/* Build a two-HOST_WIDE_INT mask covering bits [inner_width,
   outer_width) — the bits of ARG1 outside A's precision.  */
13395 if (outer_width > HOST_BITS_PER_WIDE_INT)
13397 mask_hi = ((unsigned HOST_WIDE_INT) -1
13398 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13404 mask_lo = ((unsigned HOST_WIDE_INT) -1
13405 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13407 if (inner_width > HOST_BITS_PER_WIDE_INT)
13409 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13410 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13414 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13415 >> (HOST_BITS_PER_WIDE_INT - inner_width));
/* All-ones outside A's precision: fold in A's signed type.  */
13417 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13418 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13420 tem_type = signed_type_for (TREE_TYPE (tem));
13421 tem = fold_convert (tem_type, tem);
/* All-zeros outside A's precision: fold in A's unsigned type.  */
13423 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13424 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13426 tem_type = unsigned_type_for (TREE_TYPE (tem));
13427 tem = fold_convert (tem_type, tem);
13434 return fold_convert (type,
13435 fold_build2 (BIT_AND_EXPR,
13436 TREE_TYPE (tem), tem,
13437 fold_convert (TREE_TYPE (tem),
13441 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13442 already handled above. */
13443 if (TREE_CODE (arg0) == BIT_AND_EXPR
13444 && integer_onep (TREE_OPERAND (arg0, 1))
13445 && integer_zerop (op2)
13446 && integer_pow2p (arg1))
13448 tree tem = TREE_OPERAND (arg0, 0);
13450 if (TREE_CODE (tem) == RSHIFT_EXPR
13451 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13452 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13453 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13454 return fold_build2 (BIT_AND_EXPR, type,
13455 TREE_OPERAND (tem, 0), arg1);
13458 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13459 is probably obsolete because the first operand should be a
13460 truth value (that's why we have the two cases above), but let's
13461 leave it in until we can confirm this for all front-ends. */
13462 if (integer_zerop (op2)
13463 && TREE_CODE (arg0) == NE_EXPR
13464 && integer_zerop (TREE_OPERAND (arg0, 1))
13465 && integer_pow2p (arg1)
13466 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13467 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13468 arg1, OEP_ONLY_CONST))
13469 return pedantic_non_lvalue (fold_convert (type,
13470 TREE_OPERAND (arg0, 0)));
13472 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13473 if (integer_zerop (op2)
13474 && truth_value_p (TREE_CODE (arg0))
13475 && truth_value_p (TREE_CODE (arg1)))
13476 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13477 fold_convert (type, arg0),
13480 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13481 if (integer_onep (op2)
13482 && truth_value_p (TREE_CODE (arg0))
13483 && truth_value_p (TREE_CODE (arg1)))
13485 /* Only perform transformation if ARG0 is easily inverted. */
13486 tem = fold_truth_not_expr (arg0);
13488 return fold_build2 (TRUTH_ORIF_EXPR, type,
13489 fold_convert (type, tem),
13493 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13494 if (integer_zerop (arg1)
13495 && truth_value_p (TREE_CODE (arg0))
13496 && truth_value_p (TREE_CODE (op2)))
13498 /* Only perform transformation if ARG0 is easily inverted. */
13499 tem = fold_truth_not_expr (arg0);
13501 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13502 fold_convert (type, tem),
13506 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13507 if (integer_onep (arg1)
13508 && truth_value_p (TREE_CODE (arg0))
13509 && truth_value_p (TREE_CODE (op2)))
13510 return fold_build2 (TRUTH_ORIF_EXPR, type,
13511 fold_convert (type, arg0),
13517 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13518 of fold_ternary on them. */
13519 gcc_unreachable ();
/* BIT_FIELD_REF of a constant vector: extract the selected element.  */
13521 case BIT_FIELD_REF:
13522 if ((TREE_CODE (arg0) == VECTOR_CST
13523 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13524 && type == TREE_TYPE (TREE_TYPE (arg0)))
13526 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13527 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13530 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13531 && (idx % width) == 0
/* Convert the bit offset into an element index (bounds-checked).  */
13532 && (idx = idx / width)
13533 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13535 tree elements = NULL_TREE;
13537 if (TREE_CODE (arg0) == VECTOR_CST)
13538 elements = TREE_VECTOR_CST_ELTS (arg0);
13541 unsigned HOST_WIDE_INT idx;
/* CONSTRUCTOR case: build the element list (in reverse).  */
13544 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13545 elements = tree_cons (NULL_TREE, value, elements);
13547 while (idx-- > 0 && elements)
13548 elements = TREE_CHAIN (elements);
13550 return TREE_VALUE (elements);
/* Element not present in the CONSTRUCTOR: it is implicitly zero.  */
13552 return fold_convert (type, integer_zero_node);
13556 /* A bit-field-ref that referenced the full argument can be stripped. */
13557 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13558 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13559 && integer_zerop (op2))
13560 return fold_convert (type, arg0);
13566 } /* switch (code) */
13569 /* Perform constant folding and related simplification of EXPR.
13570 The related simplifications include x*1 => x, x*0 => 0, etc.,
13571 and application of the associative law.
13572 NOP_EXPR conversions may be removed freely (as long as we
13573 are careful not to change the type of the overall expression).
13574 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13575 but we can constant-fold them if they have constant operands. */
13577 #ifdef ENABLE_FOLD_CHECKING
13578 # define fold(x) fold_1 (x)
13579 static tree fold_1 (tree);
/* Body of fold (renamed fold_1 under ENABLE_FOLD_CHECKING, see the
   #define above): dispatch EXPR to fold_unary/fold_binary/fold_ternary
   by operand count, with special handling for constants, CALL_EXPRs
   and ARRAY_REFs of constant CONSTRUCTORs.
   NOTE(review): the function's opening lines (#endif, signature,
   brace) and several case labels were dropped by extraction.  */
13585 const tree t = expr;
13586 enum tree_code code = TREE_CODE (t);
13587 enum tree_code_class kind = TREE_CODE_CLASS (code);
13590 /* Return right away if a constant. */
13591 if (kind == tcc_constant)
13594 /* CALL_EXPR-like objects with variable numbers of operands are
13595 treated specially. */
13596 if (kind == tcc_vl_exp)
13598 if (code == CALL_EXPR)
13600 tem = fold_call_expr (expr, false);
13601 return tem ? tem : expr;
/* Fixed-arity expressions: dispatch on operand count (1/2/3).  */
13606 if (IS_EXPR_CODE_CLASS (kind))
13608 tree type = TREE_TYPE (t);
13609 tree op0, op1, op2;
13611 switch (TREE_CODE_LENGTH (code))
13614 op0 = TREE_OPERAND (t, 0);
13615 tem = fold_unary (code, type, op0);
13616 return tem ? tem : expr;
13618 op0 = TREE_OPERAND (t, 0);
13619 op1 = TREE_OPERAND (t, 1);
13620 tem = fold_binary (code, type, op0, op1);
13621 return tem ? tem : expr;
13623 op0 = TREE_OPERAND (t, 0);
13624 op1 = TREE_OPERAND (t, 1);
13625 op2 = TREE_OPERAND (t, 2);
13626 tem = fold_ternary (code, type, op0, op1, op2);
13627 return tem ? tem : expr;
/* ARRAY_REF of a constant CONSTRUCTOR indexed by a constant:
   binary-search the (sorted) constructor element vector.  */
13637 tree op0 = TREE_OPERAND (t, 0);
13638 tree op1 = TREE_OPERAND (t, 1);
13640 if (TREE_CODE (op1) == INTEGER_CST
13641 && TREE_CODE (op0) == CONSTRUCTOR
13642 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13644 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13645 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13646 unsigned HOST_WIDE_INT begin = 0;
13648 /* Find a matching index by means of a binary search. */
13649 while (begin != end)
13651 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13652 tree index = VEC_index (constructor_elt, elts, middle)->index;
13654 if (TREE_CODE (index) == INTEGER_CST
13655 && tree_int_cst_lt (index, op1))
13656 begin = middle + 1;
13657 else if (TREE_CODE (index) == INTEGER_CST
13658 && tree_int_cst_lt (op1, index))
/* RANGE_EXPR index: compare OP1 against the range bounds.  */
13660 else if (TREE_CODE (index) == RANGE_EXPR
13661 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13662 begin = middle + 1;
13663 else if (TREE_CODE (index) == RANGE_EXPR
13664 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13667 return VEC_index (constructor_elt, elts, middle)->value;
/* CONST_DECL: fold its (constant) initializer instead.  */
13675 return fold (DECL_INITIAL (t));
13679 } /* switch (code) */
13682 #ifdef ENABLE_FOLD_CHECKING
13685 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13686 static void fold_check_failed (const_tree, const_tree);
13687 void print_fold_checksum (const_tree);
13689 /* When --enable-checking=fold, compute a digest of expr before
13690 and after actual fold call to see if fold did not accidentally
13691 change original expr. */
/* Checking wrapper body for fold: MD5-checksum EXPR, run fold_1, then
   re-checksum and abort via fold_check_failed on any difference.
   NOTE(review): the signature and brace lines were dropped by
   extraction.  */
13697 struct md5_ctx ctx;
13698 unsigned char checksum_before[16], checksum_after[16];
13701 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13702 md5_init_ctx (&ctx);
13703 fold_checksum_tree (expr, &ctx, ht);
13704 md5_finish_ctx (&ctx, checksum_before);
/* Do the actual folding via the real implementation.  */
13707 ret = fold_1 (expr);
13709 md5_init_ctx (&ctx);
13710 fold_checksum_tree (expr, &ctx, ht);
13711 md5_finish_ctx (&ctx, checksum_after);
13714 if (memcmp (checksum_before, checksum_after, 16))
13715 fold_check_failed (expr, ret);
/* Print the MD5 checksum of EXPR to stderr as 16 hex byte pairs
   followed by a newline.  Debugging aid for the fold checker.  */
13721 print_fold_checksum (const_tree expr)
13723 struct md5_ctx ctx;
13724 unsigned char checksum[16], cnt;
13727 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13728 md5_init_ctx (&ctx);
13729 fold_checksum_tree (expr, &ctx, ht);
13730 md5_finish_ctx (&ctx, checksum);
13732 for (cnt = 0; cnt < 16; ++cnt)
13733 fprintf (stderr, "%02x", checksum[cnt]);
13734 putc ('\n', stderr);
/* Report (fatally, via internal_error) that fold modified its input
   tree.  EXPR/RET are currently unused but kept for debugging.  */
13738 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13740 internal_error ("fold check: original tree changed by fold");
/* Recursively feed the bytes of EXPR (and relevant sub-trees) into the
   MD5 context CTX.  HT tracks already-visited nodes so shared/cyclic
   trees are processed once.  Fields that fold is allowed to modify
   (DECL_ASSEMBLER_NAME, various type caches) are masked out by
   checksumming a scrubbed copy in BUF.
   NOTE(review): extraction dropped a number of lines here (early
   returns, some case labels, braces); text kept byte-identical.  */
13744 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13747 enum tree_code code;
13748 union tree_node buf;
/* BUF must be large enough to hold a copy of any node we scrub.  */
13753 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13754 <= sizeof (struct tree_function_decl))
13755 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13758 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13762 code = TREE_CODE (expr);
13763 if (TREE_CODE_CLASS (code) == tcc_declaration
13764 && DECL_ASSEMBLER_NAME_SET_P (expr))
13766 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13767 memcpy ((char *) &buf, expr, tree_size (expr))
13768 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13769 expr = (tree) &buf;
13771 else if (TREE_CODE_CLASS (code) == tcc_type
13772 && (TYPE_POINTER_TO (expr)
13773 || TYPE_REFERENCE_TO (expr)
13774 || TYPE_CACHED_VALUES_P (expr)
13775 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13776 || TYPE_NEXT_VARIANT (expr)))
13778 /* Allow these fields to be modified. */
13780 memcpy ((char *) &buf, expr, tree_size (expr));
13781 expr = tmp = (tree) &buf;
13782 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13783 TYPE_POINTER_TO (tmp) = NULL;
13784 TYPE_REFERENCE_TO (tmp) = NULL;
13785 TYPE_NEXT_VARIANT (tmp) = NULL;
13786 if (TYPE_CACHED_VALUES_P (tmp))
13788 TYPE_CACHED_VALUES_P (tmp) = 0;
13789 TYPE_CACHED_VALUES (tmp) = NULL;
/* Checksum the raw node bytes, then recurse into sub-trees.  */
13792 md5_process_bytes (expr, tree_size (expr), ctx);
13793 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13794 if (TREE_CODE_CLASS (code) != tcc_type
13795 && TREE_CODE_CLASS (code) != tcc_declaration
13796 && code != TREE_LIST
13797 && code != SSA_NAME)
13798 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13799 switch (TREE_CODE_CLASS (code))
13805 md5_process_bytes (TREE_STRING_POINTER (expr),
13806 TREE_STRING_LENGTH (expr), ctx);
13809 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13810 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13813 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13819 case tcc_exceptional:
/* TREE_LIST: walk the chain iteratively via the recursive_label.  */
13823 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13824 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13825 expr = TREE_CHAIN (expr);
13826 goto recursive_label;
13829 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13830 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13836 case tcc_expression:
13837 case tcc_reference:
13838 case tcc_comparison:
13841 case tcc_statement:
13843 len = TREE_OPERAND_LENGTH (expr);
13844 for (i = 0; i < len; ++i)
13845 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13847 case tcc_declaration:
13848 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13849 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13850 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13852 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13853 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13854 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13855 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13856 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13858 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13859 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13861 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13863 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13864 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13865 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13869 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13870 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13871 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13872 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13873 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13874 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13875 if (INTEGRAL_TYPE_P (expr)
13876 || SCALAR_FLOAT_TYPE_P (expr))
13878 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13879 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13881 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13882 if (TREE_CODE (expr) == RECORD_TYPE
13883 || TREE_CODE (expr) == UNION_TYPE
13884 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13885 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13886 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13893 /* Helper function for outputting the checksum of a tree T. When
13894 debugging with gdb, you can "define mynext" to be "next" followed
13895 by "call debug_fold_checksum (op0)", then just trace down till the
/* Prints the 16 checksum bytes of T to stderr in decimal, space
   separated.  Companion to print_fold_checksum (which prints hex).  */
13899 debug_fold_checksum (const_tree t)
13902 unsigned char checksum[16];
13903 struct md5_ctx ctx;
13904 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13906 md5_init_ctx (&ctx);
13907 fold_checksum_tree (t, &ctx, ht);
13908 md5_finish_ctx (&ctx, checksum);
13911 for (i = 0; i < 16; i++)
13912 fprintf (stderr, "%d ", checksum[i]);
13914 fprintf (stderr, "\n");
13919 /* Fold a unary tree expression with code CODE of type TYPE with an
13920 operand OP0. Return a folded expression if successful. Otherwise,
13921 return a tree expression with code CODE of type TYPE with an
13925 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
/* Under ENABLE_FOLD_CHECKING, verify fold_unary did not mutate OP0
   by comparing MD5 checksums taken before and after.  */
13928 #ifdef ENABLE_FOLD_CHECKING
13929 unsigned char checksum_before[16], checksum_after[16];
13930 struct md5_ctx ctx;
13933 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13934 md5_init_ctx (&ctx);
13935 fold_checksum_tree (op0, &ctx, ht);
13936 md5_finish_ctx (&ctx, checksum_before);
/* Try the simplifier; fall back to building the raw expression.  */
13940 tem = fold_unary (code, type, op0);
13942 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13944 #ifdef ENABLE_FOLD_CHECKING
13945 md5_init_ctx (&ctx);
13946 fold_checksum_tree (op0, &ctx, ht);
13947 md5_finish_ctx (&ctx, checksum_after);
13950 if (memcmp (checksum_before, checksum_after, 16))
13951 fold_check_failed (op0, tem);
13956 /* Fold a binary tree expression with code CODE of type TYPE with
13957 operands OP0 and OP1. Return a folded expression if successful.
13958 Otherwise, return a tree expression with code CODE of type TYPE
13959 with operands OP0 and OP1. */
13962 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
/* Under ENABLE_FOLD_CHECKING, checksum both operands before and after
   fold_binary and fail if either was mutated.  */
13966 #ifdef ENABLE_FOLD_CHECKING
13967 unsigned char checksum_before_op0[16],
13968 checksum_before_op1[16],
13969 checksum_after_op0[16],
13970 checksum_after_op1[16];
13971 struct md5_ctx ctx;
13974 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13975 md5_init_ctx (&ctx);
13976 fold_checksum_tree (op0, &ctx, ht);
13977 md5_finish_ctx (&ctx, checksum_before_op0);
13980 md5_init_ctx (&ctx);
13981 fold_checksum_tree (op1, &ctx, ht);
13982 md5_finish_ctx (&ctx, checksum_before_op1);
/* Try the simplifier; fall back to building the raw expression.  */
13986 tem = fold_binary (code, type, op0, op1);
13988 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13990 #ifdef ENABLE_FOLD_CHECKING
13991 md5_init_ctx (&ctx);
13992 fold_checksum_tree (op0, &ctx, ht);
13993 md5_finish_ctx (&ctx, checksum_after_op0);
13996 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13997 fold_check_failed (op0, tem);
13999 md5_init_ctx (&ctx);
14000 fold_checksum_tree (op1, &ctx, ht);
14001 md5_finish_ctx (&ctx, checksum_after_op1);
14004 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14005 fold_check_failed (op1, tem);
14010 /* Fold a ternary tree expression with code CODE of type TYPE with
14011 operands OP0, OP1, and OP2. Return a folded expression if
14012 successful. Otherwise, return a tree expression with code CODE of
14013 type TYPE with operands OP0, OP1, and OP2. */
14016 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
/* Under ENABLE_FOLD_CHECKING, checksum all three operands before and
   after fold_ternary and fail if any was mutated.  */
14020 #ifdef ENABLE_FOLD_CHECKING
14021 unsigned char checksum_before_op0[16],
14022 checksum_before_op1[16],
14023 checksum_before_op2[16],
14024 checksum_after_op0[16],
14025 checksum_after_op1[16],
14026 checksum_after_op2[16];
14027 struct md5_ctx ctx;
14030 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14031 md5_init_ctx (&ctx);
14032 fold_checksum_tree (op0, &ctx, ht);
14033 md5_finish_ctx (&ctx, checksum_before_op0);
14036 md5_init_ctx (&ctx);
14037 fold_checksum_tree (op1, &ctx, ht);
14038 md5_finish_ctx (&ctx, checksum_before_op1);
14041 md5_init_ctx (&ctx);
14042 fold_checksum_tree (op2, &ctx, ht);
14043 md5_finish_ctx (&ctx, checksum_before_op2);
/* Variable-arity codes (CALL_EXPR) must not come through here.  */
14047 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14048 tem = fold_ternary (code, type, op0, op1, op2);
14050 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14052 #ifdef ENABLE_FOLD_CHECKING
14053 md5_init_ctx (&ctx);
14054 fold_checksum_tree (op0, &ctx, ht);
14055 md5_finish_ctx (&ctx, checksum_after_op0);
14058 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14059 fold_check_failed (op0, tem);
14061 md5_init_ctx (&ctx);
14062 fold_checksum_tree (op1, &ctx, ht);
14063 md5_finish_ctx (&ctx, checksum_after_op1);
14066 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14067 fold_check_failed (op1, tem);
14069 md5_init_ctx (&ctx);
14070 fold_checksum_tree (op2, &ctx, ht);
14071 md5_finish_ctx (&ctx, checksum_after_op2);
14074 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14075 fold_check_failed (op2, tem);
14080 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14081 arguments in ARGARRAY, and a null static chain.
14082 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14083 of type TYPE from the given operands as constructed by build_call_array. */
14086 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
/* Under ENABLE_FOLD_CHECKING, checksum FN and the whole argument list
   before and after folding and fail if anything was mutated.  */
14089 #ifdef ENABLE_FOLD_CHECKING
14090 unsigned char checksum_before_fn[16],
14091 checksum_before_arglist[16],
14092 checksum_after_fn[16],
14093 checksum_after_arglist[16];
14094 struct md5_ctx ctx;
14098 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14099 md5_init_ctx (&ctx);
14100 fold_checksum_tree (fn, &ctx, ht);
14101 md5_finish_ctx (&ctx, checksum_before_fn);
/* All arguments share one digest.  */
14104 md5_init_ctx (&ctx);
14105 for (i = 0; i < nargs; i++)
14106 fold_checksum_tree (argarray[i], &ctx, ht);
14107 md5_finish_ctx (&ctx, checksum_before_arglist);
14111 tem = fold_builtin_call_array (type, fn, nargs, argarray);
14113 #ifdef ENABLE_FOLD_CHECKING
14114 md5_init_ctx (&ctx);
14115 fold_checksum_tree (fn, &ctx, ht);
14116 md5_finish_ctx (&ctx, checksum_after_fn);
14119 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14120 fold_check_failed (fn, tem);
14122 md5_init_ctx (&ctx);
14123 for (i = 0; i < nargs; i++)
14124 fold_checksum_tree (argarray[i], &ctx, ht);
14125 md5_finish_ctx (&ctx, checksum_after_arglist);
14128 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14129 fold_check_failed (NULL_TREE, tem);
14134 /* Perform constant folding and related simplification of initializer
14135 expression EXPR. These behave identically to "fold_buildN" but ignore
14136 potential run-time traps and exceptions that fold must preserve. */
/* Save the trap/rounding-related flags, then clear them and enter
   initializer-folding mode.  Paired with END_FOLD_INIT below.
   NOTE(review): original line 14147 (presumably "flag_trapv = 0;\")
   appears to have been dropped by extraction — verify against the
   upstream source before relying on this text.  */
14138 #define START_FOLD_INIT \
14139 int saved_signaling_nans = flag_signaling_nans;\
14140 int saved_trapping_math = flag_trapping_math;\
14141 int saved_rounding_math = flag_rounding_math;\
14142 int saved_trapv = flag_trapv;\
14143 int saved_folding_initializer = folding_initializer;\
14144 flag_signaling_nans = 0;\
14145 flag_trapping_math = 0;\
14146 flag_rounding_math = 0;\
14148 folding_initializer = 1;
/* Restore all flags saved by START_FOLD_INIT.  */
14150 #define END_FOLD_INIT \
14151 flag_signaling_nans = saved_signaling_nans;\
14152 flag_trapping_math = saved_trapping_math;\
14153 flag_rounding_math = saved_rounding_math;\
14154 flag_trapv = saved_trapv;\
14155 folding_initializer = saved_folding_initializer;
/* fold_build1 wrapped in START/END_FOLD_INIT (see comment above the
   macros): folds with trap/rounding flags suppressed.  */
14158 fold_build1_initializer (enum tree_code code, tree type, tree op)
14163 result = fold_build1 (code, type, op);
/* fold_build2 variant for initializers: folds with trap/rounding
   flags suppressed via the FOLD_INIT macros.  */
14170 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
14175 result = fold_build2 (code, type, op0, op1);
/* fold_build3 variant for initializers: folds with trap/rounding
   flags suppressed via the FOLD_INIT macros.  */
14182 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
14188 result = fold_build3 (code, type, op0, op1, op2);
/* fold_build_call_array variant for initializers: folds with
   trap/rounding flags suppressed via the FOLD_INIT macros.  */
14195 fold_build_call_array_initializer (tree type, tree fn,
14196 int nargs, tree *argarray)
14201 result = fold_build_call_array (type, fn, nargs, argarray);
14207 #undef START_FOLD_INIT
14208 #undef END_FOLD_INIT
14210 /* Determine if first argument is a multiple of second argument. Return 0 if
14211 it is not, or we cannot easily determined it to be.
14213 An example of the sort of thing we care about (at this point; this routine
14214 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14215 fold cases do now) is discovering that
14217 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14223 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14225 This code also handles discovering that
14227 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14229 is a multiple of 8 so we don't have to worry about dealing with a
14230 possible remainder.
14232 Note that we *look* inside a SAVE_EXPR only to determine how it was
14233 calculated; it is not safe for fold to do much of anything else with the
14234 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14235 at run time. For example, the latter example above *cannot* be implemented
14236 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14237 evaluation time of the original SAVE_EXPR is not necessarily the same at
14238 the time the new expression is evaluated. The only optimization of this
14239 sort that would be valid is changing
14241 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14245 SAVE_EXPR (I) * SAVE_EXPR (J)
14247 (where the same SAVE_EXPR (J) is used in the original and the
14248 transformed version). */
/* See the long comment above for the full contract: returns nonzero
   when TOP is provably a multiple of BOTTOM (conservatively 0 when we
   cannot tell), looking inside SAVE_EXPRs only structurally.
   NOTE(review): extraction dropped many lines (case labels, returns,
   braces) in this function.  */
14251 multiple_of_p (tree type, const_tree top, const_tree bottom)
14253 if (operand_equal_p (top, bottom, 0))
/* Only integer types have a meaningful divisibility relation here.  */
14256 if (TREE_CODE (type) != INTEGER_TYPE)
14259 switch (TREE_CODE (top))
14262 /* Bitwise and provides a power of two multiple. If the mask is
14263 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14264 if (!integer_pow2p (bottom))
14269 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14270 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom))
/* Sum/difference: both operands must be multiples.  */
14274 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14275 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Shift by a constant: rewrite as multiplication and recurse.  */
14278 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14282 op1 = TREE_OPERAND (top, 1);
14283 /* const_binop may not detect overflow correctly,
14284 so check for it explicitly here. */
14285 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14286 > TREE_INT_CST_LOW (op1)
14287 && TREE_INT_CST_HIGH (op1) == 0
14288 && 0 != (t1 = fold_convert (type,
14289 const_binop (LSHIFT_EXPR,
14292 && !TREE_OVERFLOW (t1))
14293 return multiple_of_p (type, t1, bottom);
14298 /* Can't handle conversions from non-integral or wider integral type. */
14299 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14300 || (TYPE_PRECISION (type)
14301 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14304 /* .. fall through ... */
14307 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Constant case: only safe when both values are nonnegative constants
   (unsigned types reject negative-looking constants).  */
14310 if (TREE_CODE (bottom) != INTEGER_CST
14311 || integer_zerop (bottom)
14312 || (TYPE_UNSIGNED (type)
14313 && (tree_int_cst_sgn (top) < 0
14314 || tree_int_cst_sgn (bottom) < 0)))
14316 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14324 /* Return true if CODE or TYPE is known to be non-negative. */
/* See comment above: nonnegative-by-construction check based only on
   the tree CODE and TYPE (no operand inspection).  */
14327 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14329 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14330 && truth_value_p (code))
14331 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14332 have a signed:1 type (where the value is -1 and 0). */
14337 /* Return true if (CODE OP0) is known to be non-negative. If the return
14338 value is based on the assumption that signed overflow is undefined,
14339 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14340 *STRICT_OVERFLOW_P. */
/* See comment above: decide whether (CODE OP0) of type TYPE is known
   nonnegative; may set *STRICT_OVERFLOW_P when the answer relies on
   signed overflow being undefined.
   NOTE(review): extraction dropped case labels/braces here; text
   below kept byte-identical.  */
14343 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14344 bool *strict_overflow_p)
/* Unsigned results are trivially nonnegative.  */
14346 if (TYPE_UNSIGNED (type))
14352 /* We can't return 1 if flag_wrapv is set because
14353 ABS_EXPR<INT_MIN> = INT_MIN. */
14354 if (!INTEGRAL_TYPE_P (type))
14356 if (TYPE_OVERFLOW_UNDEFINED (type))
14358 *strict_overflow_p = true;
14363 case NON_LVALUE_EXPR:
14365 case FIX_TRUNC_EXPR:
14366 return tree_expr_nonnegative_warnv_p (op0,
14367 strict_overflow_p);
/* Conversion case: reason from the inner/outer type pair.  */
14371 tree inner_type = TREE_TYPE (op0);
14372 tree outer_type = type;
14374 if (TREE_CODE (outer_type) == REAL_TYPE)
14376 if (TREE_CODE (inner_type) == REAL_TYPE)
14377 return tree_expr_nonnegative_warnv_p (op0,
14378 strict_overflow_p);
14379 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14381 if (TYPE_UNSIGNED (inner_type))
14383 return tree_expr_nonnegative_warnv_p (op0,
14384 strict_overflow_p);
14387 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14389 if (TREE_CODE (inner_type) == REAL_TYPE)
14390 return tree_expr_nonnegative_warnv_p (op0,
14391 strict_overflow_p);
/* Widening from unsigned always yields a nonnegative value.  */
14392 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14393 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14394 && TYPE_UNSIGNED (inner_type);
14400 return tree_simple_nonnegative_warnv_p (code, type);
14403 /* We don't know sign of `t', so be conservative and return false. */
14407 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14408 value is based on the assumption that signed overflow is undefined,
14409 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14410 *STRICT_OVERFLOW_P. */
14413 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14414 tree op1, bool *strict_overflow_p)
/* An unsigned result type is trivially non-negative.  */
14416 if (TYPE_UNSIGNED (type))
14421 case POINTER_PLUS_EXPR:
/* Addition (PLUS_EXPR label elided): for floats, nonneg + nonneg is
   nonneg; for integers we need the stronger zero-extension argument
   below to rule out signed overflow.  */
14423 if (FLOAT_TYPE_P (type))
14424 return (tree_expr_nonnegative_warnv_p (op0,
14426 && tree_expr_nonnegative_warnv_p (op1,
14427 strict_overflow_p));
14429 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14430 both unsigned and at least 2 bits shorter than the result. */
14431 if (TREE_CODE (type) == INTEGER_TYPE
14432 && TREE_CODE (op0) == NOP_EXPR
14433 && TREE_CODE (op1) == NOP_EXPR)
14435 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14436 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14437 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14438 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
/* prec bits can hold either operand; prec < TYPE_PRECISION (type)
   leaves headroom for the carry, so the sum cannot reach the sign bit.  */
14440 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14441 TYPE_PRECISION (inner2)) + 1;
14442 return prec < TYPE_PRECISION (type);
/* Multiplication (MULT_EXPR label elided).  */
14448 if (FLOAT_TYPE_P (type))
14450 /* x * x for floating point x is always non-negative. */
14451 if (operand_equal_p (op0, op1, 0))
14453 return (tree_expr_nonnegative_warnv_p (op0,
14455 && tree_expr_nonnegative_warnv_p (op1,
14456 strict_overflow_p));
14459 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14460 both unsigned and their total bits is shorter than the result. */
14461 if (TREE_CODE (type) == INTEGER_TYPE
14462 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14463 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14465 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14466 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14468 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14469 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14472 bool unsigned0 = TYPE_UNSIGNED (inner0);
14473 bool unsigned1 = TYPE_UNSIGNED (inner1);
/* A constant operand counts as "unsigned" whenever its value is
   non-negative, regardless of the declared type.  */
14475 if (TREE_CODE (op0) == INTEGER_CST)
14476 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14478 if (TREE_CODE (op1) == INTEGER_CST)
14479 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14481 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14482 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
/* For constants use the minimal precision of the value itself,
   which may be much narrower than the type.  */
14484 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14485 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14486 : TYPE_PRECISION (inner0);
14488 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14489 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14490 : TYPE_PRECISION (inner1);
14492 return precision0 + precision1 < TYPE_PRECISION (type);
/* (case label elided) either operand non-negative suffices here --
   consistent with BIT_AND_EXPR semantics; confirm against full source.  */
14499 return (tree_expr_nonnegative_warnv_p (op0,
14501 || tree_expr_nonnegative_warnv_p (op1,
14502 strict_overflow_p));
/* Division: both operands non-negative implies a non-negative quotient.  */
14508 case TRUNC_DIV_EXPR:
14509 case CEIL_DIV_EXPR:
14510 case FLOOR_DIV_EXPR:
14511 case ROUND_DIV_EXPR:
14512 return (tree_expr_nonnegative_warnv_p (op0,
14514 && tree_expr_nonnegative_warnv_p (op1,
14515 strict_overflow_p));
/* Modulus: the result's sign follows the dividend, so only OP0 matters.  */
14517 case TRUNC_MOD_EXPR:
14518 case CEIL_MOD_EXPR:
14519 case FLOOR_MOD_EXPR:
14520 case ROUND_MOD_EXPR:
14521 return tree_expr_nonnegative_warnv_p (op0,
14522 strict_overflow_p);
14524 return tree_simple_nonnegative_warnv_p (code, type);
14527 /* We don't know sign of `t', so be conservative and return false. */
14531 /* Return true if T is known to be non-negative. If the return
14532 value is based on the assumption that signed overflow is undefined,
14533 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14534 *STRICT_OVERFLOW_P. */
/* Handles "single" nodes: constants and leaf-like expressions.  */
14537 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14539 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14542 switch (TREE_CODE (t))
/* NOTE(review): case labels are elided in this excerpt; from the visible
   bodies these are INTEGER_CST, REAL_CST, FIXED_CST and COND_EXPR --
   confirm against the full source.  */
14545 return tree_int_cst_sgn (t) >= 0;
14548 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14551 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
/* A conditional is non-negative iff both of its arms are.  */
14554 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14556 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14557 strict_overflow_p));
14559 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14562 /* We don't know sign of `t', so be conservative and return false. */
14566 /* Return true if T is known to be non-negative. If the return
14567 value is based on the assumption that signed overflow is undefined,
14568 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14569 *STRICT_OVERFLOW_P. */
/* Decides non-negativity of a call to FNDECL with arguments ARG0/ARG1,
   using knowledge of the math/bit builtins' ranges.  */
14572 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14573 tree arg0, tree arg1, bool *strict_overflow_p)
14575 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14576 switch (DECL_FUNCTION_CODE (fndecl))
/* This group of builtins has a mathematically non-negative range
   (e.g. acos in [0,pi], cosh >= 1, exp > 0, |x|, bit counts).  */
14578 CASE_FLT_FN (BUILT_IN_ACOS):
14579 CASE_FLT_FN (BUILT_IN_ACOSH):
14580 CASE_FLT_FN (BUILT_IN_CABS):
14581 CASE_FLT_FN (BUILT_IN_COSH):
14582 CASE_FLT_FN (BUILT_IN_ERFC):
14583 CASE_FLT_FN (BUILT_IN_EXP):
14584 CASE_FLT_FN (BUILT_IN_EXP10):
14585 CASE_FLT_FN (BUILT_IN_EXP2):
14586 CASE_FLT_FN (BUILT_IN_FABS):
14587 CASE_FLT_FN (BUILT_IN_FDIM):
14588 CASE_FLT_FN (BUILT_IN_HYPOT):
14589 CASE_FLT_FN (BUILT_IN_POW10):
14590 CASE_INT_FN (BUILT_IN_FFS):
14591 CASE_INT_FN (BUILT_IN_PARITY):
14592 CASE_INT_FN (BUILT_IN_POPCOUNT):
14593 case BUILT_IN_BSWAP32:
14594 case BUILT_IN_BSWAP64:
14598 CASE_FLT_FN (BUILT_IN_SQRT):
14599 /* sqrt(-0.0) is -0.0. */
/* Without signed zeros sqrt of a representable argument is >= 0;
   otherwise fall back to checking the argument itself.  */
14600 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14602 return tree_expr_nonnegative_warnv_p (arg0,
14603 strict_overflow_p);
/* These builtins are sign-preserving: result is non-negative exactly
   when the first argument is.  */
14605 CASE_FLT_FN (BUILT_IN_ASINH):
14606 CASE_FLT_FN (BUILT_IN_ATAN):
14607 CASE_FLT_FN (BUILT_IN_ATANH):
14608 CASE_FLT_FN (BUILT_IN_CBRT):
14609 CASE_FLT_FN (BUILT_IN_CEIL):
14610 CASE_FLT_FN (BUILT_IN_ERF):
14611 CASE_FLT_FN (BUILT_IN_EXPM1):
14612 CASE_FLT_FN (BUILT_IN_FLOOR):
14613 CASE_FLT_FN (BUILT_IN_FMOD):
14614 CASE_FLT_FN (BUILT_IN_FREXP):
14615 CASE_FLT_FN (BUILT_IN_LCEIL):
14616 CASE_FLT_FN (BUILT_IN_LDEXP):
14617 CASE_FLT_FN (BUILT_IN_LFLOOR):
14618 CASE_FLT_FN (BUILT_IN_LLCEIL):
14619 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14620 CASE_FLT_FN (BUILT_IN_LLRINT):
14621 CASE_FLT_FN (BUILT_IN_LLROUND):
14622 CASE_FLT_FN (BUILT_IN_LRINT):
14623 CASE_FLT_FN (BUILT_IN_LROUND):
14624 CASE_FLT_FN (BUILT_IN_MODF):
14625 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14626 CASE_FLT_FN (BUILT_IN_RINT):
14627 CASE_FLT_FN (BUILT_IN_ROUND):
14628 CASE_FLT_FN (BUILT_IN_SCALB):
14629 CASE_FLT_FN (BUILT_IN_SCALBLN):
14630 CASE_FLT_FN (BUILT_IN_SCALBN):
14631 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14632 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14633 CASE_FLT_FN (BUILT_IN_SINH):
14634 CASE_FLT_FN (BUILT_IN_TANH):
14635 CASE_FLT_FN (BUILT_IN_TRUNC):
14636 /* True if the 1st argument is nonnegative. */
14637 return tree_expr_nonnegative_warnv_p (arg0,
14638 strict_overflow_p);
14640 CASE_FLT_FN (BUILT_IN_FMAX):
14641 /* True if the 1st OR 2nd arguments are nonnegative. */
14642 return (tree_expr_nonnegative_warnv_p (arg0,
14644 || (tree_expr_nonnegative_warnv_p (arg1,
14645 strict_overflow_p)));
14647 CASE_FLT_FN (BUILT_IN_FMIN):
14648 /* True if the 1st AND 2nd arguments are nonnegative. */
14649 return (tree_expr_nonnegative_warnv_p (arg0,
14651 && (tree_expr_nonnegative_warnv_p (arg1,
14652 strict_overflow_p)));
14654 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14655 /* True if the 2nd argument is nonnegative. */
14656 return tree_expr_nonnegative_warnv_p (arg1,
14657 strict_overflow_p);
14659 CASE_FLT_FN (BUILT_IN_POWI):
14660 /* True if the 1st argument is nonnegative or the second
14661 argument is an even integer. */
/* powi with an even exponent is x^(2k) >= 0 for any x.  */
14662 if (TREE_CODE (arg1) == INTEGER_CST
14663 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14665 return tree_expr_nonnegative_warnv_p (arg0,
14666 strict_overflow_p);
14668 CASE_FLT_FN (BUILT_IN_POW):
14669 /* True if the 1st argument is nonnegative or the second
14670 argument is an even integer valued real. */
14671 if (TREE_CODE (arg1) == REAL_CST)
14676 c = TREE_REAL_CST (arg1);
14677 n = real_to_integer (&c);
/* Round-trip the exponent through an integer to verify it is an
   exact integral value before testing its parity.  */
14680 REAL_VALUE_TYPE cint;
14681 real_from_integer (&cint, VOIDmode, n,
14682 n < 0 ? -1 : 0, 0);
14683 if (real_identical (&c, &cint))
14687 return tree_expr_nonnegative_warnv_p (arg0,
14688 strict_overflow_p);
/* Unknown callee: fall back to reasoning from the call's type alone.  */
14693 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14697 /* Return true if T is known to be non-negative. If the return
14698 value is based on the assumption that signed overflow is undefined,
14699 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14700 *STRICT_OVERFLOW_P. */
/* Handles the miscellaneous expression codes (TARGET_EXPR, CALL_EXPR,
   COMPOUND_EXPR, ...) that the other helpers do not cover.  */
14703 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14705 enum tree_code code = TREE_CODE (t);
14706 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* TARGET_EXPR case (label elided): analyze the value stored into the
   temporary slot.  */
14713 tree temp = TARGET_EXPR_SLOT (t);
14714 t = TARGET_EXPR_INITIAL (t);
14716 /* If the initializer is non-void, then it's a normal expression
14717 that will be assigned to the slot. */
14718 if (!VOID_TYPE_P (t))
14719 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14721 /* Otherwise, the initializer sets the slot in some way. One common
14722 way is an assignment statement at the end of the initializer. */
/* Drill down to the last statement of the initializer body.  */
14725 if (TREE_CODE (t) == BIND_EXPR)
14726 t = expr_last (BIND_EXPR_BODY (t));
14727 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14728 || TREE_CODE (t) == TRY_CATCH_EXPR)
14729 t = expr_last (TREE_OPERAND (t, 0));
14730 else if (TREE_CODE (t) == STATEMENT_LIST)
/* Only a trailing "temp = value" assignment lets us conclude anything.  */
14735 if (TREE_CODE (t) == MODIFY_EXPR
14736 && TREE_OPERAND (t, 0) == temp)
14737 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14738 strict_overflow_p);
/* CALL_EXPR case (label elided): delegate to the builtin-aware helper.  */
14745 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14746 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14748 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14749 get_callee_fndecl (t),
14752 strict_overflow_p);
/* A comma expression's value is its second operand.  */
14754 case COMPOUND_EXPR:
14756 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14757 strict_overflow_p);
14759 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14760 strict_overflow_p);
14762 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14763 strict_overflow_p);
14766 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14770 /* We don't know sign of `t', so be conservative and return false. */
14774 /* Return true if T is known to be non-negative. If the return
14775 value is based on the assumption that signed overflow is undefined,
14776 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14777 *STRICT_OVERFLOW_P. */
/* Top-level dispatcher: routes T to the unary/binary/single/invalid
   helpers above according to its TREE_CODE_CLASS.  */
14780 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14782 enum tree_code code;
/* Never reason about error nodes.  */
14783 if (t == error_mark_node)
14786 code = TREE_CODE (t);
14787 switch (TREE_CODE_CLASS (code))
14790 case tcc_comparison:
14791 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14793 TREE_OPERAND (t, 0),
14794 TREE_OPERAND (t, 1),
14795 strict_overflow_p);
14798 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14800 TREE_OPERAND (t, 0),
14801 strict_overflow_p);
14804 case tcc_declaration:
14805 case tcc_reference:
14806 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
/* Truth operators are binary even though classed as expressions.  */
14814 case TRUTH_AND_EXPR:
14815 case TRUTH_OR_EXPR:
14816 case TRUTH_XOR_EXPR:
14817 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14819 TREE_OPERAND (t, 0),
14820 TREE_OPERAND (t, 1),
14821 strict_overflow_p);
14822 case TRUTH_NOT_EXPR:
14823 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14825 TREE_OPERAND (t, 0),
14826 strict_overflow_p);
14833 case WITH_SIZE_EXPR:
14837 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
/* Everything else falls through to the catch-all analysis.  */
14840 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14844 /* Return true if `t' is known to be non-negative. Handle warnings
14845 about undefined signed overflow. */
/* Public wrapper around tree_expr_nonnegative_warnv_p that emits the
   -Wstrict-overflow diagnostic when the answer relied on undefined
   signed overflow.  */
14848 tree_expr_nonnegative_p (tree t)
14850 bool ret, strict_overflow_p;
14852 strict_overflow_p = false;
14853 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14854 if (strict_overflow_p)
14855 fold_overflow_warning (("assuming signed overflow does not occur when "
14856 "determining that expression is always "
14858 WARN_STRICT_OVERFLOW_MISC);
14863 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14864 For floating point we further ensure that T is not denormal.
14865 Similar logic is present in nonzero_address in rtlanal.h.
14867 If the return value is based on the assumption that signed overflow
14868 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14869 change *STRICT_OVERFLOW_P. */
14872 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14873 bool *strict_overflow_p)
/* (case label elided) e.g. ABS_EXPR/NEGATE-style codes: nonzero iff
   the operand is nonzero -- confirm labels against the full source.  */
14878 return tree_expr_nonzero_warnv_p (op0,
14879 strict_overflow_p);
/* Conversion case: a nonzero value stays nonzero as long as the cast
   does not truncate (outer precision >= inner precision).  */
14883 tree inner_type = TREE_TYPE (op0);
14884 tree outer_type = type;
14886 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14887 && tree_expr_nonzero_warnv_p (op0,
14888 strict_overflow_p));
/* NON_LVALUE_EXPR is a transparent wrapper; look through it.  */
14892 case NON_LVALUE_EXPR:
14893 return tree_expr_nonzero_warnv_p (op0,
14894 strict_overflow_p);
14903 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14904 For floating point we further ensure that T is not denormal.
14905 Similar logic is present in nonzero_address in rtlanal.h.
14907 If the return value is based on the assumption that signed overflow
14908 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14909 change *STRICT_OVERFLOW_P. */
14912 tree_binary_nonzero_warnv_p (enum tree_code code,
14915 tree op1, bool *strict_overflow_p)
14917 bool sub_strict_overflow_p;
14920 case POINTER_PLUS_EXPR:
/* Addition: require both operands non-negative (so no cancellation)
   and at least one nonzero.  */
14922 if (TYPE_OVERFLOW_UNDEFINED (type))
14924 /* With the presence of negative values it is hard
14925 to say something. */
14926 sub_strict_overflow_p = false;
14927 if (!tree_expr_nonnegative_warnv_p (op0,
14928 &sub_strict_overflow_p)
14929 || !tree_expr_nonnegative_warnv_p (op1,
14930 &sub_strict_overflow_p))
14932 /* One of operands must be positive and the other non-negative. */
14933 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14934 overflows, on a twos-complement machine the sum of two
14935 nonnegative numbers can never be zero. */
14936 return (tree_expr_nonzero_warnv_p (op0,
14938 || tree_expr_nonzero_warnv_p (op1,
14939 strict_overflow_p));
/* Multiplication: nonzero * nonzero is nonzero only when signed
   overflow is undefined (wrapping could produce 0).  */
14944 if (TYPE_OVERFLOW_UNDEFINED (type))
14946 if (tree_expr_nonzero_warnv_p (op0,
14948 && tree_expr_nonzero_warnv_p (op1,
14949 strict_overflow_p))
/* This conclusion depends on overflow being undefined.  */
14951 *strict_overflow_p = true;
/* MIN_EXPR (label elided): both operands nonzero => result nonzero.  */
14958 sub_strict_overflow_p = false;
14959 if (tree_expr_nonzero_warnv_p (op0,
14960 &sub_strict_overflow_p)
14961 && tree_expr_nonzero_warnv_p (op1,
14962 &sub_strict_overflow_p))
/* Propagate the sub-analysis' overflow assumption only on success.  */
14964 if (sub_strict_overflow_p)
14965 *strict_overflow_p = true;
/* MAX_EXPR (label elided): several sufficient conditions follow.  */
14970 sub_strict_overflow_p = false;
14971 if (tree_expr_nonzero_warnv_p (op0,
14972 &sub_strict_overflow_p))
14974 if (sub_strict_overflow_p)
14975 *strict_overflow_p = true;
14977 /* When both operands are nonzero, then MAX must be too. */
14978 if (tree_expr_nonzero_warnv_p (op1,
14979 strict_overflow_p))
14982 /* MAX where operand 0 is positive is positive. */
14983 return tree_expr_nonnegative_warnv_p (op0,
14984 strict_overflow_p);
14986 /* MAX where operand 1 is positive is positive. */
14987 else if (tree_expr_nonzero_warnv_p (op1,
14988 &sub_strict_overflow_p)
14989 && tree_expr_nonnegative_warnv_p (op1,
14990 &sub_strict_overflow_p))
14992 if (sub_strict_overflow_p)
14993 *strict_overflow_p = true;
/* (case label elided, e.g. BIT_IOR_EXPR): either operand nonzero
   suffices -- confirm against the full source.  */
14999 return (tree_expr_nonzero_warnv_p (op1,
15001 || tree_expr_nonzero_warnv_p (op0,
15002 strict_overflow_p));
15011 /* Return true when T is an address and is known to be nonzero.
15012 For floating point we further ensure that T is not denormal.
15013 Similar logic is present in nonzero_address in rtlanal.h.
15015 If the return value is based on the assumption that signed overflow
15016 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15017 change *STRICT_OVERFLOW_P. */
15020 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15022 bool sub_strict_overflow_p;
15023 switch (TREE_CODE (t))
/* INTEGER_CST case (label elided): a literal is nonzero unless it is 0.  */
15026 return !integer_zerop (t);
/* ADDR_EXPR case (label elided): the address of most objects is nonzero.  */
15030 tree base = get_base_address (TREE_OPERAND (t, 0));
15035 /* Weak declarations may link to NULL. */
15036 if (VAR_OR_FUNCTION_DECL_P (base))
15037 return !DECL_WEAK (base);
15039 /* Constants are never weak. */
15040 if (CONSTANT_CLASS_P (base))
/* COND_EXPR case (label elided): nonzero iff both arms are nonzero.  */
15047 sub_strict_overflow_p = false;
15048 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15049 &sub_strict_overflow_p)
15050 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15051 &sub_strict_overflow_p))
/* Only commit the overflow assumption when the conclusion holds.  */
15053 if (sub_strict_overflow_p)
15054 *strict_overflow_p = true;
15065 /* Return true when T is an address and is known to be nonzero.
15066 For floating point we further ensure that T is not denormal.
15067 Similar logic is present in nonzero_address in rtlanal.h.
15069 If the return value is based on the assumption that signed overflow
15070 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15071 change *STRICT_OVERFLOW_P. */
/* Dispatcher for non-zero analysis, parallel in structure to
   tree_expr_nonnegative_warnv_p above.  */
15074 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15076 tree type = TREE_TYPE (t);
15077 enum tree_code code;
15079 /* Doing something useful for floating point would need more work. */
15080 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15083 code = TREE_CODE (t);
15084 switch (TREE_CODE_CLASS (code))
15087 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15088 strict_overflow_p);
15090 case tcc_comparison:
15091 return tree_binary_nonzero_warnv_p (code, type,
15092 TREE_OPERAND (t, 0),
15093 TREE_OPERAND (t, 1),
15094 strict_overflow_p);
15096 case tcc_declaration:
15097 case tcc_reference:
15098 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
/* Truth operators handled explicitly since they are classed as
   expressions rather than unary/binary.  */
15106 case TRUTH_NOT_EXPR:
15107 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15108 strict_overflow_p);
15110 case TRUTH_AND_EXPR:
15111 case TRUTH_OR_EXPR:
15112 case TRUTH_XOR_EXPR:
15113 return tree_binary_nonzero_warnv_p (code, type,
15114 TREE_OPERAND (t, 0),
15115 TREE_OPERAND (t, 1),
15116 strict_overflow_p);
15123 case WITH_SIZE_EXPR:
15127 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
/* A comma expression's value is its second operand.  */
15129 case COMPOUND_EXPR:
15132 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15133 strict_overflow_p);
15136 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15137 strict_overflow_p);
/* CALL_EXPR case (label elided): alloca never returns a null pointer.  */
15140 return alloca_call_p (t);
15148 /* Return true when T is an address and is known to be nonzero.
15149 Handle warnings about undefined signed overflow. */
/* Public wrapper around tree_expr_nonzero_warnv_p; emits the
   -Wstrict-overflow diagnostic when the answer assumed undefined
   signed overflow.  */
15152 tree_expr_nonzero_p (tree t)
15154 bool ret, strict_overflow_p;
15156 strict_overflow_p = false;
15157 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15158 if (strict_overflow_p)
15159 fold_overflow_warning (("assuming signed overflow does not occur when "
15160 "determining that expression is always "
15162 WARN_STRICT_OVERFLOW_MISC);
15166 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15167 attempt to fold the expression to a constant without modifying TYPE,
15170 If the expression could be simplified to a constant, then return
15171 the constant. If the expression would not be simplified to a
15172 constant, then return NULL_TREE. */
15175 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
/* fold_binary may return a non-constant simplification; only accept
   results that are actually TREE_CONSTANT.  */
15177 tree tem = fold_binary (code, type, op0, op1);
15178 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15181 /* Given the components of a unary expression CODE, TYPE and OP0,
15182 attempt to fold the expression to a constant without modifying
15185 If the expression could be simplified to a constant, then return
15186 the constant. If the expression would not be simplified to a
15187 constant, then return NULL_TREE. */
15190 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
/* Mirrors fold_binary_to_constant: keep the result only if constant.  */
15192 tree tem = fold_unary (code, type, op0);
15193 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15196 /* If EXP represents referencing an element in a constant string
15197 (either via pointer arithmetic or array indexing), return the
15198 tree representing the value accessed, otherwise return NULL. */
15201 fold_read_from_constant_string (tree exp)
15203 if ((TREE_CODE (exp) == INDIRECT_REF
15204 || TREE_CODE (exp) == ARRAY_REF)
15205 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15207 tree exp1 = TREE_OPERAND (exp, 0);
/* For *p, string_constant also extracts the constant offset INDEX;
   for array refs the index is the explicit second operand.  */
15211 if (TREE_CODE (exp) == INDIRECT_REF)
15212 string = string_constant (exp1, &index);
15215 tree low_bound = array_ref_low_bound (exp);
15216 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
15218 /* Optimize the special-case of a zero lower bound.
15220 We convert the low_bound to sizetype to avoid some problems
15221 with constant folding. (E.g. suppose the lower bound is 1,
15222 and its mode is QI. Without the conversion,l (ARRAY
15223 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15224 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15225 if (! integer_zerop (low_bound))
15226 index = size_diffop (index, fold_convert (sizetype, low_bound))
/* Fold only when the access is a genuine in-bounds, byte-sized read
   of a STRING_CST with a constant index and matching element mode.  */
15232 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15233 && TREE_CODE (string) == STRING_CST
15234 && TREE_CODE (index) == INTEGER_CST
15235 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15236 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15238 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15239 return build_int_cst_type (TREE_TYPE (exp),
15240 (TREE_STRING_POINTER (string)
15241 [TREE_INT_CST_LOW (index)]));
15246 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15247 an integer constant, real, or fixed-point constant.
15249 TYPE is the type of the result. */
15252 fold_negate_const (tree arg0, tree type)
15254 tree t = NULL_TREE;
15256 switch (TREE_CODE (arg0))
/* INTEGER_CST case (label elided): negate the double-word value and
   refit it into TYPE, tracking overflow.  */
15260 unsigned HOST_WIDE_INT low;
15261 HOST_WIDE_INT high;
15262 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15263 TREE_INT_CST_HIGH (arg0),
/* Overflow only matters for signed types (e.g. -INT_MIN); unsigned
   negation wraps by definition.  */
15265 t = force_fit_type_double (type, low, high, 1,
15266 (overflow | TREE_OVERFLOW (arg0))
15267 && !TYPE_UNSIGNED (type));
/* REAL_CST case: negation is exact, just flip the sign.  */
15272 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
/* FIXED_CST case: fixed_arithmetic honors TYPE's saturation mode.  */
15277 FIXED_VALUE_TYPE f;
15278 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15279 &(TREE_FIXED_CST (arg0)), NULL,
15280 TYPE_SATURATING (type));
15281 t = build_fixed (type, f);
15282 /* Propagate overflow flags. */
15283 if (overflow_p | TREE_OVERFLOW (arg0))
15285 TREE_OVERFLOW (t) = 1;
15286 TREE_CONSTANT_OVERFLOW (t) = 1;
15288 else if (TREE_CONSTANT_OVERFLOW (arg0))
15289 TREE_CONSTANT_OVERFLOW (t) = 1;
/* Callers must pass one of the three constant kinds above.  */
15294 gcc_unreachable ();
15300 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15301 an integer constant or real constant.
15303 TYPE is the type of the result. */
15306 fold_abs_const (tree arg0, tree type)
15308 tree t = NULL_TREE;
15310 switch (TREE_CODE (arg0))
15313 /* If the value is unsigned, then the absolute value is
15314 the same as the ordinary value. */
15315 if (TYPE_UNSIGNED (type))
15317 /* Similarly, if the value is non-negative. */
/* -1 < arg0 is exactly "arg0 >= 0" for integer constants.  */
15318 else if (INT_CST_LT (integer_minus_one_node, arg0))
15320 /* If the value is negative, then the absolute value is
/* Negative: negate via neg_double; abs can overflow (|INT_MIN|).  */
15324 unsigned HOST_WIDE_INT low;
15325 HOST_WIDE_INT high;
15326 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15327 TREE_INT_CST_HIGH (arg0),
15329 t = force_fit_type_double (type, low, high, -1,
15330 overflow | TREE_OVERFLOW (arg0));
/* REAL_CST case: flip the sign only when the value is negative.  */
15335 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15336 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
/* Callers must pass an INTEGER_CST or REAL_CST.  */
15342 gcc_unreachable ();
15348 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15349 constant. TYPE is the type of the result. */
15352 fold_not_const (tree arg0, tree type)
15354 tree t = NULL_TREE;
15356 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
/* Bitwise-complement both halves of the double-word value and refit
   into TYPE; ~x never introduces new overflow of its own.  */
15358 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15359 ~TREE_INT_CST_HIGH (arg0), 0,
15360 TREE_OVERFLOW (arg0));
15365 /* Given CODE, a relational operator, the target type, TYPE and two
15366 constant operands OP0 and OP1, return the result of the
15367 relational operation. If the result is not a compile time
15368 constant, then return NULL_TREE. */
15371 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15373 int result, invert;
15375 /* From here on, the only cases we handle are when the result is
15376 known to be a constant. */
15378 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15380 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15381 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15383 /* Handle the cases where either operand is a NaN. */
15384 if (real_isnan (c0) || real_isnan (c1))
15394 case UNORDERED_EXPR:
/* Ordered comparisons with a NaN would raise an exception; with
   -ftrapping-math we must not fold them away.  */
15408 if (flag_trapping_math)
15414 gcc_unreachable ();
15417 return constant_boolean_node (result, type);
/* Neither operand is NaN: delegate to the real-number comparator.  */
15420 return constant_boolean_node (real_compare (code, c0, c1), type);
15423 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15425 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15426 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15427 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15430 /* Handle equality/inequality of complex constants. */
15431 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15433 tree rcond = fold_relational_const (code, type,
15434 TREE_REALPART (op0),
15435 TREE_REALPART (op1));
15436 tree icond = fold_relational_const (code, type,
15437 TREE_IMAGPART (op0),
15438 TREE_IMAGPART (op1));
/* Complex equality requires both parts equal; inequality, either.  */
15439 if (code == EQ_EXPR)
15440 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15441 else if (code == NE_EXPR)
15442 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15447 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15449 To compute GT, swap the arguments and do LT.
15450 To compute GE, do LT and invert the result.
15451 To compute LE, swap the arguments, do LT and invert the result.
15452 To compute NE, do EQ and invert the result.
15454 Therefore, the code below must handle only EQ and LT. */
15456 if (code == LE_EXPR || code == GT_EXPR)
15461 code = swap_tree_comparison (code);
15464 /* Note that it is safe to invert for real values here because we
15465 have already handled the one case that it matters. */
15468 if (code == NE_EXPR || code == GE_EXPR)
15471 code = invert_tree_comparison (code, false);
15474 /* Compute a result for LT or EQ if args permit;
15475 Otherwise return T. */
15476 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15478 if (code == EQ_EXPR)
15479 result = tree_int_cst_equal (op0, op1);
/* Use the comparison matching the operands' signedness.  */
15480 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15481 result = INT_CST_LT_UNSIGNED (op0, op1);
15483 result = INT_CST_LT (op0, op1);
15490 return constant_boolean_node (result, type);
15493 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15494 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15498 fold_build_cleanup_point_expr (tree type, tree expr)
15500 /* If the expression does not have side effects then we don't have to wrap
15501 it with a cleanup point expression. */
15502 if (!TREE_SIDE_EFFECTS (expr))
15505 /* If the expression is a return, check to see if the expression inside the
15506 return has no side effects or the right hand side of the modify expression
15507 inside the return. If either don't have side effects set we don't need to
15508 wrap the expression in a cleanup point expression. Note we don't check the
15509 left hand side of the modify because it should always be a return decl. */
15510 if (TREE_CODE (expr) == RETURN_EXPR)
15512 tree op = TREE_OPERAND (expr, 0);
15513 if (!op || !TREE_SIDE_EFFECTS (op))
/* op is now the MODIFY_EXPR; inspect its right-hand side.  */
15515 op = TREE_OPERAND (op, 1);
15516 if (!TREE_SIDE_EFFECTS (op))
/* A cleanup point is genuinely needed: wrap EXPR.  */
15520 return build1 (CLEANUP_POINT_EXPR, type, expr);
15523 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15524 of an indirection through OP0, or NULL_TREE if no simplification is
15528 fold_indirect_ref_1 (tree type, tree op0)
/* NOTE(review): the lines stripping OP0 into SUB are elided in this
   excerpt; SUB is presumably OP0 with conversions stripped -- confirm.  */
15534 subtype = TREE_TYPE (sub);
15535 if (!POINTER_TYPE_P (subtype))
15538 if (TREE_CODE (sub) == ADDR_EXPR)
15540 tree op = TREE_OPERAND (sub, 0);
15541 tree optype = TREE_TYPE (op);
15542 /* *&CONST_DECL -> to the value of the const decl. */
15543 if (TREE_CODE (op) == CONST_DECL)
15544 return DECL_INITIAL (op);
15545 /* *&p => p; make sure to handle *&"str"[cst] here. */
15546 if (type == optype)
15548 tree fop = fold_read_from_constant_string (op);
15554 /* *(foo *)&fooarray => fooarray[0] */
15555 else if (TREE_CODE (optype) == ARRAY_TYPE
15556 && type == TREE_TYPE (optype))
15558 tree type_domain = TYPE_DOMAIN (optype);
/* The first element lives at the array's lower bound, not always 0.  */
15559 tree min_val = size_zero_node;
15560 if (type_domain && TYPE_MIN_VALUE (type_domain))
15561 min_val = TYPE_MIN_VALUE (type_domain);
15562 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15564 /* *(foo *)&complexfoo => __real__ complexfoo */
15565 else if (TREE_CODE (optype) == COMPLEX_TYPE
15566 && type == TREE_TYPE (optype))
15567 return fold_build1 (REALPART_EXPR, type, op);
15568 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15569 else if (TREE_CODE (optype) == VECTOR_TYPE
15570 && type == TREE_TYPE (optype))
15572 tree part_width = TYPE_SIZE (type);
15573 tree index = bitsize_int (0);
15574 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15578 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15579 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15580 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15582 tree op00 = TREE_OPERAND (sub, 0);
15583 tree op01 = TREE_OPERAND (sub, 1);
15587 op00type = TREE_TYPE (op00);
15588 if (TREE_CODE (op00) == ADDR_EXPR
15589 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15590 && type == TREE_TYPE (TREE_TYPE (op00type)))
/* Convert the byte offset into a bit index into the vector.  */
15592 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15593 tree part_width = TYPE_SIZE (type);
15594 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15595 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15596 tree index = bitsize_int (indexi);
/* Only fold accesses that stay within the vector's elements.  */
15598 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15599 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15600 part_width, index);
15606 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15607 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15608 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15610 tree op00 = TREE_OPERAND (sub, 0);
15611 tree op01 = TREE_OPERAND (sub, 1);
15615 op00type = TREE_TYPE (op00);
15616 if (TREE_CODE (op00) == ADDR_EXPR
15617 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15618 && type == TREE_TYPE (TREE_TYPE (op00type)))
/* The imaginary part sits exactly one element-size past the base.  */
15620 tree size = TYPE_SIZE_UNIT (type);
15621 if (tree_int_cst_equal (size, op01))
15622 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15626 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15627 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15628 && type == TREE_TYPE (TREE_TYPE (subtype)))
15631 tree min_val = size_zero_node;
15632 sub = build_fold_indirect_ref (sub);
15633 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15634 if (type_domain && TYPE_MIN_VALUE (type_domain))
15635 min_val = TYPE_MIN_VALUE (type_domain);
15636 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15642 /* Builds an expression for an indirection through T, simplifying some
/* Try fold_indirect_ref_1 first; if no simplification applies, build
   a plain INDIRECT_REF of the pointed-to type.  */
15646 build_fold_indirect_ref (tree t)
15648 tree type = TREE_TYPE (TREE_TYPE (t));
15649 tree sub = fold_indirect_ref_1 (type, t);
15654 return build1 (INDIRECT_REF, type, t);
15657 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15660 fold_indirect_ref (tree t)
/* Unlike build_fold_indirect_ref, T is already an INDIRECT_REF here,
   so simplify its operand; the fall-through (elided) returns T.  */
15662 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15670 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15671 whose result is ignored. The type of the returned tree need not be
15672 the same as the original expression. */
15675 fold_ignored_result (tree t)
/* A side-effect-free expression whose value is ignored can be dropped
   entirely.  */
15677 if (!TREE_SIDE_EFFECTS (t))
15678 return integer_zero_node;
/* Otherwise iteratively peel wrappers until only the side-effecting
   core remains (loop construct elided in this excerpt).  */
15681 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15684 t = TREE_OPERAND (t, 0);
/* Binary/comparison: keep whichever operand carries the side effects;
   if both do, we can strip nothing.  */
15688 case tcc_comparison:
15689 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15690 t = TREE_OPERAND (t, 0);
15691 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15692 t = TREE_OPERAND (t, 1);
15697 case tcc_expression:
15698 switch (TREE_CODE (t))
15700 case COMPOUND_EXPR:
/* (a, b) with b side-effecting cannot be reduced further.  */
15701 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15703 t = TREE_OPERAND (t, 0);
/* COND_EXPR (label elided): only reducible when both arms are
   side-effect free, leaving just the condition.  */
15707 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15708 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15710 t = TREE_OPERAND (t, 0);
15723 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15724 This can only be applied to objects of a sizetype. */
15727 round_up (tree value, int divisor)
15729 tree div = NULL_TREE;
15731 gcc_assert (divisor > 0);
15735 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15736 have to do anything. Only do this when we are not given a const,
15737 because in that case, this check is more expensive than just
15739 if (TREE_CODE (value) != INTEGER_CST)
15741 div = build_int_cst (TREE_TYPE (value), divisor);
15743 if (multiple_of_p (TREE_TYPE (value), value, div))
15747 /* If divisor is a power of two, simplify this to bit manipulation. */
/* A positive power of two equals its own lowest set bit.  */
15748 if (divisor == (divisor & -divisor))
15750 if (TREE_CODE (value) == INTEGER_CST)
/* Constant operand: round the double-word value in place.  */
15752 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15753 unsigned HOST_WIDE_INT high;
/* Already aligned — the elided branch presumably returns VALUE.  */
15756 if ((low & (divisor - 1)) == 0)
15759 overflow_p = TREE_OVERFLOW (value);
15760 high = TREE_INT_CST_HIGH (value);
/* Clear the low bits; the elided code between here and the return
   presumably adds DIVISOR first so this rounds up, not down.  */
15761 low &= ~(divisor - 1);
15770 return force_fit_type_double (TREE_TYPE (value), low, high,
/* Non-constant operand: (value + (divisor-1)) & -divisor.  */
15777 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15778 value = size_binop (PLUS_EXPR, value, t);
15779 t = build_int_cst (TREE_TYPE (value), -divisor);
15780 value = size_binop (BIT_AND_EXPR, value, t);
/* General divisor: ceil-divide then multiply back.  */
15786 div = build_int_cst (TREE_TYPE (value), divisor);
15787 value = size_binop (CEIL_DIV_EXPR, value, div);
15788 value = size_binop (MULT_EXPR, value, div);
15794 /* Likewise, but round down. */
15797 round_down (tree value, int divisor)
15799 tree div = NULL_TREE;
15801 gcc_assert (divisor > 0);
15805 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15806 have to do anything. Only do this when we are not given a const,
15807 because in that case, this check is more expensive than just
15809 if (TREE_CODE (value) != INTEGER_CST)
15811 div = build_int_cst (TREE_TYPE (value), divisor);
15813 if (multiple_of_p (TREE_TYPE (value), value, div))
15817 /* If divisor is a power of two, simplify this to bit manipulation. */
/* A positive power of two equals its own lowest set bit.  Rounding
   down is just masking: value & -divisor.  */
15818 if (divisor == (divisor & -divisor))
15822 t = build_int_cst (TREE_TYPE (value), -divisor);
15823 value = size_binop (BIT_AND_EXPR, value, t);
/* General divisor: floor-divide then multiply back.  */
15828 div = build_int_cst (TREE_TYPE (value), divisor);
15829 value = size_binop (FLOOR_DIV_EXPR, value, div);
15830 value = size_binop (MULT_EXPR, value, div);
15836 /* Returns the pointer to the base of the object addressed by EXP and
15837 extracts the information about the offset of the access, storing it
15838 to PBITPOS and POFFSET. */
15841 split_address_to_core_and_offset (tree exp,
15842 HOST_WIDE_INT *pbitpos, tree *poffset)
15845 enum machine_mode mode;
15846 int unsignedp, volatilep;
15847 HOST_WIDE_INT bitsize;
15849 if (TREE_CODE (exp) == ADDR_EXPR)
/* &object: decompose the addressed object into base, constant bit
   position, and variable offset, then re-take the base's address.  */
15851 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15852 poffset, &mode, &unsignedp, &volatilep,
15854 core = fold_addr_expr (core);
/* Non-ADDR_EXPR case (elided): EXP itself is the core with no
   variable offset; bit position presumably set to 0 in elided code.  */
15860 *poffset = NULL_TREE;
15866 /* Returns true if addresses of E1 and E2 differ by a constant, false
15867 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15870 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15873 HOST_WIDE_INT bitpos1, bitpos2;
15874 tree toffset1, toffset2, tdiff, type;
15876 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15877 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
/* Fail on sub-byte positions or when the two bases are not provably
   the same object.  */
15879 if (bitpos1 % BITS_PER_UNIT != 0
15880 || bitpos2 % BITS_PER_UNIT != 0
15881 || !operand_equal_p (core1, core2, 0))
15884 if (toffset1 && toffset2)
/* Both variable offsets present: their folded difference must be a
   host-word constant for the overall difference to be constant.  */
15886 type = TREE_TYPE (toffset1);
15887 if (type != TREE_TYPE (toffset2))
15888 toffset2 = fold_convert (type, toffset2);
15890 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15891 if (!cst_and_fits_in_hwi (tdiff))
15894 *diff = int_cst_value (tdiff);
15896 else if (toffset1 || toffset2)
15898 /* If only one of the offsets is non-constant, the difference cannot
/* Finally fold in the constant bit-position delta, in bytes.  */
15905 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15909 /* Simplify the floating point expression EXP when the sign of the
15910 result is not significant. Return NULL_TREE if no simplification
15914 fold_strip_sign_ops (tree exp)
15918 switch (TREE_CODE (exp))
15922 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15923 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15927 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15929 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15930 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15931 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15932 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15933 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15934 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15937 case COMPOUND_EXPR:
15938 arg0 = TREE_OPERAND (exp, 0);
15939 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15941 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15945 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15946 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15948 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15949 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15950 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15955 const enum built_in_function fcode = builtin_mathfn_code (exp);
15958 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15959 /* Strip copysign function call, return the 1st argument. */
15960 arg0 = CALL_EXPR_ARG (exp, 0);
15961 arg1 = CALL_EXPR_ARG (exp, 1);
15962 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15965 /* Strip sign ops from the argument of "odd" math functions. */
15966 if (negate_mathfn_p (fcode))
15968 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15970 return build_call_expr (get_callee_fndecl (exp), 1, arg0);