1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26   @@ The routines that translate from the ap rep should warn if
     precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
57 #include "fixed-value.h"
66 #include "langhooks.h"
70 /* Nonzero if we are folding constants inside an initializer; zero
72 int folding_initializer = 0;
74 /* The following constants represent a bit based encoding of GCC's
75 comparison operators. This encoding simplifies transformations
76 on relational comparison operators, such as AND and OR. */
77 enum comparison_code {
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree, int);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static tree combine_comparisons (enum tree_code, enum tree_code,
107 enum tree_code, tree, tree, tree);
108 static int operand_equal_for_comparison_p (tree, tree, tree);
109 static int twoval_comparison_p (tree, tree *, tree *, int *);
110 static tree eval_subst (tree, tree, tree, tree, tree);
111 static tree pedantic_omit_one_operand (tree, tree, tree);
112 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
113 static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
114 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
115 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
116 enum machine_mode *, int *, int *,
118 static int all_ones_mask_p (const_tree, int);
119 static tree sign_bit_p (tree, const_tree);
120 static int simple_operand_p (const_tree);
121 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
122 static tree range_predecessor (tree);
123 static tree range_successor (tree);
124 static tree make_range (tree, int *, tree *, tree *, bool *);
125 static tree build_range_check (tree, tree, int, tree, tree);
126 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
128 static tree fold_range_test (enum tree_code, tree, tree, tree);
129 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
130 static tree unextend (tree, int, int, tree);
131 static tree fold_truthop (enum tree_code, tree, tree, tree);
132 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
133 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
134 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
135 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (const_tree, const_tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
148 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
149 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
150 and SUM1. Then this yields nonzero if overflow occurred during the
153 Overflow occurs if A and B have the same sign, but A and SUM differ in
154 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
156 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
158 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
159 We do that by representing the two-word integer in 4 words, with only
160 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
161 number. The value of the word is LOWPART + HIGHPART * BASE. */
164 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
165 #define HIGHPART(x) \
166 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
167 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
169 /* Unpack a two-word integer into 4 words.
170 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
171 WORDS points to the array of HOST_WIDE_INTs. */
/* Split the two-word integer LOW (low word) / HI (high word) into the
   4-element array WORDS, one half-word per element, so that multi-word
   arithmetic can be done half-word at a time without internal overflow.
   NOTE(review): this dump elides the function's return type, braces and
   some interior lines; surviving lines kept byte-identical.  */
174 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
/* Each word stores only HOST_BITS_PER_WIDE_INT/2 bits (see LOWPART/HIGHPART). */
176 words[0] = LOWPART (low);
177 words[1] = HIGHPART (low);
178 words[2] = LOWPART (hi);
179 words[3] = HIGHPART (hi);
182 /* Pack an array of 4 words into a two-word integer.
183 WORDS points to the array of words.
184 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
/* Inverse of encode: reassemble the 4 half-words in WORDS back into the
   two-word integer *LOW / *HI.  Each element's value is
   low-half + high-half * BASE, where BASE = 2^(HOST_BITS_PER_WIDE_INT/2).
   NOTE(review): parameter list and braces are partially elided in this dump.  */
187 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
190 *low = words[0] + words[1] * BASE;
191 *hi = words[2] + words[3] * BASE;
194 /* Force the double-word integer L1, H1 to be within the range of the
195 integer type TYPE. Stores the properly truncated and sign-extended
196 double-word integer in *LV, *HV. Returns true if the operation
197 overflows, that is, argument and result are different. */
/* Truncate and sign/zero extend the double-word value L1 (low) / H1 (high)
   to the precision of TYPE, storing the result in *LV / *HV.
   Returns nonzero iff the truncated result differs from the input,
   i.e. the value did not fit (overflow).
   NOTE(review): several interior lines (pointer-type handling body,
   assignments to *lv/*hv) are elided in this dump; the surviving lines
   are kept byte-identical.  */
200 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
201 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
/* Remember the original value so overflow can be detected at the end.  */
203 unsigned HOST_WIDE_INT low0 = l1;
204 HOST_WIDE_INT high0 = h1;
206 int sign_extended_type;
/* Pointer and offset types get special precision handling (body elided).  */
208 if (POINTER_TYPE_P (type)
209 || TREE_CODE (type) == OFFSET_TYPE)
212 prec = TYPE_PRECISION (type);
214 /* Size types *are* sign extended. */
215 sign_extended_type = (!TYPE_UNSIGNED (type)
216 || (TREE_CODE (type) == INTEGER_TYPE
217 && TYPE_IS_SIZETYPE (type)));
219 /* First clear all bits that are beyond the type's precision. */
220 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
222 else if (prec > HOST_BITS_PER_WIDE_INT)
223 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
227 if (prec < HOST_BITS_PER_WIDE_INT)
228 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
231 /* Then do sign extension if necessary. */
232 if (!sign_extended_type)
233 /* No sign extension */;
234 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
235 /* Correct width already. */;
236 else if (prec > HOST_BITS_PER_WIDE_INT)
238 /* Sign extend top half? */
239 if (h1 & ((unsigned HOST_WIDE_INT)1
240 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
241 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
243 else if (prec == HOST_BITS_PER_WIDE_INT)
245 if ((HOST_WIDE_INT)l1 < 0)
250 /* Sign extend bottom half? */
251 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
254 l1 |= (HOST_WIDE_INT)(-1) << prec;
261 /* If the value didn't fit, signal overflow. */
262 return l1 != low0 || h1 != high0;
265 /* We force the double-int HIGH:LOW to the range of the type TYPE by
266 sign or zero extending it.
267 OVERFLOWABLE indicates if we are interested
268 in overflow of the value, when >0 we are only interested in signed
269 overflow, for <0 we are interested in any overflow. OVERFLOWED
270 indicates whether overflow has already occurred. CONST_OVERFLOWED
271 indicates whether constant overflow has already occurred. We force
272 T's value to be within range of T's type (by setting to 0 or 1 all
273 the bits outside the type's range). We set TREE_OVERFLOWED if,
274 OVERFLOWED is nonzero,
275 or OVERFLOWABLE is >0 and signed overflow occurs
276 or OVERFLOWABLE is <0 and any overflow occurs
277 We return a new tree node for the extended double-int. The node
278 is shared if no overflow flags are set. */
/* Build an INTEGER_CST of TYPE from LOW/HIGH, forcing the value into
   TYPE's range via fit_double_type.  OVERFLOWABLE / OVERFLOWED control
   whether TREE_OVERFLOW is set (see the block comment above); when an
   overflow flag must be recorded, a fresh unshared node is made so the
   shared constant pool is not polluted; otherwise a shared node is
   returned via build_int_cst_wide.
   NOTE(review): the parameter list tail and parts of the overflow
   condition are elided in this dump.  */
281 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
282 HOST_WIDE_INT high, int overflowable,
285 int sign_extended_type;
288 /* Size types *are* sign extended. */
289 sign_extended_type = (!TYPE_UNSIGNED (type)
290 || (TREE_CODE (type) == INTEGER_TYPE
291 && TYPE_IS_SIZETYPE (type)));
/* fit_double_type truncates/extends in place and reports overflow.  */
293 overflow = fit_double_type (low, high, &low, &high, type);
295 /* If we need to set overflow flags, return a new unshared node. */
296 if (overflowed || overflow)
300 || (overflowable > 0 && sign_extended_type))
302 tree t = make_node (INTEGER_CST);
303 TREE_INT_CST_LOW (t) = low;
304 TREE_INT_CST_HIGH (t) = high;
305 TREE_TYPE (t) = type;
306 TREE_OVERFLOW (t) = 1;
311 /* Else build a shared node. */
312 return build_int_cst_wide (type, low, high);
315 /* Add two doubleword integers with doubleword result.
316 Return nonzero if the operation overflows according to UNSIGNED_P.
317 Each argument is given as two `HOST_WIDE_INT' pieces.
318 One argument is L1 and H1; the other, L2 and H2.
319 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Add the double-word integers L1/H1 and L2/H2, storing the double-word
   sum in *LV/*HV.  Returns nonzero on overflow: for the unsigned case an
   unsigned high-word compare is used; for the signed case the
   OVERFLOW_SUM_SIGN sign test is used.
   NOTE(review): the unsigned_p parameter line, low-word addition and
   carry computation are elided in this dump.  */
322 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
323 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
324 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
327 unsigned HOST_WIDE_INT l;
/* High words are added as unsigned to make wraparound well-defined.  */
331 h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1
332 + (unsigned HOST_WIDE_INT) h2
/* Unsigned overflow: result high word smaller than an operand's.  */
339 return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1
/* Signed overflow: operands same sign, sum differs (see macro above).  */
343 return OVERFLOW_SUM_SIGN (h1, h2, h);
346 /* Negate a doubleword integer with doubleword result.
347 Return nonzero if the operation overflows, assuming it's signed.
348 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
349 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Negate the double-word integer L1/H1 into *LV/*HV (two's complement).
   Returns nonzero on signed overflow; the only overflowing input is the
   most negative value, which negates to itself — the `(*hv & h1) < 0'
   test catches exactly that case.
   NOTE(review): the negation statements themselves are elided here.  */
352 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
353 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
359 return (*hv & h1) < 0;
369 /* Multiply two doubleword integers with doubleword result.
370 Return nonzero if the operation overflows according to UNSIGNED_P.
371 Each argument is given as two `HOST_WIDE_INT' pieces.
372 One argument is L1 and H1; the other, L2 and H2.
373 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Multiply the double-word integers L1/H1 and L2/H2 using half-word
   schoolbook multiplication on the encode()d arrays; the low half of the
   8-half-word product goes to *LV/*HV.  Returns nonzero on overflow per
   UNSIGNED_P: unsigned overflow iff the top half is nonzero; signed
   overflow is checked by correcting the top half for negative operands
   and comparing it against the low half's sign.
   NOTE(review): the unsigned_p parameter, loop indices and parts of the
   sign-correction conditionals are elided in this dump.  */
376 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
377 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
378 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
381 HOST_WIDE_INT arg1[4];
382 HOST_WIDE_INT arg2[4];
/* 4 x 4 half-words can produce an 8-half-word product.  */
383 HOST_WIDE_INT prod[4 * 2];
384 unsigned HOST_WIDE_INT carry;
386 unsigned HOST_WIDE_INT toplow, neglow;
387 HOST_WIDE_INT tophigh, neghigh;
389 encode (arg1, l1, h1);
390 encode (arg2, l2, h2);
392 memset (prod, 0, sizeof prod);
394 for (i = 0; i < 4; i++)
397 for (j = 0; j < 4; j++)
400 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
401 carry += arg1[i] * arg2[j];
402 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
404 prod[k] = LOWPART (carry);
405 carry = HIGHPART (carry);
/* Low half of the product is the result; top half is kept for
   overflow detection.  */
410 decode (prod, lv, hv);
411 decode (prod + 4, &toplow, &tophigh);
413 /* Unsigned overflow is immediate. */
415 return (toplow | tophigh) != 0;
417 /* Check for signed overflow by calculating the signed representation of the
418 top half of the result; it should agree with the low half's sign bit. */
/* If an operand was negative, the unsigned top half includes a spurious
   copy of the other operand; subtract it (add its negation).  */
421 neg_double (l2, h2, &neglow, &neghigh);
422 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
426 neg_double (l1, h1, &neglow, &neghigh);
427 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
/* Top half must be all sign bits: all-ones if result negative, zero if
   nonnegative; anything else is signed overflow.  */
429 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
432 /* Shift the doubleword integer in L1, H1 left by COUNT places
433 keeping only PREC bits of result.
434 Shift right if COUNT is negative.
435 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
436 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Shift the double-word integer L1/H1 left by COUNT bits, keeping only
   PREC bits of result, into *LV/*HV.  Negative COUNT delegates to
   rshift_double.  ARITH selects arithmetic (sign-extending) semantics
   for the final extension step.
   NOTE(review): the SHIFT_COUNT_TRUNCATED body, the >=2-words and
   low-word assignments, and parts of the extension logic are elided.  */
439 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
440 HOST_WIDE_INT count, unsigned int prec,
441 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
443 unsigned HOST_WIDE_INT signmask;
/* A negative left shift is a right shift.  */
447 rshift_double (l1, h1, -count, prec, lv, hv, arith);
451 if (SHIFT_COUNT_TRUNCATED)
454 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
456 /* Shifting by the host word size is undefined according to the
457 ANSI standard, so we must handle this as a special case. */
461 else if (count >= HOST_BITS_PER_WIDE_INT)
463 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
/* The double shift (>> (w - count - 1) >> 1) avoids the undefined
   shift-by-word-size when count == 0.  */
468 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
469 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
473 /* Sign extend all bits that are beyond the precision. */
475 signmask = -((prec > HOST_BITS_PER_WIDE_INT
476 ? ((unsigned HOST_WIDE_INT) *hv
477 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
478 : (*lv >> (prec - 1))) & 1);
480 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
482 else if (prec >= HOST_BITS_PER_WIDE_INT)
484 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
485 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
490 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
491 *lv |= signmask << prec;
495 /* Shift the doubleword integer in L1, H1 right by COUNT places
496 keeping only PREC bits of result. COUNT must be positive.
497 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
498 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Shift the double-word integer L1/H1 right by COUNT (must be positive)
   keeping PREC bits, into *LV/*HV.  The arith flag selects arithmetic
   vs. logical shift via the signmask derived from H1's sign bit.
   NOTE(review): the arith parameter line, SHIFT_COUNT_TRUNCATED body,
   count >= 2 words case and parts of the extension code are elided.  */
501 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
502 HOST_WIDE_INT count, unsigned int prec,
503 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
506 unsigned HOST_WIDE_INT signmask;
/* All-ones if arithmetic shift of a negative value, else zero.  */
509 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
512 if (SHIFT_COUNT_TRUNCATED)
515 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
517 /* Shifting by the host word size is undefined according to the
518 ANSI standard, so we must handle this as a special case. */
522 else if (count >= HOST_BITS_PER_WIDE_INT)
525 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
529 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
/* Double shift (<< (w - count - 1) << 1) avoids undefined behavior
   when count == 0.  */
531 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
534 /* Zero / sign extend all bits that are beyond the precision. */
536 if (count >= (HOST_WIDE_INT)prec)
541 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
543 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
545 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
546 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
551 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
552 *lv |= signmask << (prec - count);
556 /* Rotate the doubleword integer in L1, H1 left by COUNT places
557 keeping only PREC bits of result.
558 Rotate right if COUNT is negative.
559 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Rotate the double-word integer L1/H1 left by COUNT within PREC bits,
   into *LV/*HV, implemented as (x << count) | (x >> (prec - count))
   using logical shifts.
   NOTE(review): count normalization and the final OR combining s1 and s2
   are elided in this dump.  */
562 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
563 HOST_WIDE_INT count, unsigned int prec,
564 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
566 unsigned HOST_WIDE_INT s1l, s2l;
567 HOST_WIDE_INT s1h, s2h;
573 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
574 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
579 /* Rotate the doubleword integer in L1, H1 left by COUNT places
580 keeping only PREC bits of result. COUNT must be positive.
581 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Rotate the double-word integer L1/H1 right by COUNT (positive) within
   PREC bits, into *LV/*HV: (x >> count) | (x << (prec - count)) with
   logical shifts.  Mirror of lrotate_double.
   NOTE(review): the final combination of s1 and s2 is elided here.  */
584 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
585 HOST_WIDE_INT count, unsigned int prec,
586 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
588 unsigned HOST_WIDE_INT s1l, s2l;
589 HOST_WIDE_INT s1h, s2h;
595 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
596 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
601 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
602 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
603 CODE is a tree code for a kind of division, one of
604 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
606 It controls how the quotient is rounded to an integer.
607 Return nonzero if the operation overflows.
608 UNS nonzero says do unsigned division. */
/* Divide LNUM/HNUM by LDEN/HDEN, producing quotient *LQUO/*HQUO and
   remainder *LREM/*HREM, rounding per CODE (TRUNC/FLOOR/CEIL/ROUND
   variants).  UNS nonzero selects unsigned division.  Returns nonzero on
   overflow (divide by zero, or most-negative / -1).
   Structure: (1) sign handling, (2) single-precision fast path,
   (3) divisor < BASE fast path, (4) full Knuth "Algorithm D" division on
   half-word digit arrays, (5) rounding adjustment, (6) final remainder
   as num - quo * den.
   NOTE(review): many interior lines (sign bookkeeping, several braces,
   fast-path bodies, switch framing) are elided in this dump; the
   surviving lines are kept byte-identical.  */
611 div_and_round_double (enum tree_code code, int uns,
612 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
613 HOST_WIDE_INT hnum_orig,
614 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
615 HOST_WIDE_INT hden_orig,
616 unsigned HOST_WIDE_INT *lquo,
617 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
621 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
622 HOST_WIDE_INT den[4], quo[4];
624 unsigned HOST_WIDE_INT work;
625 unsigned HOST_WIDE_INT carry = 0;
626 unsigned HOST_WIDE_INT lnum = lnum_orig;
627 HOST_WIDE_INT hnum = hnum_orig;
628 unsigned HOST_WIDE_INT lden = lden_orig;
629 HOST_WIDE_INT hden = hden_orig;
/* Division by zero: flag overflow and divide by 1 to avoid a trap.  */
632 if (hden == 0 && lden == 0)
633 overflow = 1, lden = 1;
635 /* Calculate quotient sign and convert operands to unsigned. */
641 /* (minimum integer) / (-1) is the only overflow case. */
642 if (neg_double (lnum, hnum, &lnum, &hnum)
643 && ((HOST_WIDE_INT) lden & hden) == -1)
649 neg_double (lden, hden, &lden, &hden);
653 if (hnum == 0 && hden == 0)
654 { /* single precision */
656 /* This unsigned division rounds toward zero. */
662 { /* trivial case: dividend < divisor */
663 /* hden != 0 already checked. */
670 memset (quo, 0, sizeof quo);
672 memset (num, 0, sizeof num); /* to zero 9th element */
673 memset (den, 0, sizeof den);
675 encode (num, lnum, hnum);
676 encode (den, lden, hden);
678 /* Special code for when the divisor < BASE. */
679 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
681 /* hnum != 0 already checked. */
682 for (i = 4 - 1; i >= 0; i--)
684 work = num[i] + carry * BASE;
685 quo[i] = work / lden;
691 /* Full double precision division,
692 with thanks to Don Knuth's "Seminumerical Algorithms". */
693 int num_hi_sig, den_hi_sig;
694 unsigned HOST_WIDE_INT quo_est, scale;
696 /* Find the highest nonzero divisor digit. */
697 for (i = 4 - 1;; i--)
704 /* Insure that the first digit of the divisor is at least BASE/2.
705 This is required by the quotient digit estimation algorithm. */
707 scale = BASE / (den[den_hi_sig] + 1);
709 { /* scale divisor and dividend */
711 for (i = 0; i <= 4 - 1; i++)
713 work = (num[i] * scale) + carry;
714 num[i] = LOWPART (work);
715 carry = HIGHPART (work);
720 for (i = 0; i <= 4 - 1; i++)
722 work = (den[i] * scale) + carry;
723 den[i] = LOWPART (work);
724 carry = HIGHPART (work);
725 if (den[i] != 0) den_hi_sig = i;
/* Main Algorithm-D loop: one quotient digit per iteration.  */
732 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
734 /* Guess the next quotient digit, quo_est, by dividing the first
735 two remaining dividend digits by the high order quotient digit.
736 quo_est is never low and is at most 2 high. */
737 unsigned HOST_WIDE_INT tmp;
739 num_hi_sig = i + den_hi_sig + 1;
740 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
741 if (num[num_hi_sig] != den[den_hi_sig])
742 quo_est = work / den[den_hi_sig];
746 /* Refine quo_est so it's usually correct, and at most one high. */
747 tmp = work - quo_est * den[den_hi_sig];
749 && (den[den_hi_sig - 1] * quo_est
750 > (tmp * BASE + num[num_hi_sig - 2])))
753 /* Try QUO_EST as the quotient digit, by multiplying the
754 divisor by QUO_EST and subtracting from the remaining dividend.
755 Keep in mind that QUO_EST is the I - 1st digit. */
758 for (j = 0; j <= den_hi_sig; j++)
760 work = quo_est * den[j] + carry;
761 carry = HIGHPART (work);
762 work = num[i + j] - LOWPART (work);
763 num[i + j] = LOWPART (work);
764 carry += HIGHPART (work) != 0;
767 /* If quo_est was high by one, then num[i] went negative and
768 we need to correct things. */
769 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
772 carry = 0; /* add divisor back in */
773 for (j = 0; j <= den_hi_sig; j++)
775 work = num[i + j] + den[j] + carry;
776 carry = HIGHPART (work);
777 num[i + j] = LOWPART (work);
780 num [num_hi_sig] += carry;
783 /* Store the quotient digit. */
788 decode (quo, lquo, hquo);
791 /* If result is negative, make it so. */
793 neg_double (*lquo, *hquo, lquo, hquo);
795 /* Compute trial remainder: rem = num - (quo * den) */
796 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
797 neg_double (*lrem, *hrem, lrem, hrem);
798 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Rounding adjustment per CODE (switch framing elided in this dump).  */
803 case TRUNC_MOD_EXPR: /* round toward zero */
804 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
808 case FLOOR_MOD_EXPR: /* round toward negative infinity */
809 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
/* Floor of a negative ratio: decrement the quotient by one.  */
812 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
820 case CEIL_MOD_EXPR: /* round toward positive infinity */
821 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
/* Ceiling of a positive ratio: increment the quotient by one.  */
823 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
831 case ROUND_MOD_EXPR: /* round to closest integer */
833 unsigned HOST_WIDE_INT labs_rem = *lrem;
834 HOST_WIDE_INT habs_rem = *hrem;
835 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
836 HOST_WIDE_INT habs_den = hden, htwice;
838 /* Get absolute values. */
840 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
842 neg_double (lden, hden, &labs_den, &habs_den);
844 /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient. */
845 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
846 labs_rem, habs_rem, &ltwice, &htwice);
848 if (((unsigned HOST_WIDE_INT) habs_den
849 < (unsigned HOST_WIDE_INT) htwice)
850 || (((unsigned HOST_WIDE_INT) habs_den
851 == (unsigned HOST_WIDE_INT) htwice)
852 && (labs_den <= ltwice)))
/* Remainder is at least half the divisor: round away from zero.  */
856 add_double (*lquo, *hquo,
857 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
860 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
872 /* Compute true remainder: rem = num - (quo * den) */
873 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
874 neg_double (*lrem, *hrem, lrem, hrem);
875 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
879 /* If ARG2 divides ARG1 with zero remainder, carries out the division
880 of type CODE and returns the quotient.
881 Otherwise returns NULL_TREE. */
/* If ARG2 divides ARG1 exactly, return the quotient of the division of
   kind CODE as an INTEGER_CST; otherwise the function returns NULL_TREE
   (the return on nonzero remainder is elided in this dump).  Pointer
   operands are treated as signed so that &obj[0] + -128 folds to
   &obj[-8] rather than a huge unsigned offset.  */
884 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
886 unsigned HOST_WIDE_INT int1l, int2l;
887 HOST_WIDE_INT int1h, int2h;
888 unsigned HOST_WIDE_INT quol, reml;
889 HOST_WIDE_INT quoh, remh;
890 tree type = TREE_TYPE (arg1);
891 int uns = TYPE_UNSIGNED (type);
893 int1l = TREE_INT_CST_LOW (arg1);
894 int1h = TREE_INT_CST_HIGH (arg1);
895 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
896 &obj[some_exotic_number]. */
897 if (POINTER_TYPE_P (type))
/* Re-canonicalize the operand in the signed counterpart type.  */
900 type = signed_type_for (type);
901 fit_double_type (int1l, int1h, &int1l, &int1h,
905 fit_double_type (int1l, int1h, &int1l, &int1h, type);
906 int2l = TREE_INT_CST_LOW (arg2);
907 int2h = TREE_INT_CST_HIGH (arg2);
909 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
910 &quol, &quoh, &reml, &remh);
/* Any nonzero remainder means the division was not exact.  */
911 if (remh != 0 || reml != 0)
914 return build_int_cst_wide (type, quol, quoh);
917 /* This is nonzero if we should defer warnings about undefined
918 overflow. This facility exists because these warnings are a
919 special case. The code to estimate loop iterations does not want
920 to issue any warnings, since it works with expressions which do not
921 occur in user code. Various bits of cleanup code call fold(), but
922 only use the result if it has certain characteristics (e.g., is a
923 constant); that code only wants to issue a warning if the result is
926 static int fold_deferring_overflow_warnings;
928 /* If a warning about undefined overflow is deferred, this is the
929 warning. Note that this may cause us to turn two warnings into
930 one, but that is fine since it is sufficient to only give one
931 warning per expression. */
933 static const char* fold_deferred_overflow_warning;
935 /* If a warning about undefined overflow is deferred, this is the
936 level at which the warning should be emitted. */
938 static enum warn_strict_overflow_code fold_deferred_overflow_code;
940 /* Start deferring overflow warnings. We could use a stack here to
941 permit nested calls, but at present it is not necessary. */
/* Begin deferring undefined-overflow warnings: simply bump the nesting
   counter; warnings are then recorded instead of emitted until the
   matching fold_undefer_overflow_warnings call.  */
944 fold_defer_overflow_warnings (void)
946 ++fold_deferring_overflow_warnings;
949 /* Stop deferring overflow warnings. If there is a pending warning,
950 and ISSUE is true, then issue the warning if appropriate. STMT is
951 the statement with which the warning should be associated (used for
952 location information); STMT may be NULL. CODE is the level of the
953 warning--a warn_strict_overflow_code value. This function will use
954 the smaller of CODE and the deferred code when deciding whether to
955 issue the warning. CODE may be zero to mean to always use the
/* Pop one level of overflow-warning deferral.  If this was the outermost
   level and ISSUE is true, emit the pending warning (if any) at the
   location of STMT (or input_location when STMT is NULL / has no
   location), using the smaller of CODE and the recorded deferred code to
   decide whether the warning level warrants emission.
   NOTE(review): several early returns and the warnmsg/locus declarations
   are elided in this dump.  */
959 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
964 gcc_assert (fold_deferring_overflow_warnings > 0);
965 --fold_deferring_overflow_warnings;
/* Still nested: keep the strongest (numerically smallest) pending code.  */
966 if (fold_deferring_overflow_warnings > 0)
968 if (fold_deferred_overflow_warning != NULL
970 && code < (int) fold_deferred_overflow_code)
971 fold_deferred_overflow_code = code;
/* Outermost level: consume the pending warning.  */
975 warnmsg = fold_deferred_overflow_warning;
976 fold_deferred_overflow_warning = NULL;
978 if (!issue || warnmsg == NULL)
/* Respect the statement's no-warning flag.  */
981 if (gimple_no_warning_p (stmt))
984 /* Use the smallest code level when deciding to issue the
986 if (code == 0 || code > (int) fold_deferred_overflow_code)
987 code = fold_deferred_overflow_code;
989 if (!issue_strict_overflow_warning (code))
993 locus = input_location;
995 locus = gimple_location (stmt);
996 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
999 /* Stop deferring overflow warnings, ignoring any deferred
/* Pop one deferral level and drop any pending warning: delegates to
   fold_undefer_overflow_warnings with issue == false.  */
1003 fold_undefer_and_ignore_overflow_warnings (void)
1005 fold_undefer_overflow_warnings (false, NULL, 0);
1008 /* Whether we are deferring overflow warnings. */
/* Return true iff overflow warnings are currently being deferred
   (i.e. at least one fold_defer_overflow_warnings is outstanding).  */
1011 fold_deferring_overflow_warnings_p (void)
1013 return fold_deferring_overflow_warnings > 0;
1016 /* This is called when we fold something based on the fact that signed
1017 overflow is undefined. */
/* Record or emit a warning that a fold relied on signed overflow being
   undefined.  GMSGID is the (static-lifetime) message; WC is its
   strict-overflow level.  While deferring, only the strongest pending
   warning (smallest WC) is kept; otherwise warn immediately when the
   level is enabled.  */
1020 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1022 if (fold_deferring_overflow_warnings > 0)
1024 if (fold_deferred_overflow_warning == NULL
1025 || wc < fold_deferred_overflow_code)
1027 fold_deferred_overflow_warning = gmsgid;
1028 fold_deferred_overflow_code = wc;
1031 else if (issue_strict_overflow_warning (wc))
1032 warning (OPT_Wstrict_overflow, gmsgid);
1035 /* Return true if the built-in mathematical function specified by CODE
1036 is odd, i.e. -f(x) == f(-x). */
/* Return true if the math builtin CODE is odd, i.e. -f(x) == f(-x), so
   the negation can be pushed into the argument.  The rounding-related
   builtins (llrint/lrint/nearbyint/rint group) are only odd when
   -frounding-math is off, since rounding direction would otherwise make
   the identity fail.
   NOTE(review): the switch framing and default return are elided in
   this dump.  */
1039 negate_mathfn_p (enum built_in_function code)
1043 CASE_FLT_FN (BUILT_IN_ASIN):
1044 CASE_FLT_FN (BUILT_IN_ASINH):
1045 CASE_FLT_FN (BUILT_IN_ATAN):
1046 CASE_FLT_FN (BUILT_IN_ATANH):
1047 CASE_FLT_FN (BUILT_IN_CASIN):
1048 CASE_FLT_FN (BUILT_IN_CASINH):
1049 CASE_FLT_FN (BUILT_IN_CATAN):
1050 CASE_FLT_FN (BUILT_IN_CATANH):
1051 CASE_FLT_FN (BUILT_IN_CBRT):
1052 CASE_FLT_FN (BUILT_IN_CPROJ):
1053 CASE_FLT_FN (BUILT_IN_CSIN):
1054 CASE_FLT_FN (BUILT_IN_CSINH):
1055 CASE_FLT_FN (BUILT_IN_CTAN):
1056 CASE_FLT_FN (BUILT_IN_CTANH):
1057 CASE_FLT_FN (BUILT_IN_ERF):
1058 CASE_FLT_FN (BUILT_IN_LLROUND):
1059 CASE_FLT_FN (BUILT_IN_LROUND):
1060 CASE_FLT_FN (BUILT_IN_ROUND):
1061 CASE_FLT_FN (BUILT_IN_SIN):
1062 CASE_FLT_FN (BUILT_IN_SINH):
1063 CASE_FLT_FN (BUILT_IN_TAN):
1064 CASE_FLT_FN (BUILT_IN_TANH):
1065 CASE_FLT_FN (BUILT_IN_TRUNC):
/* These are odd only when the result does not depend on the dynamic
   rounding mode.  */
1068 CASE_FLT_FN (BUILT_IN_LLRINT):
1069 CASE_FLT_FN (BUILT_IN_LRINT):
1070 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1071 CASE_FLT_FN (BUILT_IN_RINT):
1072 return !flag_rounding_math;
1080 /* Check whether we may negate an integer constant T without causing
/* Return whether the INTEGER_CST T can be negated without overflow in
   its (signed) type: true unless T is the most negative value, whose
   low-order PREC bits are exactly 1 << (prec - 1) with all lower bits
   zero.  Unsigned types are handled separately (elided in this dump).  */
1084 may_negate_without_overflow_p (const_tree t)
1086 unsigned HOST_WIDE_INT val;
1090 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1092 type = TREE_TYPE (t);
1093 if (TYPE_UNSIGNED (type))
1096 prec = TYPE_PRECISION (type);
1097 if (prec > HOST_BITS_PER_WIDE_INT)
/* Wider than one word: the minimum value has a zero low word, so any
   nonzero low bit means we are safe; otherwise inspect the high word.  */
1099 if (TREE_INT_CST_LOW (t) != 0)
1101 prec -= HOST_BITS_PER_WIDE_INT;
1102 val = TREE_INT_CST_HIGH (t);
1105 val = TREE_INT_CST_LOW (t);
1106 if (prec < HOST_BITS_PER_WIDE_INT)
1107 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
/* Only the sign-bit-alone pattern (type minimum) overflows on negation.  */
1108 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1111 /* Determine whether an expression T can be cheaply negated using
1112 the function negate_expr without introducing undefined overflow. */
1115 negate_expr_p (tree t)
1122 type = TREE_TYPE (t);
1124 STRIP_SIGN_NOPS (t);
1125 switch (TREE_CODE (t))
1128 if (TYPE_OVERFLOW_WRAPS (type))
1131 /* Check that -CST will not overflow type. */
1132 return may_negate_without_overflow_p (t);
1134 return (INTEGRAL_TYPE_P (type)
1135 && TYPE_OVERFLOW_WRAPS (type));
1143 return negate_expr_p (TREE_REALPART (t))
1144 && negate_expr_p (TREE_IMAGPART (t));
1147 return negate_expr_p (TREE_OPERAND (t, 0))
1148 && negate_expr_p (TREE_OPERAND (t, 1));
1151 return negate_expr_p (TREE_OPERAND (t, 0));
1154 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1155 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1157 /* -(A + B) -> (-B) - A. */
1158 if (negate_expr_p (TREE_OPERAND (t, 1))
1159 && reorder_operands_p (TREE_OPERAND (t, 0),
1160 TREE_OPERAND (t, 1)))
1162 /* -(A + B) -> (-A) - B. */
1163 return negate_expr_p (TREE_OPERAND (t, 0));
1166 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1167 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1168 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1169 && reorder_operands_p (TREE_OPERAND (t, 0),
1170 TREE_OPERAND (t, 1));
1173 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1179 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1180 return negate_expr_p (TREE_OPERAND (t, 1))
1181 || negate_expr_p (TREE_OPERAND (t, 0));
1184 case TRUNC_DIV_EXPR:
1185 case ROUND_DIV_EXPR:
1186 case FLOOR_DIV_EXPR:
1188 case EXACT_DIV_EXPR:
1189 /* In general we can't negate A / B, because if A is INT_MIN and
1190 B is 1, we may turn this into INT_MIN / -1 which is undefined
1191 and actually traps on some architectures. But if overflow is
1192 undefined, we can negate, because - (INT_MIN / 1) is an
1194 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1195 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1197 return negate_expr_p (TREE_OPERAND (t, 1))
1198 || negate_expr_p (TREE_OPERAND (t, 0));
1201 /* Negate -((double)float) as (double)(-float). */
1202 if (TREE_CODE (type) == REAL_TYPE)
1204 tree tem = strip_float_extensions (t);
1206 return negate_expr_p (tem);
1211 /* Negate -f(x) as f(-x). */
1212 if (negate_mathfn_p (builtin_mathfn_code (t)))
1213 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1217 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1218 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1220 tree op1 = TREE_OPERAND (t, 1);
1221 if (TREE_INT_CST_HIGH (op1) == 0
1222 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1223 == TREE_INT_CST_LOW (op1))
1234 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1235 simplification is possible.
1236 If negate_expr_p would return true for T, NULL_TREE will never be returned.
/* Return a folded tree for -T, or NULL_TREE when no simplification applies.
   Dispatches on TREE_CODE (t): constants are negated directly via
   fold_negate_const / negate_expr, and composite expressions are rewritten
   algebraically.  NOTE(review): this excerpt elides many lines (case
   labels, braces, some returns) per the gaps in the embedded numbering;
   comments below are hedged accordingly.  */
1240 fold_negate_expr (tree t)
1242 tree type = TREE_TYPE (t);
1245 switch (TREE_CODE (t))
1247 /* Convert - (~A) to A + 1. */
1249 if (INTEGRAL_TYPE_P (type))
1250 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1251 build_int_cst (type, 1));
/* Constant cases: negate directly, but keep the result only when the
   type's overflow semantics allow it.  */
1255 tem = fold_negate_const (t, type);
1256 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1257 || !TYPE_OVERFLOW_TRAPS (type))
1262 tem = fold_negate_const (t, type);
1263 /* Two's complement FP formats, such as c4x, may overflow. */
1264 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1269 tem = fold_negate_const (t, type);
/* Complex constant: negate real and imaginary parts separately; only
   rebuild when both came back as constants.  */
1274 tree rpart = negate_expr (TREE_REALPART (t));
1275 tree ipart = negate_expr (TREE_IMAGPART (t));
1277 if ((TREE_CODE (rpart) == REAL_CST
1278 && TREE_CODE (ipart) == REAL_CST)
1279 || (TREE_CODE (rpart) == INTEGER_CST
1280 && TREE_CODE (ipart) == INTEGER_CST))
1281 return build_complex (type, rpart, ipart);
1286 if (negate_expr_p (t))
1287 return fold_build2 (COMPLEX_EXPR, type,
1288 fold_negate_expr (TREE_OPERAND (t, 0)),
1289 fold_negate_expr (TREE_OPERAND (t, 1)));
1293 if (negate_expr_p (t))
1294 return fold_build1 (CONJ_EXPR, type,
1295 fold_negate_expr (TREE_OPERAND (t, 0)));
/* Presumably the NEGATE_EXPR case: -(-A) -> A.  */
1299 return TREE_OPERAND (t, 0);
/* PLUS_EXPR rewrites are valid only when sign-dependent rounding and
   signed zeros need not be honored.  */
1302 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1303 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1305 /* -(A + B) -> (-B) - A. */
1306 if (negate_expr_p (TREE_OPERAND (t, 1))
1307 && reorder_operands_p (TREE_OPERAND (t, 0),
1308 TREE_OPERAND (t, 1)))
1310 tem = negate_expr (TREE_OPERAND (t, 1));
1311 return fold_build2 (MINUS_EXPR, type,
1312 tem, TREE_OPERAND (t, 0));
1315 /* -(A + B) -> (-A) - B. */
1316 if (negate_expr_p (TREE_OPERAND (t, 0)))
1318 tem = negate_expr (TREE_OPERAND (t, 0));
1319 return fold_build2 (MINUS_EXPR, type,
1320 tem, TREE_OPERAND (t, 1));
1326 /* - (A - B) -> B - A */
1327 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1328 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1329 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1330 return fold_build2 (MINUS_EXPR, type,
1331 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1335 if (TYPE_UNSIGNED (type))
/* Push the negation into one operand of a (presumably) MULT_EXPR when
   sign-dependent rounding is not honored; try operand 1 first.  */
1341 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1343 tem = TREE_OPERAND (t, 1);
1344 if (negate_expr_p (tem))
1345 return fold_build2 (TREE_CODE (t), type,
1346 TREE_OPERAND (t, 0), negate_expr (tem));
1347 tem = TREE_OPERAND (t, 0);
1348 if (negate_expr_p (tem))
1349 return fold_build2 (TREE_CODE (t), type,
1350 negate_expr (tem), TREE_OPERAND (t, 1));
1354 case TRUNC_DIV_EXPR:
1355 case ROUND_DIV_EXPR:
1356 case FLOOR_DIV_EXPR:
1358 case EXACT_DIV_EXPR:
1359 /* In general we can't negate A / B, because if A is INT_MIN and
1360 B is 1, we may turn this into INT_MIN / -1 which is undefined
1361 and actually traps on some architectures. But if overflow is
1362 undefined, we can negate, because - (INT_MIN / 1) is an
1364 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1366 const char * const warnmsg = G_("assuming signed overflow does not "
1367 "occur when negating a division");
1368 tem = TREE_OPERAND (t, 1);
1369 if (negate_expr_p (tem))
/* Emit the strict-overflow warning unless the divisor is a constant
   for which negation is known safe.  */
1371 if (INTEGRAL_TYPE_P (type)
1372 && (TREE_CODE (tem) != INTEGER_CST
1373 || integer_onep (tem)))
1374 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1375 return fold_build2 (TREE_CODE (t), type,
1376 TREE_OPERAND (t, 0), negate_expr (tem));
1378 tem = TREE_OPERAND (t, 0);
1379 if (negate_expr_p (tem))
1381 if (INTEGRAL_TYPE_P (type)
1382 && (TREE_CODE (tem) != INTEGER_CST
1383 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1384 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1385 return fold_build2 (TREE_CODE (t), type,
1386 negate_expr (tem), TREE_OPERAND (t, 1));
1392 /* Convert -((double)float) into (double)(-float). */
1393 if (TREE_CODE (type) == REAL_TYPE)
1395 tem = strip_float_extensions (t);
1396 if (tem != t && negate_expr_p (tem))
1397 return fold_convert (type, negate_expr (tem));
1402 /* Negate -f(x) as f(-x). */
1403 if (negate_mathfn_p (builtin_mathfn_code (t))
1404 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1408 fndecl = get_callee_fndecl (t);
1409 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1410 return build_call_expr (fndecl, 1, arg);
1415 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1416 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1418 tree op1 = TREE_OPERAND (t, 1);
/* Only when the shift count is exactly precision - 1, i.e. the result
   is 0 or all-ones / 0 or 1, so flipping signedness negates it.  */
1419 if (TREE_INT_CST_HIGH (op1) == 0
1420 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1421 == TREE_INT_CST_LOW (op1))
1423 tree ntype = TYPE_UNSIGNED (type)
1424 ? signed_type_for (type)
1425 : unsigned_type_for (type);
1426 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1427 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1428 return fold_convert (type, temp);
1440 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
1441 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1442 return NULL_TREE. */
/* Wrapper around fold_negate_expr: strip sign-preserving conversions,
   try to fold -T, otherwise (presumably when the fold returned
   NULL_TREE -- the check is elided here) build an explicit NEGATE_EXPR,
   and convert the result back to T's original type.  */
1445 negate_expr (tree t)
1452 type = TREE_TYPE (t);
1453 STRIP_SIGN_NOPS (t);
1455 tem = fold_negate_expr (t);
1457 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1458 return fold_convert (type, tem);
1461 /* Split a tree IN into a constant, literal and variable parts that could be
1462 combined with CODE to make IN. "constant" means an expression with
1463 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1464 commutative arithmetic operation. Store the constant part into *CONP,
1465 the literal in *LITP and return the variable part. If a part isn't
1466 present, set it to null. If the tree does not decompose in this way,
1467 return the entire tree as the variable part and the other parts as null.
1469 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1470 case, we negate an operand that was subtracted. Except if it is a
1471 literal for which we use *MINUS_LITP instead.
1473 If NEGATE_P is true, we are negating all of IN, again except a literal
1474 for which we use *MINUS_LITP instead.
1476 If IN is itself a literal or constant, return it as appropriate.
1478 Note that we do not guarantee that any of the three values will be the
1479 same type as IN, but they will have the same signedness and mode. */
/* Split IN into literal (*LITP / *MINUS_LITP), constant (*CONP) and
   variable parts combinable under CODE; see the block comment above.
   NOTE(review): several lines are elided in this excerpt.  */
1482 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1483 tree *minus_litp, int negate_p)
1491 /* Strip any conversions that don't change the machine mode or signedness. */
1492 STRIP_SIGN_NOPS (in);
1494 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1495 || TREE_CODE (in) == FIXED_CST)
1497 else if (TREE_CODE (in) == code
1498 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1499 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1500 /* We can associate addition and subtraction together (even
1501 though the C standard doesn't say so) for integers because
1502 the value is not affected. For reals, the value might be
1503 affected, so we can't. */
1504 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1505 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1507 tree op0 = TREE_OPERAND (in, 0);
1508 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p: operand 1 of a MINUS_EXPR is implicitly negated.  */
1509 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1510 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1512 /* First see if either of the operands is a literal, then a constant. */
1513 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1514 || TREE_CODE (op0) == FIXED_CST)
1515 *litp = op0, op0 = 0;
1516 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1517 || TREE_CODE (op1) == FIXED_CST)
1518 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1520 if (op0 != 0 && TREE_CONSTANT (op0))
1521 *conp = op0, op0 = 0;
1522 else if (op1 != 0 && TREE_CONSTANT (op1))
1523 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1525 /* If we haven't dealt with either operand, this is not a case we can
1526 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1527 if (op0 != 0 && op1 != 0)
1532 var = op1, neg_var_p = neg1_p;
1534 /* Now do any needed negations. */
1536 *minus_litp = *litp, *litp = 0;
1538 *conp = negate_expr (*conp);
1540 var = negate_expr (var);
1542 else if (TREE_CONSTANT (in))
/* Presumably the NEGATE_P handling: swap *litp/*minus_litp and negate
   the constant and variable parts.  */
1550 *minus_litp = *litp, *litp = 0;
1551 else if (*minus_litp)
1552 *litp = *minus_litp, *minus_litp = 0;
1553 *conp = negate_expr (*conp);
1554 var = negate_expr (var);
1560 /* Re-associate trees split by the above function. T1 and T2 are either
1561 expressions to associate or null. Return the new expression, if any. If
1562 we build an operation, do it in TYPE and with CODE. */
/* Re-associate T1 and T2 (either may be null) under CODE in TYPE.
   Avoids calling the folder when that would recurse infinitely, but
   still simplifies NEGATE_EXPR operands into MINUS_EXPRs.  */
1565 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1572 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1573 try to fold this since we will have infinite recursion. But do
1574 deal with any NEGATE_EXPRs. */
1575 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1576 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1578 if (code == PLUS_EXPR)
/* a + (-b) -> a - b, in both operand orders.  */
1580 if (TREE_CODE (t1) == NEGATE_EXPR)
1581 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1582 fold_convert (type, TREE_OPERAND (t1, 0)));
1583 else if (TREE_CODE (t2) == NEGATE_EXPR)
1584 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1585 fold_convert (type, TREE_OPERAND (t2, 0)));
1586 else if (integer_zerop (t2))
1587 return fold_convert (type, t1);
1589 else if (code == MINUS_EXPR)
1591 if (integer_zerop (t2))
1592 return fold_convert (type, t1);
/* Build without folding to avoid the recursion noted above.  */
1595 return build2 (code, type, fold_convert (type, t1),
1596 fold_convert (type, t2));
/* Safe to fold in the general case.  */
1599 return fold_build2 (code, type, fold_convert (type, t1),
1600 fold_convert (type, t2));
1603 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1604 for use in int_const_binop, size_binop and size_diffop. */
/* Return nonzero when TYPE1 and TYPE2 are equivalent integer or pointer
   types (same signedness, precision and mode) for use in int_const_binop,
   size_binop and size_diffop.  CODE presumably selects additional
   accepted combinations in the elided middle of the function.  */
1607 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1609 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1611 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1626 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1627 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1628 && TYPE_MODE (type1) == TYPE_MODE (type2);
1632 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1633 to produce a new constant. Return NULL_TREE if we don't know how
1634 to evaluate CODE at compile-time.
1636 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* Combine INTEGER_CSTs ARG1 and ARG2 under CODE into a new constant
   using double-word (low/high HOST_WIDE_INT) arithmetic; NULL_TREE for
   unsupported codes.  If NOTRUNC, skip the force-fit truncation step.
   NOTE(review): case labels and some branches are elided here.  */
1639 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1641 unsigned HOST_WIDE_INT int1l, int2l;
1642 HOST_WIDE_INT int1h, int2h;
1643 unsigned HOST_WIDE_INT low;
1645 unsigned HOST_WIDE_INT garbagel;
1646 HOST_WIDE_INT garbageh;
1648 tree type = TREE_TYPE (arg1);
1649 int uns = TYPE_UNSIGNED (type);
1651 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
/* Unpack both operands into low/high word pairs.  */
1654 int1l = TREE_INT_CST_LOW (arg1);
1655 int1h = TREE_INT_CST_HIGH (arg1);
1656 int2l = TREE_INT_CST_LOW (arg2);
1657 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise ops operate wordwise; no overflow possible.  */
1662 low = int1l | int2l, hi = int1h | int2h;
1666 low = int1l ^ int2l, hi = int1h ^ int2h;
1670 low = int1l & int2l, hi = int1h & int2h;
1676 /* It's unclear from the C standard whether shifts can overflow.
1677 The following code ignores overflow; perhaps a C standard
1678 interpretation ruling is needed. */
1679 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1686 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1691 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction as addition of the negation, with a sign-based
   overflow check.  */
1695 neg_double (int2l, int2h, &low, &hi);
1696 add_double (int1l, int1h, low, hi, &low, &hi);
1697 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1701 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1704 case TRUNC_DIV_EXPR:
1705 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1706 case EXACT_DIV_EXPR:
1707 /* This is a shortcut for a common special case. */
1708 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1709 && !TREE_OVERFLOW (arg1)
1710 && !TREE_OVERFLOW (arg2)
1711 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1713 if (code == CEIL_DIV_EXPR)
1716 low = int1l / int2l, hi = 0;
1720 /* ... fall through ... */
1722 case ROUND_DIV_EXPR:
/* Division by zero: presumably bail out (elided branch body).  */
1723 if (int2h == 0 && int2l == 0)
1725 if (int2h == 0 && int2l == 1)
1727 low = int1l, hi = int1h;
/* x / x == 1 for nonzero x (result setup elided).  */
1730 if (int1l == int2l && int1h == int2h
1731 && ! (int1l == 0 && int1h == 0))
1736 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1737 &low, &hi, &garbagel, &garbageh);
1740 case TRUNC_MOD_EXPR:
1741 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1742 /* This is a shortcut for a common special case. */
1743 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1744 && !TREE_OVERFLOW (arg1)
1745 && !TREE_OVERFLOW (arg2)
1746 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1748 if (code == CEIL_MOD_EXPR)
1750 low = int1l % int2l, hi = 0;
1754 /* ... fall through ... */
1756 case ROUND_MOD_EXPR:
1757 if (int2h == 0 && int2l == 0)
/* Same helper as division, but keep the remainder words instead.  */
1759 overflow = div_and_round_double (code, uns,
1760 int1l, int1h, int2l, int2h,
1761 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compare unsigned or signed according to UNS, then pick.  */
1767 low = (((unsigned HOST_WIDE_INT) int1h
1768 < (unsigned HOST_WIDE_INT) int2h)
1769 || (((unsigned HOST_WIDE_INT) int1h
1770 == (unsigned HOST_WIDE_INT) int2h)
1773 low = (int1h < int2h
1774 || (int1h == int2h && int1l < int2l));
1776 if (low == (code == MIN_EXPR))
1777 low = int1l, hi = int1h;
1779 low = int2l, hi = int2h;
1788 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1790 /* Propagate overflow flags ourselves. */
1791 if (((!uns || is_sizetype) && overflow)
1792 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1795 TREE_OVERFLOW (t) = 1;
/* Non-NOTRUNC path: let force_fit_type_double truncate and set the
   overflow bit in one step.  */
1799 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1800 ((!uns || is_sizetype) && overflow)
1801 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1806 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1807 constant. We assume ARG1 and ARG2 have the same data type, or at least
1808 are the same kind of constant and the same machine mode. Return zero if
1809 combining the constants is not allowed in the current operating mode.
1811 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* Combine two constants ARG1 and ARG2 under CODE; dispatches on the
   kind of ARG1 (integer, real, fixed-point, complex).  Returns zero
   (NULL_TREE) when folding is not allowed in the current mode.
   NOTE(review): many lines are elided in this excerpt.  */
1814 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1816 /* Sanity check for the recursive cases. */
1823 if (TREE_CODE (arg1) == INTEGER_CST)
1824 return int_const_binop (code, arg1, arg2, notrunc);
1826 if (TREE_CODE (arg1) == REAL_CST)
1828 enum machine_mode mode;
1831 REAL_VALUE_TYPE value;
1832 REAL_VALUE_TYPE result;
1836 /* The following codes are handled by real_arithmetic. */
1851 d1 = TREE_REAL_CST (arg1);
1852 d2 = TREE_REAL_CST (arg2);
1854 type = TREE_TYPE (arg1);
1855 mode = TYPE_MODE (type);
1857 /* Don't perform operation if we honor signaling NaNs and
1858 either operand is a NaN. */
1859 if (HONOR_SNANS (mode)
1860 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1863 /* Don't perform operation if it would raise a division
1864 by zero exception. */
1865 if (code == RDIV_EXPR
1866 && REAL_VALUES_EQUAL (d2, dconst0)
1867 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1870 /* If either operand is a NaN, just return it. Otherwise, set up
1871 for floating-point trap; we return an overflow. */
1872 if (REAL_VALUE_ISNAN (d1))
1874 else if (REAL_VALUE_ISNAN (d2))
1877 inexact = real_arithmetic (&value, code, &d1, &d2);
1878 real_convert (&result, mode, &value);
1880 /* Don't constant fold this floating point operation if
1881 the result has overflowed and flag_trapping_math. */
1882 if (flag_trapping_math
1883 && MODE_HAS_INFINITIES (mode)
1884 && REAL_VALUE_ISINF (result)
1885 && !REAL_VALUE_ISINF (d1)
1886 && !REAL_VALUE_ISINF (d2))
1889 /* Don't constant fold this floating point operation if the
1890 result may dependent upon the run-time rounding mode and
1891 flag_rounding_math is set, or if GCC's software emulation
1892 is unable to accurately represent the result. */
1893 if ((flag_rounding_math
1894 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1895 && (inexact || !real_identical (&result, &value)))
1898 t = build_real (type, result);
1900 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1904 if (TREE_CODE (arg1) == FIXED_CST)
1906 FIXED_VALUE_TYPE f1;
1907 FIXED_VALUE_TYPE f2;
1908 FIXED_VALUE_TYPE result;
1913 /* The following codes are handled by fixed_arithmetic. */
1919 case TRUNC_DIV_EXPR:
1920 f2 = TREE_FIXED_CST (arg2);
/* Presumably the shift case: the second operand is an INTEGER_CST
   copied into a FIXED_VALUE_TYPE wordwise.  */
1925 f2.data.high = TREE_INT_CST_HIGH (arg2);
1926 f2.data.low = TREE_INT_CST_LOW (arg2);
1934 f1 = TREE_FIXED_CST (arg1);
1935 type = TREE_TYPE (arg1);
1936 sat_p = TYPE_SATURATING (type);
1937 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1938 t = build_fixed (type, result);
1939 /* Propagate overflow flags. */
1940 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1942 TREE_OVERFLOW (t) = 1;
1943 TREE_CONSTANT_OVERFLOW (t) = 1;
1945 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1946 TREE_CONSTANT_OVERFLOW (t) = 1;
1950 if (TREE_CODE (arg1) == COMPLEX_CST)
1952 tree type = TREE_TYPE (arg1);
1953 tree r1 = TREE_REALPART (arg1);
1954 tree i1 = TREE_IMAGPART (arg1);
1955 tree r2 = TREE_REALPART (arg2);
1956 tree i2 = TREE_IMAGPART (arg2);
/* Componentwise case (presumably PLUS/MINUS).  */
1963 real = const_binop (code, r1, r2, notrunc);
1964 imag = const_binop (code, i1, i2, notrunc);
/* Complex multiplication: (r1 + i1*i)(r2 + i2*i).  */
1968 real = const_binop (MINUS_EXPR,
1969 const_binop (MULT_EXPR, r1, r2, notrunc),
1970 const_binop (MULT_EXPR, i1, i2, notrunc),
1972 imag = const_binop (PLUS_EXPR,
1973 const_binop (MULT_EXPR, r1, i2, notrunc),
1974 const_binop (MULT_EXPR, i1, r2, notrunc),
/* Complex division via |r2 + i2*i|^2 (magsquared).  */
1981 = const_binop (PLUS_EXPR,
1982 const_binop (MULT_EXPR, r2, r2, notrunc),
1983 const_binop (MULT_EXPR, i2, i2, notrunc),
1986 = const_binop (PLUS_EXPR,
1987 const_binop (MULT_EXPR, r1, r2, notrunc),
1988 const_binop (MULT_EXPR, i1, i2, notrunc),
1991 = const_binop (MINUS_EXPR,
1992 const_binop (MULT_EXPR, i1, r2, notrunc),
1993 const_binop (MULT_EXPR, r1, i2, notrunc),
1996 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1997 code = TRUNC_DIV_EXPR;
1999 real = const_binop (code, t1, magsquared, notrunc);
2000 imag = const_binop (code, t2, magsquared, notrunc);
2009 return build_complex (type, real, imag);
2015 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2016 indicates which particular sizetype to create. */
/* Build an INT_CST of value NUMBER in the sizetype selected by KIND
   (an index into sizetype_tab).  */
2019 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2021 return build_int_cst (sizetype_tab[(int) kind], number);
2024 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2025 is a tree code. The type of the result is taken from the operands.
2026 Both must be equivalent integer types, ala int_binop_types_match_p.
2027 If the operands are constant, so is the result. */
/* Combine ARG0 and ARG1 under CODE; operand types must satisfy
   int_binop_types_match_p.  Fast-paths identity operations on
   constants (x+0, x-0, 1*x) before the general constant fold.  */
2030 size_binop (enum tree_code code, tree arg0, tree arg1)
2032 tree type = TREE_TYPE (arg0);
2034 if (arg0 == error_mark_node || arg1 == error_mark_node)
2035 return error_mark_node;
2037 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2040 /* Handle the special case of two integer constants faster. */
2041 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2043 /* And some specific cases even faster than that. */
2044 if (code == PLUS_EXPR)
/* Identity shortcuts are taken only when the discarded constant
   carries no overflow flag to propagate.  */
2046 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2048 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2051 else if (code == MINUS_EXPR)
2053 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2056 else if (code == MULT_EXPR)
2058 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2062 /* Handle general case of two integer constants. */
2063 return int_const_binop (code, arg0, arg1, 0);
2066 return fold_build2 (code, type, arg0, arg1);
2069 /* Given two values, either both of sizetype or both of bitsizetype,
2070 compute the difference between the two values. Return the value
2071 in signed type corresponding to the type of the operands. */
/* Compute ARG0 - ARG1 (both sizetype or both bitsizetype), returning a
   value in the corresponding signed type.  For unsigned operands the
   subtraction is ordered so that it cannot wrap.  */
2074 size_diffop (tree arg0, tree arg1)
2076 tree type = TREE_TYPE (arg0);
2079 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2082 /* If the type is already signed, just do the simple thing. */
2083 if (!TYPE_UNSIGNED (type))
2084 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the matching signed companion type.  */
2086 if (type == sizetype)
2088 else if (type == bitsizetype)
2089 ctype = sbitsizetype;
2091 ctype = signed_type_for (type)
2093 /* If either operand is not a constant, do the conversions to the signed
2094 type and subtract. The hardware will do the right thing with any
2095 overflow in the subtraction. */
2096 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2097 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2098 fold_convert (ctype, arg1));
2100 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2101 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2102 overflow) and negate (which can't either). Special-case a result
2103 of zero while we're here. */
2104 if (tree_int_cst_equal (arg0, arg1))
2105 return build_int_cst (ctype, 0);
2106 else if (tree_int_cst_lt (arg1, arg0))
2107 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2109 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2110 fold_convert (ctype, size_binop (MINUS_EXPR,
2114 /* A subroutine of fold_convert_const handling conversions of an
2115 INTEGER_CST to another integer type. */
/* Convert INTEGER_CST ARG1 to integer type TYPE, sign-extending or
   truncating as needed; overflow detection is delegated to
   force_fit_type_double with the carefully-built flag below.  */
2118 fold_convert_const_int_from_int (tree type, const_tree arg1)
2122 /* Given an integer constant, make new constant with new type,
2123 appropriately sign-extended or truncated. */
2124 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2125 TREE_INT_CST_HIGH (arg1),
2126 /* Don't set the overflow when
2127 converting from a pointer, */
2128 !POINTER_TYPE_P (TREE_TYPE (arg1))
2129 /* or to a sizetype with same signedness
2130 and the precision is unchanged.
2131 ??? sizetype is always sign-extended,
2132 but its signedness depends on the
2133 frontend. Thus we see spurious overflows
2134 here if we do not check this. */
2135 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2136 == TYPE_PRECISION (type))
2137 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2138 == TYPE_UNSIGNED (type))
2139 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2140 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2141 || (TREE_CODE (type) == INTEGER_TYPE
2142 && TYPE_IS_SIZETYPE (type)))),
/* Pre-existing overflow: a negative value converted signed->unsigned,
   or an overflow flag already set on ARG1.  */
2143 (TREE_INT_CST_HIGH (arg1) < 0
2144 && (TYPE_UNSIGNED (type)
2145 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2146 | TREE_OVERFLOW (arg1));
2151 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2152 to an integer type. */
/* Convert REAL_CST ARG1 to integer type TYPE using Java-style
   saturating semantics: NaN -> 0, out-of-range -> TYPE_MIN/TYPE_MAX,
   always flagging overflow.  CODE selects the rounding (only
   FIX_TRUNC_EXPR is visible in this excerpt).  */
2155 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2160 /* The following code implements the floating point to integer
2161 conversion rules required by the Java Language Specification,
2162 that IEEE NaNs are mapped to zero and values that overflow
2163 the target precision saturate, i.e. values greater than
2164 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2165 are mapped to INT_MIN. These semantics are allowed by the
2166 C and C++ standards that simply state that the behavior of
2167 FP-to-integer conversion is unspecified upon overflow. */
2169 HOST_WIDE_INT high, low;
2171 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2175 case FIX_TRUNC_EXPR:
2176 real_trunc (&r, VOIDmode, &x);
2183 /* If R is NaN, return zero and show we have an overflow. */
2184 if (REAL_VALUE_ISNAN (r))
2191 /* See if R is less than the lower bound or greater than the
/* Saturate to TYPE_MIN_VALUE when below range.  */
2196 tree lt = TYPE_MIN_VALUE (type);
2197 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2198 if (REAL_VALUES_LESS (r, l))
2201 high = TREE_INT_CST_HIGH (lt);
2202 low = TREE_INT_CST_LOW (lt);
/* Saturate to TYPE_MAX_VALUE when above range.  */
2208 tree ut = TYPE_MAX_VALUE (type);
2211 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2212 if (REAL_VALUES_LESS (u, r))
2215 high = TREE_INT_CST_HIGH (ut);
2216 low = TREE_INT_CST_LOW (ut);
/* In-range: do the actual conversion.  */
2222 REAL_VALUE_TO_INT (&low, &high, r);
2224 t = force_fit_type_double (type, low, high, -1,
2225 overflow | TREE_OVERFLOW (arg1));
2229 /* A subroutine of fold_convert_const handling conversions of a
2230 FIXED_CST to an integer type. */
/* Convert FIXED_CST ARG1 to integer type TYPE by shifting away the
   fractional bits, rounding negative values toward zero.  */
2233 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2236 double_int temp, temp_trunc;
2239 /* Right shift FIXED_CST to temp by fbit. */
2240 temp = TREE_FIXED_CST (arg1).data;
2241 mode = TREE_FIXED_CST (arg1).mode;
2242 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
/* Negative shift count == right shift by fbit.  */
2244 lshift_double (temp.low, temp.high,
2245 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2246 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2248 /* Left shift temp to temp_trunc by fbit. */
2249 lshift_double (temp.low, temp.high,
2250 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2251 &temp_trunc.low, &temp_trunc.high,
2252 SIGNED_FIXED_POINT_MODE_P (mode));
/* Presumably the fbit >= 2*HOST_BITS_PER_WIDE_INT path zeroes both
   values (the companion temp/temp.low assignments are elided).  */
2259 temp_trunc.high = 0;
2262 /* If FIXED_CST is negative, we need to round the value toward 0.
2263 By checking if the fractional bits are not zero to add 1 to temp. */
2264 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2265 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2270 temp = double_int_add (temp, one);
2273 /* Given a fixed-point constant, make new constant with new type,
2274 appropriately sign-extended or truncated. */
2275 t = force_fit_type_double (type, temp.low, temp.high, -1,
2277 && (TYPE_UNSIGNED (type)
2278 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2279 | TREE_OVERFLOW (arg1));
2284 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2285 to another floating point type. */
/* Convert REAL_CST ARG1 to floating type TYPE by rounding to TYPE's
   mode; the overflow flag is copied from ARG1.  */
2288 fold_convert_const_real_from_real (tree type, const_tree arg1)
2290 REAL_VALUE_TYPE value;
2293 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2294 t = build_real (type, value);
2296 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2300 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2301 to a floating point type. */
/* Convert FIXED_CST ARG1 to floating type TYPE, propagating the
   overflow and constant-overflow flags from ARG1.  */
2304 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2306 REAL_VALUE_TYPE value;
2309 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2310 t = build_real (type, value);
2312 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2313 TREE_CONSTANT_OVERFLOW (t)
2314 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2318 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2319 to another fixed-point type. */
/* Convert FIXED_CST ARG1 to another fixed-point type TYPE, honoring
   TYPE's saturation, and propagate overflow flags from the conversion
   and from ARG1.  */
2322 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2324 FIXED_VALUE_TYPE value;
2328 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2329 TYPE_SATURATING (type));
2330 t = build_fixed (type, value);
2332 /* Propagate overflow flags. */
2333 if (overflow_p | TREE_OVERFLOW (arg1))
2335 TREE_OVERFLOW (t) = 1;
2336 TREE_CONSTANT_OVERFLOW (t) = 1;
2338 else if (TREE_CONSTANT_OVERFLOW (arg1))
2339 TREE_CONSTANT_OVERFLOW (t) = 1;
2343 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2344 to a fixed-point type. */
/* Convert INTEGER_CST ARG1 to fixed-point type TYPE, honoring TYPE's
   saturation; same overflow-flag propagation as the fixed->fixed case.  */
2347 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2349 FIXED_VALUE_TYPE value;
2353 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2354 TREE_INT_CST (arg1),
2355 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2356 TYPE_SATURATING (type));
2357 t = build_fixed (type, value);
2359 /* Propagate overflow flags. */
2360 if (overflow_p | TREE_OVERFLOW (arg1))
2362 TREE_OVERFLOW (t) = 1;
2363 TREE_CONSTANT_OVERFLOW (t) = 1;
2365 else if (TREE_CONSTANT_OVERFLOW (arg1))
2366 TREE_CONSTANT_OVERFLOW (t) = 1;
2370 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2371 to a fixed-point type. */
/* Convert REAL_CST ARG1 to fixed-point type TYPE, honoring TYPE's
   saturation; same overflow-flag propagation as the fixed->fixed case.  */
2374 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2376 FIXED_VALUE_TYPE value;
2380 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2381 &TREE_REAL_CST (arg1),
2382 TYPE_SATURATING (type));
2383 t = build_fixed (type, value);
2385 /* Propagate overflow flags. */
2386 if (overflow_p | TREE_OVERFLOW (arg1))
2388 TREE_OVERFLOW (t) = 1;
2389 TREE_CONSTANT_OVERFLOW (t) = 1;
2391 else if (TREE_CONSTANT_OVERFLOW (arg1))
2392 TREE_CONSTANT_OVERFLOW (t) = 1;
2396 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2397 type TYPE. If no simplification can be done return NULL_TREE. */
/* Try to fold conversion CODE of constant ARG1 to TYPE; dispatch to the
   fold_convert_const_*_from_* helpers by target-type kind and constant
   kind.  Returns NULL_TREE when no simplification is possible.  */
2400 fold_convert_const (enum tree_code code, tree type, tree arg1)
2402 if (TREE_TYPE (arg1) == type)
2405 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2406 || TREE_CODE (type) == OFFSET_TYPE)
2408 if (TREE_CODE (arg1) == INTEGER_CST)
2409 return fold_convert_const_int_from_int (type, arg1);
2410 else if (TREE_CODE (arg1) == REAL_CST)
2411 return fold_convert_const_int_from_real (code, type, arg1);
2412 else if (TREE_CODE (arg1) == FIXED_CST)
2413 return fold_convert_const_int_from_fixed (type, arg1);
2415 else if (TREE_CODE (type) == REAL_TYPE)
2417 if (TREE_CODE (arg1) == INTEGER_CST)
2418 return build_real_from_int_cst (type, arg1);
2419 else if (TREE_CODE (arg1) == REAL_CST)
2420 return fold_convert_const_real_from_real (type, arg1);
2421 else if (TREE_CODE (arg1) == FIXED_CST)
2422 return fold_convert_const_real_from_fixed (type, arg1);
2424 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2426 if (TREE_CODE (arg1) == FIXED_CST)
2427 return fold_convert_const_fixed_from_fixed (type, arg1);
2428 else if (TREE_CODE (arg1) == INTEGER_CST)
2429 return fold_convert_const_fixed_from_int (type, arg1);
2430 else if (TREE_CODE (arg1) == REAL_CST)
2431 return fold_convert_const_fixed_from_real (type, arg1);
2436 /* Construct a vector of zero elements of vector type TYPE. */
/* Build a VECTOR_CST of vector type TYPE whose elements are all the
   zero of TYPE's element type.  */
2439 build_zero_vector (tree type)
2444 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2445 units = TYPE_VECTOR_SUBPARTS (type);
/* Build the element list back-to-front; all elements are identical so
   the order does not matter.  */
2448 for (i = 0; i < units; i++)
2449 list = tree_cons (NULL_TREE, elem, list);
2450 return build_vector (type, list);
2453 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
/* Return true if ARG can be converted to TYPE with a simple NOP_EXPR,
   i.e. fold_convert would not need a real conversion operation.  */
2456 fold_convertible_p (const_tree type, const_tree arg)
2458 tree orig = TREE_TYPE (arg);
2463 if (TREE_CODE (arg) == ERROR_MARK
2464 || TREE_CODE (type) == ERROR_MARK
2465 || TREE_CODE (orig) == ERROR_MARK)
2468 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2471 switch (TREE_CODE (type))
2473 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2474 case POINTER_TYPE: case REFERENCE_TYPE:
2476 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2477 || TREE_CODE (orig) == OFFSET_TYPE)
/* Vector-to-vector is a NOP when the total sizes match.  */
2479 return (TREE_CODE (orig) == VECTOR_TYPE
2480 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2483 case FIXED_POINT_TYPE:
/* Remaining kinds: convertible only between identical tree codes.  */
2487 return TREE_CODE (type) == TREE_CODE (orig);
2494 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2495    simple conversions in preference to calling the front-end's convert.  */
/* Dispatches on the target TYPE's tree code, then on the source type ORIG,
   building NOP_EXPR / FLOAT_EXPR / FIXED_CONVERT_EXPR / COMPLEX_EXPR /
   VIEW_CONVERT_EXPR as appropriate.  Constant operands are folded through
   fold_convert_const first.
   NOTE(review): interior lines (case labels, braces, some returns) are
   elided in this extraction; the visible structure is incomplete. */
2498 fold_convert (tree type, tree arg)
2500 tree orig = TREE_TYPE (arg);
/* Propagate error nodes instead of trying to convert them. */
2506 if (TREE_CODE (arg) == ERROR_MARK
2507 || TREE_CODE (type) == ERROR_MARK
2508 || TREE_CODE (orig) == ERROR_MARK)
2509 return error_mark_node;
/* Same main variant: a bare NOP conversion suffices. */
2511 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2512 return fold_build1 (NOP_EXPR, type, arg);
2514 switch (TREE_CODE (type))
2516 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2517 case POINTER_TYPE: case REFERENCE_TYPE:
/* Integer constants can be converted at compile time. */
2519 if (TREE_CODE (arg) == INTEGER_CST)
2521 tem = fold_convert_const (NOP_EXPR, type, arg);
2522 if (tem != NULL_TREE)
2525 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2526 || TREE_CODE (orig) == OFFSET_TYPE)
2527 return fold_build1 (NOP_EXPR, type, arg);
/* Complex -> scalar: take the real part, then convert that. */
2528 if (TREE_CODE (orig) == COMPLEX_TYPE)
2530 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2531 return fold_convert (type, tem);
/* Only same-size vectors may reach here. */
2533 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2534 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2535 return fold_build1 (NOP_EXPR, type, arg);
/* Target is a float type: fold constant sources when possible. */
2538 if (TREE_CODE (arg) == INTEGER_CST)
2540 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2541 if (tem != NULL_TREE)
2544 else if (TREE_CODE (arg) == REAL_CST)
2546 tem = fold_convert_const (NOP_EXPR, type, arg);
2547 if (tem != NULL_TREE)
2550 else if (TREE_CODE (arg) == FIXED_CST)
2552 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2553 if (tem != NULL_TREE)
2557 switch (TREE_CODE (orig))
2560 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2561 case POINTER_TYPE: case REFERENCE_TYPE:
2562 return fold_build1 (FLOAT_EXPR, type, arg);
2565 return fold_build1 (NOP_EXPR, type, arg);
2567 case FIXED_POINT_TYPE:
2568 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
/* Complex source: convert the real part. */
2571 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2572 return fold_convert (type, tem);
2578 case FIXED_POINT_TYPE:
2579 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2580 || TREE_CODE (arg) == REAL_CST)
2582 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2583 if (tem != NULL_TREE)
2587 switch (TREE_CODE (orig))
2589 case FIXED_POINT_TYPE:
2594 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2597 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2598 return fold_convert (type, tem);
/* Target is COMPLEX_TYPE: dispatch on the source type. */
2605 switch (TREE_CODE (orig))
2608 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2609 case POINTER_TYPE: case REFERENCE_TYPE:
2611 case FIXED_POINT_TYPE:
/* Scalar -> complex: pair the converted value with a zero imaginary
   part. */
2612 return fold_build2 (COMPLEX_EXPR, type,
2613 fold_convert (TREE_TYPE (type), arg),
2614 fold_convert (TREE_TYPE (type),
2615 integer_zero_node));
/* Complex -> complex: convert the parts separately.  If ARG is already
   a COMPLEX_EXPR we can reuse its operands without a save_expr. */
2620 if (TREE_CODE (arg) == COMPLEX_EXPR)
2622 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2623 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2624 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* Otherwise ARG is evaluated twice (real + imag), so wrap it. */
2627 arg = save_expr (arg);
2628 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2629 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2630 rpart = fold_convert (TREE_TYPE (type), rpart);
2631 ipart = fold_convert (TREE_TYPE (type), ipart);
2632 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* Target is a vector: zero folds specially, everything else becomes a
   same-size VIEW_CONVERT_EXPR. */
2640 if (integer_zerop (arg))
2641 return build_zero_vector (type);
2642 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2643 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2644 || TREE_CODE (orig) == VECTOR_TYPE);
2645 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* Target is void: keep only side effects of ARG. */
2648 tem = fold_ignored_result (arg);
2649 if (TREE_CODE (tem) == MODIFY_EXPR)
2651 return fold_build1 (NOP_EXPR, type, tem);
2658 /* Return false if expr can be assumed not to be an lvalue, true
/* True means X *might* be an lvalue, so callers must wrap it in
   NON_LVALUE_EXPR to be safe; false is a proof that it is not.
   NOTE(review): most case labels of this switch are elided in this
   extraction — only a few are visible below. */
2662 maybe_lvalue_p (const_tree x)
2664   /* We only need to wrap lvalue tree codes.  */
2665 switch (TREE_CODE (x))
2676 case ALIGN_INDIRECT_REF:
2677 case MISALIGNED_INDIRECT_REF:
2679 case ARRAY_RANGE_REF:
2685 case PREINCREMENT_EXPR:
2686 case PREDECREMENT_EXPR:
2688 case TRY_CATCH_EXPR:
2689 case WITH_CLEANUP_EXPR:
2700   /* Assume the worst for front-end tree codes.  */
/* Codes >= NUM_TREE_CODES are language-specific; treat them as
   potentially lvalues since we cannot inspect them here. */
2701 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2709 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2714   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* If X provably cannot be an lvalue, return it unchanged; otherwise
   wrap it so it cannot be assigned to. */
2719 if (! maybe_lvalue_p (x))
2721 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2724 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2725    Zero means allow extended lvalues.  */
/* Global flag set by the front end; consulted by pedantic_non_lvalue. */
2727 int pedantic_lvalues;
2729 /* When pedantic, return an expr equal to X but certainly not valid as a
2730    pedantic lvalue.  Otherwise, return X.  */
/* Thin wrapper over non_lvalue gated by the pedantic_lvalues flag. */
2733 pedantic_non_lvalue (tree x)
2735 if (pedantic_lvalues)
2736 return non_lvalue (x);
2741 /* Given a tree comparison code, return the code that is the logical inverse
2742    of the given code.  It is not safe to do this for floating-point
2743    comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2744    as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
2747 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With NaNs and trapping math, inverting an ordered comparison could
   change whether the expression traps; the (elided) code following this
   test presumably bails out with ERROR_MARK — TODO confirm. */
2749 if (honor_nans && flag_trapping_math)
/* !(a > b) is (a <= b) without NaNs, but UNLE with NaNs, etc. */
2759 return honor_nans ? UNLE_EXPR : LE_EXPR;
2761 return honor_nans ? UNLT_EXPR : LT_EXPR;
2763 return honor_nans ? UNGE_EXPR : GE_EXPR;
2765 return honor_nans ? UNGT_EXPR : GT_EXPR;
/* ORDERED and UNORDERED are each other's inverses unconditionally. */
2779 return UNORDERED_EXPR;
2780 case UNORDERED_EXPR:
2781 return ORDERED_EXPR;
2787 /* Similar, but return the comparison that results if the operands are
2788    swapped.  This is safe for floating-point.  */
/* NOTE(review): the body of this switch is almost entirely elided in
   this extraction; only the UNORDERED_EXPR label survives. */
2791 swap_tree_comparison (enum tree_code code)
2798 case UNORDERED_EXPR:
2824 /* Convert a comparison tree code from an enum tree_code representation
2825    into a compcode bit-based encoding.  This function is the inverse of
2826    compcode_to_comparison.  */
/* The bit encoding lets combine_comparisons AND/OR two comparisons
   together as simple bit operations. */
2828 static enum comparison_code
2829 comparison_to_compcode (enum tree_code code)
2846 return COMPCODE_ORD;
2847 case UNORDERED_EXPR:
2848 return COMPCODE_UNORD;
2850 return COMPCODE_UNLT;
2852 return COMPCODE_UNEQ;
2854 return COMPCODE_UNLE;
2856 return COMPCODE_UNGT;
2858 return COMPCODE_LTGT;
2860 return COMPCODE_UNGE;
2866 /* Convert a compcode bit-based encoding of a comparison operator back
2867    to GCC's enum tree_code representation.  This function is the
2868    inverse of comparison_to_compcode.  */
/* NOTE(review): most case labels are elided in this extraction; only
   the ORD/UNORD pair is visible. */
2870 static enum tree_code
2871 compcode_to_comparison (enum comparison_code code)
2888 return ORDERED_EXPR;
2889 case COMPCODE_UNORD:
2890 return UNORDERED_EXPR;
2908 /* Return a tree for the comparison which is the combination of
2909    doing the AND or OR (depending on CODE) of the two operations LCODE
2910    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2911    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2912    if this makes the transformation invalid.  */
2915 combine_comparisons (enum tree_code code, enum tree_code lcode,
2916 enum tree_code rcode, tree truth_type,
2917 tree ll_arg, tree lr_arg)
/* Both comparisons are over LL_ARG's type, so its mode decides whether
   NaNs must be honored. */
2919 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2920 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2921 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2922 enum comparison_code compcode;
/* In the bit encoding, logical AND/OR of comparisons is bitwise AND/OR
   of their compcodes. */
2926 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2927 compcode = lcompcode & rcompcode;
2930 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2931 compcode = lcompcode | rcompcode;
2940       /* Eliminate unordered comparisons, as well as LTGT and ORD
2941 	 which are not used unless the mode has NaNs.  */
2942 compcode &= ~COMPCODE_UNORD;
2943 if (compcode == COMPCODE_LTGT)
2944 compcode = COMPCODE_NE;
2945 else if (compcode == COMPCODE_ORD)
2946 compcode = COMPCODE_TRUE;
2948 else if (flag_trapping_math)
2950       /* Check that the original operation and the optimized ones will trap
2951 	 under the same condition.  */
/* A comparison traps on NaN exactly when it is an ordered comparison
   other than EQ/ORD (those are quiet). */
2952 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2953 && (lcompcode != COMPCODE_EQ)
2954 && (lcompcode != COMPCODE_ORD);
2955 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2956 && (rcompcode != COMPCODE_EQ)
2957 && (rcompcode != COMPCODE_ORD);
2958 bool trap = (compcode & COMPCODE_UNORD) == 0
2959 && (compcode != COMPCODE_EQ)
2960 && (compcode != COMPCODE_ORD);
2962       /* In a short-circuited boolean expression the LHS might be
2963 	 such that the RHS, if evaluated, will never trap.  For
2964 	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2965 	 if neither x nor y is NaN.  (This is a mixed blessing: for
2966 	 example, the expression above will never trap, hence
2967 	 optimizing it to x < y would be invalid).  */
2968 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2969 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2972       /* If the comparison was short-circuited, and only the RHS
2973 	 trapped, we may now generate a spurious trap.  */
2975 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2978       /* If we changed the conditions that cause a trap, we lose.  */
2979 if ((ltrap || rtrap) != trap)
/* Degenerate results fold to boolean constants. */
2983 if (compcode == COMPCODE_TRUE)
2984 return constant_boolean_node (true, truth_type);
2985 else if (compcode == COMPCODE_FALSE)
2986 return constant_boolean_node (false, truth_type);
/* Otherwise build the single combined comparison. */
2988 return fold_build2 (compcode_to_comparison (compcode),
2989 truth_type, ll_arg, lr_arg);
2992 /* Return nonzero if two operands (typically of the same tree node)
2993    are necessarily equal.  If either argument has side-effects this
2994    function returns zero.  FLAGS modifies behavior as follows:
2996    If OEP_ONLY_CONST is set, only return nonzero for constants.
2997    This function tests whether the operands are indistinguishable;
2998    it does not test whether they are equal using C's == operation.
2999    The distinction is important for IEEE floating point, because
3000    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3001    (2) two NaNs may be indistinguishable, but NaN!=NaN.
3003    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3004    even though it may hold multiple values during a function.
3005    This is because a GCC tree node guarantees that nothing else is
3006    executed between the evaluation of its "operands" (which may often
3007    be evaluated in arbitrary order).  Hence if the operands themselves
3008    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3009    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
3010    unset means assuming isochronic (or instantaneous) tree equivalence.
3011    Unless comparing arbitrary expression trees, such as from different
3012    statements, this flag can usually be left unset.
3014    If OEP_PURE_SAME is set, then pure functions with identical arguments
3015    are considered the same.  It is used when the caller has other ways
3016    to ensure that global memory is unchanged in between.  */
3019 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3021   /* If either is ERROR_MARK, they aren't equal.  */
3022 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3025   /* Check equality of integer constants before bailing out due to
3026      precision differences.  */
3027 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3028 return tree_int_cst_equal (arg0, arg1);
3030   /* If both types don't have the same signedness, then we can't consider
3031      them equal.  We must check this before the STRIP_NOPS calls
3032      because they may change the signedness of the arguments.  As pointers
3033      strictly don't have a signedness, require either two pointers or
3034      two non-pointers as well.  */
3035 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3036 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3039   /* If both types don't have the same precision, then it is not safe
3041 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3047   /* In case both args are comparisons but with different comparison
3048      code, try to swap the comparison operands of one arg to produce
3049      a match and compare that variant.  */
3050 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3051 && COMPARISON_CLASS_P (arg0)
3052 && COMPARISON_CLASS_P (arg1))
3054 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
/* e.g. (a < b) matches (b > a): compare cross-wise. */
3056 if (TREE_CODE (arg0) == swap_code)
3057 return operand_equal_p (TREE_OPERAND (arg0, 0),
3058 TREE_OPERAND (arg1, 1), flags)
3059 && operand_equal_p (TREE_OPERAND (arg0, 1),
3060 TREE_OPERAND (arg1, 0), flags);
3063 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3064       /* This is needed for conversions and for COMPONENT_REF.
3065 	 Might as well play it safe and always test this.  */
3066 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3067 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3068 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3071   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3072      We don't care about side effects in that case because the SAVE_EXPR
3073      takes care of that for us. In all other cases, two expressions are
3074      equal if they have no side effects.  If we have two identical
3075      expressions with side effects that should be treated the same due
3076      to the only side effects being identical SAVE_EXPR's, that will
3077      be detected in the recursive calls below.  */
3078 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3079 && (TREE_CODE (arg0) == SAVE_EXPR
3080 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3083   /* Next handle constant cases, those for which we can return 1 even
3084      if ONLY_CONST is set.  */
3085 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3086 switch (TREE_CODE (arg0))
3089 return tree_int_cst_equal (arg0, arg1);
3092 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3093 TREE_FIXED_CST (arg1));
/* Reals compare bit-identically (distinguishes -0.0 from 0.0). */
3096 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3097 TREE_REAL_CST (arg1)))
3101 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3103 	    /* If we do not distinguish between signed and unsigned zero,
3104 	       consider them equal.  */
3105 if (real_zerop (arg0) && real_zerop (arg1))
/* Vector constants: walk the two element chains in lockstep. */
3114 v1 = TREE_VECTOR_CST_ELTS (arg0);
3115 v2 = TREE_VECTOR_CST_ELTS (arg1);
3118 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3121 v1 = TREE_CHAIN (v1);
3122 v2 = TREE_CHAIN (v2);
/* Complex constants: both parts must match. */
3129 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3131 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* String constants: same length and bytes. */
3135 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3136 && ! memcmp (TREE_STRING_POINTER (arg0),
3137 TREE_STRING_POINTER (arg1),
3138 TREE_STRING_LENGTH (arg0)));
3141 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
/* Past this point nothing is a constant, so OEP_ONLY_CONST fails. */
3147 if (flags & OEP_ONLY_CONST)
3150 /* Define macros to test an operand from arg0 and arg1 for equality and a
3151    variant that allows null and views null as being different from any
3152    non-null value.  In the latter case, if either is null, the both
3153    must be; otherwise, do the normal comparison.  */
3154 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3155 TREE_OPERAND (arg1, N), flags)
3157 #define OP_SAME_WITH_NULL(N)				\
3158 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
3159 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3161 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3164       /* Two conversions are equal only if signedness and modes match.  */
3165 switch (TREE_CODE (arg0))
3168 case FIX_TRUNC_EXPR:
3169 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3170 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3180 case tcc_comparison:
3182 if (OP_SAME (0) && OP_SAME (1))
3185       /* For commutative ops, allow the other order.  */
3186 return (commutative_tree_code (TREE_CODE (arg0))
3187 && operand_equal_p (TREE_OPERAND (arg0, 0),
3188 TREE_OPERAND (arg1, 1), flags)
3189 && operand_equal_p (TREE_OPERAND (arg0, 1),
3190 TREE_OPERAND (arg1, 0), flags));
3193       /* If either of the pointer (or reference) expressions we are
3194 	 dereferencing contain a side effect, these cannot be equal.  */
3195 if (TREE_SIDE_EFFECTS (arg0)
3196 || TREE_SIDE_EFFECTS (arg1))
3199 switch (TREE_CODE (arg0))
3202 case ALIGN_INDIRECT_REF:
3203 case MISALIGNED_INDIRECT_REF:
3209 case ARRAY_RANGE_REF:
3210 	  /* Operands 2 and 3 may be null.
3211 	     Compare the array index by value if it is constant first as we
3212 	     may have different types but same value here.  */
3214 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3215 TREE_OPERAND (arg1, 1))
3217 && OP_SAME_WITH_NULL (2)
3218 && OP_SAME_WITH_NULL (3));
3221 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3222 	     may be NULL when we're called to compare MEM_EXPRs.  */
3223 return OP_SAME_WITH_NULL (0)
3225 && OP_SAME_WITH_NULL (2);
3228 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3234 case tcc_expression:
3235 switch (TREE_CODE (arg0))
3238 case TRUTH_NOT_EXPR:
3241 case TRUTH_ANDIF_EXPR:
3242 case TRUTH_ORIF_EXPR:
/* Short-circuit ops are order-sensitive; no swapped form. */
3243 return OP_SAME (0) && OP_SAME (1);
3245 case TRUTH_AND_EXPR:
3247 case TRUTH_XOR_EXPR:
3248 if (OP_SAME (0) && OP_SAME (1))
3251 	  /* Otherwise take into account this is a commutative operation.  */
3252 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3253 TREE_OPERAND (arg1, 1), flags)
3254 && operand_equal_p (TREE_OPERAND (arg0, 1),
3255 TREE_OPERAND (arg1, 0), flags));
3258 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3265 switch (TREE_CODE (arg0))
3268 	  /* If the CALL_EXPRs call different functions, then they
3269 	     clearly can not be equal.  */
3270 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
/* Only const (or, with OEP_PURE_SAME, pure) calls may be merged. */
3275 unsigned int cef = call_expr_flags (arg0);
3276 if (flags & OEP_PURE_SAME)
3277 cef &= ECF_CONST | ECF_PURE;
3284 	  /* Now see if all the arguments are the same.  */
3286 const_call_expr_arg_iterator iter0, iter1;
3288 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3289 a1 = first_const_call_expr_arg (arg1, &iter1);
3291 a0 = next_const_call_expr_arg (&iter0),
3292 a1 = next_const_call_expr_arg (&iter1))
3293 if (! operand_equal_p (a0, a1, flags))
3296 	  /* If we get here and both argument lists are exhausted
3297 	     then the CALL_EXPRs are equal.  */
3298 return ! (a0 || a1);
3304 case tcc_declaration:
3305       /* Consider __builtin_sqrt equal to sqrt.  */
3306 return (TREE_CODE (arg0) == FUNCTION_DECL
3307 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3308 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3309 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3316 #undef OP_SAME_WITH_NULL
3319 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3320    shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3322    When in doubt, return 0.  */
3325 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3327 int unsignedp1, unsignedpo;
3328 tree primarg0, primarg1, primother;
3329 unsigned int correct_width;
/* Exact equality: done. */
3331 if (operand_equal_p (arg0, arg1, 0))
/* The shorten_compare reconstruction below only applies to integers. */
3334 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3335 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3338   /* Discard any conversions that don't change the modes of ARG0 and ARG1
3339      and see if the inner values are the same.  This removes any
3340      signedness comparison, which doesn't matter here.  */
3341 primarg0 = arg0, primarg1 = arg1;
3342 STRIP_NOPS (primarg0);
3343 STRIP_NOPS (primarg1);
3344 if (operand_equal_p (primarg0, primarg1, 0))
3347   /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3348      actual comparison operand, ARG0.
3350      First throw away any conversions to wider types
3351      already present in the operands.  */
3353 primarg1 = get_narrower (arg1, &unsignedp1);
3354 primother = get_narrower (other, &unsignedpo);
3356 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3357 if (unsignedp1 == unsignedpo
3358 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3359 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3361 tree type = TREE_TYPE (arg0);
3363       /* Make sure shorter operand is extended the right way
3364 	 to match the longer operand.  */
3365 primarg1 = fold_convert (signed_or_unsigned_type_for
3366 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
/* If re-widening the narrowed ARG1 reproduces ARG0, they match. */
3368 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3375 /* See if ARG is an expression that is either a comparison or is performing
3376    arithmetic on comparisons.  The comparisons must only be comparing
3377    two different values, which will be stored in *CVAL1 and *CVAL2; if
3378    they are nonzero it means that some operands have already been found.
3379    No variables may be used anywhere else in the expression except in the
3380    comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
3381    the expression and save_expr needs to be called with CVAL1 and CVAL2.
3383    If this is true, return 1.  Otherwise, return zero.  */
3386 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3388 enum tree_code code = TREE_CODE (arg);
3389 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3391   /* We can handle some of the tcc_expression cases here.  */
3392 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3394 else if (tclass == tcc_expression
3395 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3396 || code == COMPOUND_EXPR))
3397 tclass = tcc_binary;
3399 else if (tclass == tcc_expression && code == SAVE_EXPR
3400 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3402       /* If we've already found a CVAL1 or CVAL2, this expression is
3403 	 too complex to handle.  */
3404 if (*cval1 || *cval2)
/* Unary case: recurse into the single operand. */
3414 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must qualify. */
3417 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3418 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3419 cval1, cval2, save_p));
3424 case tcc_expression:
3425 if (code == COND_EXPR)
3426 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3427 cval1, cval2, save_p)
3428 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3429 cval1, cval2, save_p)
3430 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3431 cval1, cval2, save_p));
3434 case tcc_comparison:
3435       /* First see if we can handle the first operand, then the second.  For
3436 	 the second operand, we know *CVAL1 can't be zero.  It must be that
3437 	 one side of the comparison is each of the values; test for the
3438 	 case where this isn't true by failing if the two operands
3441 if (operand_equal_p (TREE_OPERAND (arg, 0),
3442 TREE_OPERAND (arg, 1), 0))
/* Record operand 0 into whichever CVAL slot it matches or fills. */
3446 *cval1 = TREE_OPERAND (arg, 0);
3447 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3449 else if (*cval2 == 0)
3450 *cval2 = TREE_OPERAND (arg, 0);
3451 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for operand 1; any third distinct value fails. */
3456 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3458 else if (*cval2 == 0)
3459 *cval2 = TREE_OPERAND (arg, 1);
3460 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3472 /* ARG is a tree that is known to contain just arithmetic operations and
3473    comparisons.  Evaluate the operations in the tree substituting NEW0 for
3474    any occurrence of OLD0 as an operand of a comparison and likewise for
/* ...and NEW1 for OLD1 (comment truncated by extraction).  Companion to
   twoval_comparison_p: it re-evaluates the qualified expression with the
   two comparison values replaced. */
3478 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3480 tree type = TREE_TYPE (arg);
3481 enum tree_code code = TREE_CODE (arg);
3482 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3484   /* We can handle some of the tcc_expression cases here.  */
3485 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3487 else if (tclass == tcc_expression
3488 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3489 tclass = tcc_binary;
/* Unary: rebuild with the substituted operand. */
3494 return fold_build1 (code, type,
3495 eval_subst (TREE_OPERAND (arg, 0),
3496 old0, new0, old1, new1));
/* Binary: substitute in both operands. */
3499 return fold_build2 (code, type,
3500 eval_subst (TREE_OPERAND (arg, 0),
3501 old0, new0, old1, new1),
3502 eval_subst (TREE_OPERAND (arg, 1),
3503 old0, new0, old1, new1));
3505 case tcc_expression:
3509 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3512 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary (COND_EXPR-like): substitute in all three operands. */
3515 return fold_build3 (code, type,
3516 eval_subst (TREE_OPERAND (arg, 0),
3517 old0, new0, old1, new1),
3518 eval_subst (TREE_OPERAND (arg, 1),
3519 old0, new0, old1, new1),
3520 eval_subst (TREE_OPERAND (arg, 2),
3521 old0, new0, old1, new1));
3525       /* Fall through - ???  */
3527 case tcc_comparison:
3529 tree arg0 = TREE_OPERAND (arg, 0);
3530 tree arg1 = TREE_OPERAND (arg, 1);
3532 	/* We need to check both for exact equality and tree equality.  The
3533 	   former will be true if the operand has a side-effect.  In that
3534 	   case, we know the operand occurred exactly once.  */
3536 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3538 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3541 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3543 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3546 return fold_build2 (code, type, arg0, arg1);
3554 /* Return a tree for the case when the result of an expression is RESULT
3555    converted to TYPE and OMITTED was previously an operand of the expression
3556    but is now not needed (e.g., we folded OMITTED * 0).
3558    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3559    the conversion of RESULT to TYPE.  */
3562 omit_one_operand (tree type, tree result, tree omitted)
3564 tree t = fold_convert (type, result);
3566   /* If the resulting operand is an empty statement, just return the omitted
3567      statement casted to void.  */
3568 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3569 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
/* Evaluate OMITTED for its side effects, then yield T. */
3571 if (TREE_SIDE_EFFECTS (omitted))
3572 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3574 return non_lvalue (t);
3577 /* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
/* Identical to omit_one_operand except the final wrapping respects the
   pedantic_lvalues flag. */
3580 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3582 tree t = fold_convert (type, result);
3584   /* If the resulting operand is an empty statement, just return the omitted
3585      statement casted to void.  */
3586 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3587 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3589 if (TREE_SIDE_EFFECTS (omitted))
3590 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3592 return pedantic_non_lvalue (t);
3595 /* Return a tree for the case when the result of an expression is RESULT
3596    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3597    of the expression but are now not needed.
3599    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3600    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3601    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3602    just do the conversion of RESULT to TYPE.  */
3605 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3607 tree t = fold_convert (type, result);
/* Wrap innermost-last so OMITTED1 ends up evaluated first. */
3609 if (TREE_SIDE_EFFECTS (omitted2))
3610 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3611 if (TREE_SIDE_EFFECTS (omitted1))
3612 t = build2 (COMPOUND_EXPR, type, omitted1, t);
/* Only a plain (non-compound) result needs the non-lvalue wrapper. */
3614 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3618 /* Return a simplified tree node for the truth-negation of ARG.  This
3619    never alters ARG itself.  We assume that ARG is an operation that
3620    returns a truth value (0 or 1).
3622    FIXME: one would think we would fold the result, but it causes
3623    problems with the dominator optimizer.  */
3626 fold_truth_not_expr (tree arg)
3628 tree type = TREE_TYPE (arg);
3629 enum tree_code code = TREE_CODE (arg);
3631   /* If this is a comparison, we can simply invert it, except for
3632      floating-point non-equality comparisons, in which case we just
3633      enclose a TRUTH_NOT_EXPR around what we have.  */
3635 if (TREE_CODE_CLASS (code) == tcc_comparison)
3637 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0))
3638 if (FLOAT_TYPE_P (op_type)
3639 && flag_trapping_math
3640 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3641 && code != NE_EXPR && code != EQ_EXPR)
/* invert_tree_comparison returns ERROR_MARK when the inversion is
   unsafe (NaN semantics); then we give up on this path. */
3645 code = invert_tree_comparison (code,
3646 HONOR_NANS (TYPE_MODE (op_type)));
3647 if (code == ERROR_MARK)
3650 return build2 (code, type,
3651 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant truth value: negate it directly. */
3658 return constant_boolean_node (integer_zerop (arg), type);
/* De Morgan: !(a & b) == !a | !b, and dually for OR below. */
3660 case TRUTH_AND_EXPR:
3661 return build2 (TRUTH_OR_EXPR, type,
3662 invert_truthvalue (TREE_OPERAND (arg, 0)),
3663 invert_truthvalue (TREE_OPERAND (arg, 1)));
3666 return build2 (TRUTH_AND_EXPR, type,
3667 invert_truthvalue (TREE_OPERAND (arg, 0)),
3668 invert_truthvalue (TREE_OPERAND (arg, 1)));
3670 case TRUTH_XOR_EXPR:
3671       /* Here we can invert either operand.  We invert the first operand
3672 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3673 	 result is the XOR of the first operand with the inside of the
3674 	 negation of the second operand.  */
3676 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3677 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3678 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3680 return build2 (TRUTH_XOR_EXPR, type,
3681 invert_truthvalue (TREE_OPERAND (arg, 0)),
3682 TREE_OPERAND (arg, 1));
/* De Morgan for the short-circuit forms keeps evaluation order. */
3684 case TRUTH_ANDIF_EXPR:
3685 return build2 (TRUTH_ORIF_EXPR, type,
3686 invert_truthvalue (TREE_OPERAND (arg, 0)),
3687 invert_truthvalue (TREE_OPERAND (arg, 1)));
3689 case TRUTH_ORIF_EXPR:
3690 return build2 (TRUTH_ANDIF_EXPR, type,
3691 invert_truthvalue (TREE_OPERAND (arg, 0)),
3692 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* Double negation cancels. */
3694 case TRUTH_NOT_EXPR:
3695 return TREE_OPERAND (arg, 0);
3699 tree arg1 = TREE_OPERAND (arg, 1);
3700 tree arg2 = TREE_OPERAND (arg, 2);
3701 	/* A COND_EXPR may have a throw as one operand, which
3702 	   then has void type.  Just leave void operands
3704 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3705 VOID_TYPE_P (TREE_TYPE (arg1))
3706 ? arg1 : invert_truthvalue (arg1),
3707 VOID_TYPE_P (TREE_TYPE (arg2))
3708 ? arg2 : invert_truthvalue (arg2));
/* (a, b) negates to (a, !b). */
3712 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3713 invert_truthvalue (TREE_OPERAND (arg, 1)));
3715 case NON_LVALUE_EXPR:
3716 return invert_truthvalue (TREE_OPERAND (arg, 0));
3719 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3720 return build1 (TRUTH_NOT_EXPR, type, arg);
/* Push the negation through a conversion-like unary node. */
3724 return build1 (TREE_CODE (arg), type,
3725 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* (x & 1) negates to (x & 1) == 0 — only valid for a mask of 1. */
3728 if (!integer_onep (TREE_OPERAND (arg, 1)))
3730 return build2 (EQ_EXPR, type, arg,
3731 build_int_cst (type, 0));
3734 return build1 (TRUTH_NOT_EXPR, type, arg);
3736 case CLEANUP_POINT_EXPR:
3737 return build1 (CLEANUP_POINT_EXPR, type,
3738 invert_truthvalue (TREE_OPERAND (arg, 0)));
3747 /* Return a simplified tree node for the truth-negation of ARG.  This
3748    never alters ARG itself.  We assume that ARG is an operation that
3749    returns a truth value (0 or 1).
3751    FIXME: one would think we would fold the result, but it causes
3752    problems with the dominator optimizer.  */
/* Public entry point: tries fold_truth_not_expr, and falls back to a
   plain TRUTH_NOT_EXPR wrapper when no simplification applies. */
3755 invert_truthvalue (tree arg)
3759 if (TREE_CODE (arg) == ERROR_MARK)
3762 tem = fold_truth_not_expr (arg);
3764 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3769 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3770    operands are another bit-wise operation with a common input.  If so,
3771    distribute the bit operations to save an operation and possibly two if
3772    constants are involved.  For example, convert
3773 	(A | B) & (A | C) into A | (B & C)
3774    Further simplification will occur if B and C are constants.
3776    If this optimization cannot be done, 0 will be returned.  */
3779 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must be the same inner bit-op (AND or IOR), and that
   inner code must differ from the outer CODE for distribution to help. */
3784 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3785 || TREE_CODE (arg0) == code
3786 || (TREE_CODE (arg0) != BIT_AND_EXPR
3787 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the shared operand; try all four operand pairings. */
3790 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3792 common = TREE_OPERAND (arg0, 0);
3793 left = TREE_OPERAND (arg0, 1);
3794 right = TREE_OPERAND (arg1, 1);
3796 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3798 common = TREE_OPERAND (arg0, 0);
3799 left = TREE_OPERAND (arg0, 1);
3800 right = TREE_OPERAND (arg1, 0);
3802 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3804 common = TREE_OPERAND (arg0, 1);
3805 left = TREE_OPERAND (arg0, 0);
3806 right = TREE_OPERAND (arg1, 1);
3808 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3810 common = TREE_OPERAND (arg0, 1);
3811 left = TREE_OPERAND (arg0, 0);
3812 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON <inner-op> (LEFT <outer-op> RIGHT). */
3817 common = fold_convert (type, common);
3818 left = fold_convert (type, left);
3819 right = fold_convert (type, right);
3820 return fold_build2 (TREE_CODE (arg0), type, common,
3821 fold_build2 (code, type, left, right));
3824 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3825    with code CODE.  This optimization is unsafe.  */
/* "Unsafe" = only valid under -ffast-math-style reassociation; callers
   are responsible for gating it. */
3827 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
/* NOTE(review): the comment above says both args are RDIV_EXPRs, yet
   mul0/mul1 test for MULT_EXPR — lines appear elided here; confirm
   against the full source before relying on this summary. */
3829 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3830 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3832   /* (A / C) +- (B / C) -> (A +- B) / C.  */
3834 && operand_equal_p (TREE_OPERAND (arg0, 1),
3835 TREE_OPERAND (arg1, 1), 0))
3836 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3837 fold_build2 (code, type,
3838 TREE_OPERAND (arg0, 0),
3839 TREE_OPERAND (arg1, 0)),
3840 TREE_OPERAND (arg0, 1));
3842   /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
3843 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3844 TREE_OPERAND (arg1, 0), 0)
3845 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3846 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3848 REAL_VALUE_TYPE r0, r1;
3849 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3850 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* Compute 1/C1 and 1/C2 at compile time, then combine with CODE. */
3852 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3854 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3855 real_arithmetic (&r0, code, &r0, &r1);
3856 return fold_build2 (MULT_EXPR, type,
3857 TREE_OPERAND (arg0, 0),
3858 build_real (type, r0));
3864 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3865 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3868 make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
3869 HOST_WIDE_INT bitpos, int unsignedp)
3871 tree result, bftype;
/* Fast path: if the whole object is being referenced (bitpos presumably 0
   — some guard lines are elided in this listing) and INNER is integral or
   a pointer of exactly BITSIZE bits, a plain conversion suffices.  */
3875 tree size = TYPE_SIZE (TREE_TYPE (inner));
3876 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3877 || POINTER_TYPE_P (TREE_TYPE (inner)))
3878 && host_integerp (size, 0)
3879 && tree_low_cst (size, 0) == bitsize)
3880 return fold_convert (type, inner);
/* BIT_FIELD_REF needs a type of exactly BITSIZE bits with the requested
   signedness; build a nonstandard integer type if TYPE does not match.  */
3884 if (TYPE_PRECISION (bftype) != bitsize
3885 || TYPE_UNSIGNED (bftype) == !unsignedp)
3886 bftype = build_nonstandard_integer_type (bitsize, 0);
3888 result = build3 (BIT_FIELD_REF, bftype, inner,
3889 size_int (bitsize), bitsize_int (bitpos));
3892 result = fold_convert (type, result);
3897 /* Optimize a bit-field compare.
3899 There are two cases: First is a compare against a constant and the
3900 second is a comparison of two items where the fields are at the same
3901 bit position relative to the start of a chunk (byte, halfword, word)
3902 large enough to contain it. In these cases we can avoid the shift
3903 implicit in bitfield extractions.
3905 For constants, we emit a compare of the shifted constant with the
3906 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3907 compared. For two fields at the same position, we do the ANDs with the
3908 similar mask and compare the result of the ANDs.
3910 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3911 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3912 are the left and right operands of the comparison, respectively.
3914 If the optimization described above can be done, we return the resulting
3915 tree. Otherwise we return zero. */
3918 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3921 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3922 tree type = TREE_TYPE (lhs);
3923 tree signed_type, unsigned_type;
3924 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3925 enum machine_mode lmode, rmode, nmode;
3926 int lunsignedp, runsignedp;
3927 int lvolatilep = 0, rvolatilep = 0;
3928 tree linner, rinner = NULL_TREE;
/* NOTE(review): this listing elides several original lines (the lhs/rhs
   parameter line, `tree mask; tree offset;` declarations, early `return 0`
   statements and braces).  Comments below describe only what is visible.  */
3932 /* Get all the information about the extractions being done. If the bit size
3933 if the same as the size of the underlying object, we aren't doing an
3934 extraction at all and so can do nothing. We also don't want to
3935 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3936 then will no longer be able to replace it. */
3937 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3938 &lunsignedp, &lvolatilep, false);
3939 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3940 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3945 /* If this is not a constant, we can only do something if bit positions,
3946 sizes, and signedness are the same. */
3947 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3948 &runsignedp, &rvolatilep, false);
3950 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3951 || lunsignedp != runsignedp || offset != 0
3952 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3956 /* See if we can find a mode to refer to this field. We should be able to,
3957 but fail if we can't. */
3958 nmode = get_best_mode (lbitsize, lbitpos,
3959 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3960 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3961 TYPE_ALIGN (TREE_TYPE (rinner))),
3962 word_mode, lvolatilep || rvolatilep);
3963 if (nmode == VOIDmode)
3966 /* Set signed and unsigned types of the precision of this mode for the
3968 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3969 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3971 /* Compute the bit position and size for the new reference and our offset
3972 within it. If the new reference is the same size as the original, we
3973 won't optimize anything, so return zero. */
3974 nbitsize = GET_MODE_BITSIZE (nmode);
/* Round the bit position down to the containing NBITSIZE-bit chunk.  */
3975 nbitpos = lbitpos & ~ (nbitsize - 1);
3977 if (nbitsize == lbitsize)
/* On big-endian targets bit 0 is at the other end of the chunk.  */
3980 if (BYTES_BIG_ENDIAN)
3981 lbitpos = nbitsize - lbitsize - lbitpos;
3983 /* Make the mask to be used against the extracted field. */
/* Build LBITSIZE ones positioned at LBITPOS within the chunk, by
   shifting an all-ones value left then (logically) right.  */
3984 mask = build_int_cst_type (unsigned_type, -1);
3985 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3986 mask = const_binop (RSHIFT_EXPR, mask,
3987 size_int (nbitsize - lbitsize - lbitpos), 0);
3990 /* If not comparing with constant, just rework the comparison
3992 return fold_build2 (code, compare_type,
3993 fold_build2 (BIT_AND_EXPR, unsigned_type,
3994 make_bit_field_ref (linner,
3999 fold_build2 (BIT_AND_EXPR, unsigned_type,
4000 make_bit_field_ref (rinner,
4006 /* Otherwise, we are handling the constant case. See if the constant is too
4007 big for the field. Warn and return a tree of for 0 (false) if so. We do
4008 this not only for its own sake, but to avoid having to test for this
4009 error case below. If we didn't, we might generate wrong code.
4011 For unsigned fields, the constant shifted right by the field length should
4012 be all zero. For signed fields, the high-order bits should agree with
4017 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4018 fold_convert (unsigned_type, rhs),
4019 size_int (lbitsize), 0)))
4021 warning (0, "comparison is always %d due to width of bit-field",
4023 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed case: all bits above the sign bit must equal the sign bit.  */
4028 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4029 size_int (lbitsize - 1), 0);
4030 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4032 warning (0, "comparison is always %d due to width of bit-field",
4034 return constant_boolean_node (code == NE_EXPR, compare_type);
4038 /* Single-bit compares should always be against zero. */
4039 if (lbitsize == 1 && ! integer_zerop (rhs))
4041 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4042 rhs = build_int_cst (type, 0);
4045 /* Make a new bitfield reference, shift the constant over the
4046 appropriate number of bits and mask it with the computed mask
4047 (in case this was a signed field). If we changed it, make a new one. */
4048 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
/* Preserve side-effect/volatile flags on the rebuilt reference.  */
4051 TREE_SIDE_EFFECTS (lhs) = 1;
4052 TREE_THIS_VOLATILE (lhs) = 1;
4055 rhs = const_binop (BIT_AND_EXPR,
4056 const_binop (LSHIFT_EXPR,
4057 fold_convert (unsigned_type, rhs),
4058 size_int (lbitpos), 0),
4061 return build2 (code, compare_type,
4062 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4066 /* Subroutine for fold_truthop: decode a field reference.
4068 If EXP is a comparison reference, we return the innermost reference.
4070 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4071 set to the starting bit number.
4073 If the innermost field can be completely contained in a mode-sized
4074 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4076 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4077 otherwise it is not changed.
4079 *PUNSIGNEDP is set to the signedness of the field.
4081 *PMASK is set to the mask used. This is either contained in a
4082 BIT_AND_EXPR or derived from the width of the field.
4084 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4086 Return 0 if this is not a component reference or is one that we can't
4087 do anything with. */
4090 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4091 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4092 int *punsignedp, int *pvolatilep,
4093 tree *pmask, tree *pand_mask)
4095 tree outer_type = 0;
4097 tree mask, inner, offset;
4099 unsigned int precision;
/* NOTE(review): declarations of `and_mask` and `unsigned_type`, the
   `return 0` failure paths and some braces are elided in this listing.  */
4101 /* All the optimizations using this function assume integer fields.
4102 There are problems with FP fields since the type_for_size call
4103 below can fail for, e.g., XFmode. */
4104 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4107 /* We are interested in the bare arrangement of bits, so strip everything
4108 that doesn't affect the machine mode. However, record the type of the
4109 outermost expression if it may matter below. */
4110 if (CONVERT_EXPR_P (exp)
4111 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4112 outer_type = TREE_TYPE (exp);
/* Peel off an explicit mask: x & C contributes C to *PAND_MASK.  */
4115 if (TREE_CODE (exp) == BIT_AND_EXPR)
4117 and_mask = TREE_OPERAND (exp, 1);
4118 exp = TREE_OPERAND (exp, 0);
4119 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4120 if (TREE_CODE (and_mask) != INTEGER_CST)
4124 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4125 punsignedp, pvolatilep, false);
4126 if ((inner == exp && and_mask == 0)
4127 || *pbitsize < 0 || offset != 0
4128 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4131 /* If the number of bits in the reference is the same as the bitsize of
4132 the outer type, then the outer type gives the signedness. Otherwise
4133 (in case of a small bitfield) the signedness is unchanged. */
4134 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4135 *punsignedp = TYPE_UNSIGNED (outer_type);
4137 /* Compute the mask to access the bitfield. */
4138 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4139 precision = TYPE_PRECISION (unsigned_type);
/* All-ones value narrowed to *PBITSIZE bits via a left/right shift pair.  */
4141 mask = build_int_cst_type (unsigned_type, -1);
4143 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4144 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4146 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4148 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4149 fold_convert (unsigned_type, and_mask), mask);
4152 *pand_mask = and_mask;
4156 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
/* (continuation: "...bits of MASK's type" — the rest of this comment and
   the `tree tmask;` declaration are elided in this listing).  */
4160 all_ones_mask_p (const_tree mask, int size)
4162 tree type = TREE_TYPE (mask);
4163 unsigned int precision = TYPE_PRECISION (type);
/* Build an all-ones constant in the signed counterpart type (so the right
   shift below is arithmetic on the high bits) ...  */
4166 tmask = build_int_cst_type (signed_type_for (type), -1);
/* ... then compare MASK against all-ones shifted to occupy exactly the
   SIZE low-order bits.  */
4169 tree_int_cst_equal (mask,
4170 const_binop (RSHIFT_EXPR,
4171 const_binop (LSHIFT_EXPR, tmask,
4172 size_int (precision - size),
4174 size_int (precision - size), 0));
4177 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4178 represents the sign bit of EXP's type. If EXP represents a sign
4179 or zero extension, also test VAL against the unextended type.
4180 The return value is the (sub)expression whose sign bit is VAL,
4181 or NULL_TREE otherwise. */
4184 sign_bit_p (tree exp, const_tree val)
4186 unsigned HOST_WIDE_INT mask_lo, lo;
4187 HOST_WIDE_INT mask_hi, hi;
4191 /* Tree EXP must have an integral type. */
4192 t = TREE_TYPE (exp);
4193 if (! INTEGRAL_TYPE_P (t))
4196 /* Tree VAL must be an integer constant. */
4197 if (TREE_CODE (val) != INTEGER_CST
4198 || TREE_OVERFLOW (val))
/* Compute the sign-bit pattern as a (hi, lo) double-word pair, plus a
   (mask_hi, mask_lo) mask covering exactly WIDTH bits.
   NOTE(review): the listing elides the branches that zero the unused
   half of each pair; only the non-trivial assignments are visible.  */
4201 width = TYPE_PRECISION (t);
4202 if (width > HOST_BITS_PER_WIDE_INT)
/* Sign bit lives in the high word.  */
4204 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4207 mask_hi = ((unsigned HOST_WIDE_INT) -1
4208 >> (2 * HOST_BITS_PER_WIDE_INT - width));
/* Sign bit lives in the low word.  */
4214 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4217 mask_lo = ((unsigned HOST_WIDE_INT) -1
4218 >> (HOST_BITS_PER_WIDE_INT - width));
4221 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4222 treat VAL as if it were unsigned. */
4223 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4224 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4227 /* Handle extension from a narrower type. */
4228 if (TREE_CODE (exp) == NOP_EXPR
4229 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4230 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4235 /* Subroutine for fold_truthop: determine if an operand is simple enough
4236 to be evaluated unconditionally. */
4239 simple_operand_p (const_tree exp)
4241 /* Strip any conversions that don't change the machine mode. */
/* An operand is "simple" if it is a constant, an SSA name, or (per the
   elided DECL_P-style test on the line before L4247) a declaration that
   cannot trap or be changed behind our back.  */
4244 return (CONSTANT_CLASS_P (exp)
4245 || TREE_CODE (exp) == SSA_NAME
4247 && ! TREE_ADDRESSABLE (exp)
4248 && ! TREE_THIS_VOLATILE (exp)
4249 && ! DECL_NONLOCAL (exp)
4250 /* Don't regard global variables as simple. They may be
4251 allocated in ways unknown to the compiler (shared memory,
4252 #pragma weak, etc). */
4253 && ! TREE_PUBLIC (exp)
4254 && ! DECL_EXTERNAL (exp)
4255 /* Loading a static variable is unduly expensive, but global
4256 registers aren't expensive. */
4257 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4260 /* The following functions are subroutines to fold_range_test and allow it to
4261 try to change a logical combination of comparisons into a range test.
4264 X == 2 || X == 3 || X == 4 || X == 5
4268 (unsigned) (X - 2) <= 3
4270 We describe each set of comparisons as being either inside or outside
4271 a range, using a variable named like IN_P, and then describe the
4272 range with a lower and upper bound. If one of the bounds is omitted,
4273 it represents either the highest or lowest value of the type.
4275 In the comments below, we represent a range by two numbers in brackets
4276 preceded by a "+" to designate being inside that range, or a "-" to
4277 designate being outside that range, so the condition can be inverted by
4278 flipping the prefix. An omitted bound is represented by a "-". For
4279 example, "- [-, 10]" means being outside the range starting at the lowest
4280 possible value and ending at 10, in other words, being greater than 10.
4281 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4284 We set up things so that the missing bounds are handled in a consistent
4285 manner so neither a missing bound nor "true" and "false" need to be
4286 handled using a special case. */
4288 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4289 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4290 and UPPER1_P are nonzero if the respective argument is an upper bound
4291 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4292 must be specified for a comparison. ARG1 will be converted to ARG0's
4293 type if both are specified. */
4296 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4297 tree arg1, int upper1_p)
/* NOTE(review): declarations of `tem`, `sgn0`, `sgn1`, `result`, the
   switch head over CODE and a default/gcc_unreachable arm are elided
   in this listing.  */
4303 /* If neither arg represents infinity, do the normal operation.
4304 Else, if not a comparison, return infinity. Else handle the special
4305 comparison rules. Note that most of the cases below won't occur, but
4306 are handled for consistency. */
4308 if (arg0 != 0 && arg1 != 0)
4310 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4311 arg0, fold_convert (TREE_TYPE (arg0), arg1));
/* Only a fully-folded constant result is useful to callers.  */
4313 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4316 if (TREE_CODE_CLASS (code) != tcc_comparison)
4319 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4320 for neither. In real maths, we cannot assume open ended ranges are
4321 the same. But, this is computer arithmetic, where numbers are finite.
4322 We can therefore make the transformation of any unbounded range with
4323 the value Z, Z being greater than any representable number. This permits
4324 us to treat unbounded ranges as equal. */
4325 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4326 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Compare the signed "infinity markers" according to CODE (the switch
   over CODE is elided; each visible line is one case's body).  */
4330 result = sgn0 == sgn1;
4333 result = sgn0 != sgn1;
4336 result = sgn0 < sgn1;
4339 result = sgn0 <= sgn1;
4342 result = sgn0 > sgn1;
4345 result = sgn0 >= sgn1;
4351 return constant_boolean_node (result, type);
4354 /* Given EXP, a logical expression, set the range it is testing into
4355 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4356 actually being tested. *PLOW and *PHIGH will be made of the same
4357 type as the returned expression. If EXP is not a comparison, we
4358 will most likely not be returning a useful value and range. Set
4359 *STRICT_OVERFLOW_P to true if the return value is only valid
4360 because signed overflow is undefined; otherwise, do not change
4361 *STRICT_OVERFLOW_P. */
4364 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4365 bool *strict_overflow_p)
4367 enum tree_code code;
4368 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4369 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4371 tree low, high, n_low, n_high;
/* NOTE(review): this listing elides the `int in_p, n_in_p;` declaration,
   the enclosing `while (1)` loop, the switch head, many `break`/`continue`
   statements and braces.  Comments below annotate only visible lines.  */
4373 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4374 and see if we can refine the range. Some of the cases below may not
4375 happen, but it doesn't seem worth worrying about this. We "continue"
4376 the outer loop when we've changed something; otherwise we "break"
4377 the switch, which will "break" the while. */
4380 low = high = build_int_cst (TREE_TYPE (exp), 0);
4384 code = TREE_CODE (exp);
4385 exp_type = TREE_TYPE (exp);
/* Pick up operands when EXP is an expression node of a suitable class.  */
4387 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4389 if (TREE_OPERAND_LENGTH (exp) > 0)
4390 arg0 = TREE_OPERAND (exp, 0);
4391 if (TREE_CODE_CLASS (code) == tcc_comparison
4392 || TREE_CODE_CLASS (code) == tcc_unary
4393 || TREE_CODE_CLASS (code) == tcc_binary)
4394 arg0_type = TREE_TYPE (arg0);
4395 if (TREE_CODE_CLASS (code) == tcc_binary
4396 || TREE_CODE_CLASS (code) == tcc_comparison
4397 || (TREE_CODE_CLASS (code) == tcc_expression
4398 && TREE_OPERAND_LENGTH (exp) > 1))
4399 arg1 = TREE_OPERAND (exp, 1);
/* !E tests the complementary range of E.  */
4404 case TRUTH_NOT_EXPR:
4405 in_p = ! in_p, exp = arg0;
4408 case EQ_EXPR: case NE_EXPR:
4409 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4410 /* We can only do something if the range is testing for zero
4411 and if the second operand is an integer constant. Note that
4412 saying something is "in" the range we make is done by
4413 complementing IN_P since it will set in the initial case of
4414 being not equal to zero; "out" is leaving it alone. */
4415 if (low == 0 || high == 0
4416 || ! integer_zerop (low) || ! integer_zerop (high)
4417 || TREE_CODE (arg1) != INTEGER_CST)
/* Translate each comparison into an (in_p, low, high) triple.  */
4422 case NE_EXPR: /* - [c, c] */
4425 case EQ_EXPR: /* + [c, c] */
4426 in_p = ! in_p, low = high = arg1;
4428 case GT_EXPR: /* - [-, c] */
4429 low = 0, high = arg1;
4431 case GE_EXPR: /* + [c, -] */
4432 in_p = ! in_p, low = arg1, high = 0;
4434 case LT_EXPR: /* - [c, -] */
4435 low = arg1, high = 0;
4437 case LE_EXPR: /* + [-, c] */
4438 in_p = ! in_p, low = 0, high = arg1;
4444 /* If this is an unsigned comparison, we also know that EXP is
4445 greater than or equal to zero. We base the range tests we make
4446 on that fact, so we record it here so we can parse existing
4447 range tests. We test arg0_type since often the return type
4448 of, e.g. EQ_EXPR, is boolean. */
4449 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4451 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4453 build_int_cst (arg0_type, 0),
4457 in_p = n_in_p, low = n_low, high = n_high;
4459 /* If the high bound is missing, but we have a nonzero low
4460 bound, reverse the range so it goes from zero to the low bound
4462 if (high == 0 && low && ! integer_zerop (low))
4465 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4466 integer_one_node, 0);
4467 low = build_int_cst (arg0_type, 0);
/* NEGATE_EXPR case (label elided in this listing):  */
4475 /* (-x) IN [a,b] -> x in [-b, -a] */
4476 n_low = range_binop (MINUS_EXPR, exp_type,
4477 build_int_cst (exp_type, 0),
4479 n_high = range_binop (MINUS_EXPR, exp_type,
4480 build_int_cst (exp_type, 0),
4482 if (n_high != 0 && TREE_OVERFLOW (n_high))
/* BIT_NOT_EXPR case (label elided): ~x == -x - 1.  */
4488 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4489 build_int_cst (exp_type, 1));
4492 case PLUS_EXPR: case MINUS_EXPR:
4493 if (TREE_CODE (arg1) != INTEGER_CST)
4496 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4497 move a constant to the other side. */
4498 if (!TYPE_UNSIGNED (arg0_type)
4499 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4502 /* If EXP is signed, any overflow in the computation is undefined,
4503 so we don't worry about it so long as our computations on
4504 the bounds don't overflow. For unsigned, overflow is defined
4505 and this is exactly the right thing. */
4506 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4507 arg0_type, low, 0, arg1, 0);
4508 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4509 arg0_type, high, 1, arg1, 0);
4510 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4511 || (n_high != 0 && TREE_OVERFLOW (n_high)))
/* Record that this result relies on undefined signed overflow.  */
4514 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4515 *strict_overflow_p = true;
4518 /* Check for an unsigned range which has wrapped around the maximum
4519 value thus making n_high < n_low, and normalize it. */
4520 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4522 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4523 integer_one_node, 0);
4524 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4525 integer_one_node, 0);
4527 /* If the range is of the form +/- [ x+1, x ], we won't
4528 be able to normalize it. But then, it represents the
4529 whole range or the empty set, so make it
4531 if (tree_int_cst_equal (n_low, low)
4532 && tree_int_cst_equal (n_high, high))
4538 low = n_low, high = n_high;
4543 CASE_CONVERT: case NON_LVALUE_EXPR:
/* Never widen the range through a narrowing conversion.  */
4544 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4547 if (! INTEGRAL_TYPE_P (arg0_type)
4548 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4549 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4552 n_low = low, n_high = high;
4555 n_low = fold_convert (arg0_type, n_low);
4558 n_high = fold_convert (arg0_type, n_high);
4561 /* If we're converting arg0 from an unsigned type, to exp,
4562 a signed type, we will be doing the comparison as unsigned.
4563 The tests above have already verified that LOW and HIGH
4566 So we have to ensure that we will handle large unsigned
4567 values the same way that the current signed bounds treat
4570 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4574 /* For fixed-point modes, we need to pass the saturating flag
4575 as the 2nd parameter. */
4576 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4577 equiv_type = lang_hooks.types.type_for_mode
4578 (TYPE_MODE (arg0_type),
4579 TYPE_SATURATING (arg0_type));
4581 equiv_type = lang_hooks.types.type_for_mode
4582 (TYPE_MODE (arg0_type), 1);
4584 /* A range without an upper bound is, naturally, unbounded.
4585 Since convert would have cropped a very large value, use
4586 the max value for the destination type. */
4588 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4589 : TYPE_MAX_VALUE (arg0_type);
4591 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4592 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4593 fold_convert (arg0_type,
4595 build_int_cst (arg0_type, 1));
4597 /* If the low bound is specified, "and" the range with the
4598 range for which the original unsigned value will be
4602 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4603 1, n_low, n_high, 1,
4604 fold_convert (arg0_type,
4609 in_p = (n_in_p == in_p);
4613 /* Otherwise, "or" the range with the range of the input
4614 that will be interpreted as negative. */
4615 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4616 0, n_low, n_high, 1,
4617 fold_convert (arg0_type,
4622 in_p = (in_p != n_in_p);
4627 low = n_low, high = n_high;
4637 /* If EXP is a constant, we can evaluate whether this is true or false. */
4638 if (TREE_CODE (exp) == INTEGER_CST)
4640 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4642 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4648 *pin_p = in_p, *plow = low, *phigh = high;
4652 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4653 type, TYPE, return an expression to test if EXP is in (or out of, depending
4654 on IN_P) the range. Return 0 if the test couldn't be created. */
4657 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4659 tree etype = TREE_TYPE (exp);
/* NOTE(review): the `tree value;` declaration, some `if` heads guarding
   the one-sided cases (low == 0 / high == 0) and several `return 0`
   fall-throughs are elided in this listing.  */
4662 #ifdef HAVE_canonicalize_funcptr_for_compare
4663 /* Disable this optimization for function pointer expressions
4664 on targets that require function pointer canonicalization. */
4665 if (HAVE_canonicalize_funcptr_for_compare
4666 && TREE_CODE (etype) == POINTER_TYPE
4667 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* An "out of range" test is built as the inverse of the "in range" one.  */
4673 value = build_range_check (type, exp, 1, low, high);
4675 return invert_truthvalue (value);
/* No bounds at all: the range covers the whole type, i.e. always true.  */
4680 if (low == 0 && high == 0)
4681 return build_int_cst (type, 1);
/* One-sided ranges degenerate to a single comparison.  */
4684 return fold_build2 (LE_EXPR, type, exp,
4685 fold_convert (etype, high));
4688 return fold_build2 (GE_EXPR, type, exp,
4689 fold_convert (etype, low));
/* A single-point range is an equality test.  */
4691 if (operand_equal_p (low, high, 0))
4692 return fold_build2 (EQ_EXPR, type, exp,
4693 fold_convert (etype, low));
/* [0, high]: redo the check in the unsigned type, where the low bound
   is implicit.  */
4695 if (integer_zerop (low))
4697 if (! TYPE_UNSIGNED (etype))
4699 etype = unsigned_type_for (etype);
4700 high = fold_convert (etype, high);
4701 exp = fold_convert (etype, exp);
4703 return build_range_check (type, exp, 1, 0, high);
4706 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4707 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4709 unsigned HOST_WIDE_INT lo;
/* Does HIGH equal the signed maximum of ETYPE's precision?  Build that
   maximum as a (hi, lo) double-word pair and compare.  */
4713 prec = TYPE_PRECISION (etype);
4714 if (prec <= HOST_BITS_PER_WIDE_INT)
4717 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4721 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4722 lo = (unsigned HOST_WIDE_INT) -1;
4725 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4727 if (TYPE_UNSIGNED (etype))
4729 tree signed_etype = signed_type_for (etype);
/* signed_type_for may return a wider type; force the exact precision.  */
4730 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4732 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4734 etype = signed_etype;
4735 exp = fold_convert (etype, exp);
4737 return fold_build2 (GT_EXPR, type, exp,
4738 build_int_cst (etype, 0));
4742 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4743 This requires wrap-around arithmetics for the type of the expression. */
4744 switch (TREE_CODE (etype))
4747 /* There is no requirement that LOW be within the range of ETYPE
4748 if the latter is a subtype. It must, however, be within the base
4749 type of ETYPE. So be sure we do the subtraction in that type. */
4750 if (TREE_TYPE (etype))
4751 etype = TREE_TYPE (etype);
/* Enumeral/boolean case (labels elided): switch to a plain integer type
   of the same precision.  */
4756 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4757 TYPE_UNSIGNED (etype));
4764 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4765 if (TREE_CODE (etype) == INTEGER_TYPE
4766 && !TYPE_OVERFLOW_WRAPS (etype))
4768 tree utype, minv, maxv;
4770 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4771 for the type in question, as we rely on this here. */
4772 utype = unsigned_type_for (etype);
4773 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4774 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4775 integer_one_node, 1);
4776 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4778 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
/* Shift everything into ETYPE and test EXP - LOW against HIGH - LOW.  */
4785 high = fold_convert (etype, high);
4786 low = fold_convert (etype, low);
4787 exp = fold_convert (etype, exp);
4789 value = const_binop (MINUS_EXPR, high, low, 0);
/* Pointers use POINTER_PLUS_EXPR with a sizetype offset (negated LOW).  */
4792 if (POINTER_TYPE_P (etype))
4794 if (value != 0 && !TREE_OVERFLOW (value))
4796 low = fold_convert (sizetype, low);
4797 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4798 return build_range_check (type,
4799 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4800 1, build_int_cst (etype, 0), value);
4805 if (value != 0 && !TREE_OVERFLOW (value))
4806 return build_range_check (type,
4807 fold_build2 (MINUS_EXPR, etype, exp, low),
4808 1, build_int_cst (etype, 0), value);
4813 /* Return the predecessor of VAL in its type, handling the infinite case. */
4816 range_predecessor (tree val)
4818 tree type = TREE_TYPE (val);
/* The minimum value has no predecessor (the elided body presumably
   returns 0 here — confirm against full source).  */
4820 if (INTEGRAL_TYPE_P (type)
4821 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4824 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4827 /* Return the successor of VAL in its type, handling the infinite case. */
4830 range_successor (tree val)
4832 tree type = TREE_TYPE (val);
/* The maximum value has no successor (the elided body presumably
   returns 0 here — confirm against full source).  */
4834 if (INTEGRAL_TYPE_P (type)
4835 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4838 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4841 /* Given two ranges, see if we can merge them into one. Return 1 if we
4842 can, 0 if we can't. Set the output range into the specified parameters. */
4845 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4846 tree high0, int in1_p, tree low1, tree high1)
/* NOTE(review): declarations (`int no_overlap, subset, temp; tree tem,
   low, high; int in_p;`), several `if`/`else` heads, `return 0/1`
   statements and braces are elided in this listing.  */
4854 int lowequal = ((low0 == 0 && low1 == 0)
4855 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4856 low0, 0, low1, 0)));
4857 int highequal = ((high0 == 0 && high1 == 0)
4858 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4859 high0, 1, high1, 1)));
4861 /* Make range 0 be the range that starts first, or ends last if they
4862 start at the same value. Swap them if it isn't. */
4863 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4866 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4867 high1, 1, high0, 1))))
4869 temp = in0_p, in0_p = in1_p, in1_p = temp;
4870 tem = low0, low0 = low1, low1 = tem;
4871 tem = high0, high0 = high1, high1 = tem;
4874 /* Now flag two cases, whether the ranges are disjoint or whether the
4875 second range is totally subsumed in the first. Note that the tests
4876 below are simplified by the ones above. */
4877 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4878 high0, 1, low1, 0));
4879 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4880 high1, 1, high0, 1));
4882 /* We now have four cases, depending on whether we are including or
4883 excluding the two ranges. */
/* Case 1 (head elided): in0_p && in1_p — intersection.  */
4886 /* If they don't overlap, the result is false. If the second range
4887 is a subset it is the result. Otherwise, the range is from the start
4888 of the second to the end of the first. */
4890 in_p = 0, low = high = 0;
4892 in_p = 1, low = low1, high = high1;
4894 in_p = 1, low = low1, high = high0;
4897 else if (in0_p && ! in1_p)
4899 /* If they don't overlap, the result is the first range. If they are
4900 equal, the result is false. If the second range is a subset of the
4901 first, and the ranges begin at the same place, we go from just after
4902 the end of the second range to the end of the first. If the second
4903 range is not a subset of the first, or if it is a subset and both
4904 ranges end at the same place, the range starts at the start of the
4905 first range and ends just before the second range.
4906 Otherwise, we can't describe this as a single range. */
4908 in_p = 1, low = low0, high = high0;
4909 else if (lowequal && highequal)
4910 in_p = 0, low = high = 0;
4911 else if (subset && lowequal)
4913 low = range_successor (high1);
4918 /* We are in the weird situation where high0 > high1 but
4919 high1 has no successor. Punt. */
4923 else if (! subset || highequal)
4926 high = range_predecessor (low1);
4930 /* low0 < low1 but low1 has no predecessor. Punt. */
4938 else if (! in0_p && in1_p)
4940 /* If they don't overlap, the result is the second range. If the second
4941 is a subset of the first, the result is false. Otherwise,
4942 the range starts just after the first range and ends at the
4943 end of the second. */
4945 in_p = 1, low = low1, high = high1;
4946 else if (subset || highequal)
4947 in_p = 0, low = high = 0;
4950 low = range_successor (high0);
4955 /* high1 > high0 but high0 has no successor. Punt. */
/* Case 4 (head elided): ! in0_p && ! in1_p — excluding both ranges.  */
4963 /* The case where we are excluding both ranges. Here the complex case
4964 is if they don't overlap. In that case, the only time we have a
4965 range is if they are adjacent. If the second is a subset of the
4966 first, the result is the first. Otherwise, the range to exclude
4967 starts at the beginning of the first range and ends at the end of the
4971 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4972 range_successor (high0),
4974 in_p = 0, low = low0, high = high1;
4977 /* Canonicalize - [min, x] into - [-, x]. */
4978 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4979 switch (TREE_CODE (TREE_TYPE (low0)))
/* Only canonicalize when the type's precision fills its mode, so the
   mode's extremes really are the type's extremes.  */
4982 if (TYPE_PRECISION (TREE_TYPE (low0))
4983 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4987 if (tree_int_cst_equal (low0,
4988 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4992 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4993 && integer_zerop (low0))
5000 /* Canonicalize - [x, max] into - [x, -]. */
5001 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5002 switch (TREE_CODE (TREE_TYPE (high1)))
5005 if (TYPE_PRECISION (TREE_TYPE (high1))
5006 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5010 if (tree_int_cst_equal (high1,
5011 TYPE_MAX_VALUE (TREE_TYPE (high1))))
/* Unsigned max is detected as value + 1 == 0 (wrap-around).  */
5015 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5016 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5018 integer_one_node, 1)))
5025 /* The ranges might be also adjacent between the maximum and
5026 minimum values of the given type. For
5027 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5028 return + [x + 1, y - 1]. */
5029 if (low0 == 0 && high1 == 0)
5031 low = range_successor (high0);
5032 high = range_predecessor (low1);
5033 if (low == 0 || high == 0)
/* Remaining non-adjacent fall-backs.  */
5043 in_p = 0, low = low0, high = high0;
5045 in_p = 0, low = low0, high = high1;
5048 *pin_p = in_p, *plow = low, *phigh = high;
/* NOTE(review): this chunk is a lossy extraction; the leading numerals on
   each line are the upstream file's own line numbers, and several interior
   lines (braces, switch/case labels, declarations) are missing from this
   view.  Added comments describe only what the visible lines establish;
   confirm details against the upstream fold-const.c.  */
5053 /* Subroutine of fold, looking inside expressions of the form
5054 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5055 of the COND_EXPR. This function is being used also to optimize
5056 A op B ? C : A, by reversing the comparison first.
5058 Return a folded expression whose code is not a COND_EXPR
5059 anymore, or NULL_TREE if no folding opportunity is found. */
5062 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
/* Decompose the comparison ARG0 into its code and its two operands;
   ARG1's type is kept for conversions below.  */
5064 enum tree_code comp_code = TREE_CODE (arg0);
5065 tree arg00 = TREE_OPERAND (arg0, 0);
5066 tree arg01 = TREE_OPERAND (arg0, 1);
5067 tree arg1_type = TREE_TYPE (arg1);
5073 /* If we have A op 0 ? A : -A, consider applying the following
5076 A == 0? A : -A same as -A
5077 A != 0? A : -A same as A
5078 A >= 0? A : -A same as abs (A)
5079 A > 0? A : -A same as abs (A)
5080 A <= 0? A : -A same as -abs (A)
5081 A < 0? A : -A same as -abs (A)
5083 None of these transformations work for modes with signed
5084 zeros. If A is +/-0, the first two transformations will
5085 change the sign of the result (from +0 to -0, or vice
5086 versa). The last four will fix the sign of the result,
5087 even though the original expressions could be positive or
5088 negative, depending on the sign of A.
5090 Note that all these transformations are correct if A is
5091 NaN, since the two alternatives (A and -A) are also NaNs. */
5092 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5093 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5094 ? real_zerop (arg01)
5095 : integer_zerop (arg01))
5096 && ((TREE_CODE (arg2) == NEGATE_EXPR
5097 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5098 /* In the case that A is of the form X-Y, '-A' (arg2) may
5099 have already been folded to Y-X, check for that. */
5100 || (TREE_CODE (arg1) == MINUS_EXPR
5101 && TREE_CODE (arg2) == MINUS_EXPR
5102 && operand_equal_p (TREE_OPERAND (arg1, 0),
5103 TREE_OPERAND (arg2, 1), 0)
5104 && operand_equal_p (TREE_OPERAND (arg1, 1),
5105 TREE_OPERAND (arg2, 0), 0))))
/* NOTE(review): the switch on COMP_CODE and its case labels are not
   visible in this extraction; the returns below presumably correspond
   to the EQ/NE/GE/GT/LE/LT cases listed in the table above — TODO
   confirm against upstream.  */
5110 tem = fold_convert (arg1_type, arg1);
5111 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5114 return pedantic_non_lvalue (fold_convert (type, arg1));
5117 if (flag_trapping_math)
/* Build abs(A): force a signed type first so ABS_EXPR is meaningful.  */
5122 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5123 arg1 = fold_convert (signed_type_for
5124 (TREE_TYPE (arg1)), arg1);
5125 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5126 return pedantic_non_lvalue (fold_convert (type, tem));
5129 if (flag_trapping_math)
5133 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5134 arg1 = fold_convert (signed_type_for
5135 (TREE_TYPE (arg1)), arg1);
5136 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5137 return negate_expr (fold_convert (type, tem));
5139 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5143 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5144 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5145 both transformations are correct when A is NaN: A != 0
5146 is then true, and A == 0 is false. */
5148 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5149 && integer_zerop (arg01) && integer_zerop (arg2))
5151 if (comp_code == NE_EXPR)
5152 return pedantic_non_lvalue (fold_convert (type, arg1));
5153 else if (comp_code == EQ_EXPR)
5154 return build_int_cst (type, 0);
5157 /* Try some transformations of A op B ? A : B.
5159 A == B? A : B same as B
5160 A != B? A : B same as A
5161 A >= B? A : B same as max (A, B)
5162 A > B? A : B same as max (B, A)
5163 A <= B? A : B same as min (A, B)
5164 A < B? A : B same as min (B, A)
5166 As above, these transformations don't work in the presence
5167 of signed zeros. For example, if A and B are zeros of
5168 opposite sign, the first two transformations will change
5169 the sign of the result. In the last four, the original
5170 expressions give different results for (A=+0, B=-0) and
5171 (A=-0, B=+0), but the transformed expressions do not.
5173 The first two transformations are correct if either A or B
5174 is a NaN. In the first transformation, the condition will
5175 be false, and B will indeed be chosen. In the case of the
5176 second transformation, the condition A != B will be true,
5177 and A will be chosen.
5179 The conversions to max() and min() are not correct if B is
5180 a number and A is not. The conditions in the original
5181 expressions will be false, so all four give B. The min()
5182 and max() versions would give a NaN instead. */
5183 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5184 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5185 /* Avoid these transformations if the COND_EXPR may be used
5186 as an lvalue in the C++ front-end. PR c++/19199. */
5188 || (strcmp (lang_hooks.name, "GNU C++") != 0
5189 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5190 || ! maybe_lvalue_p (arg1)
5191 || ! maybe_lvalue_p (arg2)))
5193 tree comp_op0 = arg00;
5194 tree comp_op1 = arg01;
5195 tree comp_type = TREE_TYPE (comp_op0);
5197 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5198 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5208 return pedantic_non_lvalue (fold_convert (type, arg2));
5210 return pedantic_non_lvalue (fold_convert (type, arg1));
5215 /* In C++ a ?: expression can be an lvalue, so put the
5216 operand which will be used if they are equal first
5217 so that we can convert this back to the
5218 corresponding COND_EXPR. */
5219 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
/* LE/LT/UNLE/UNLT family: rewrite as MIN_EXPR, ordering the operands
   so the "equal" operand comes first (see comment above).  */
5221 comp_op0 = fold_convert (comp_type, comp_op0);
5222 comp_op1 = fold_convert (comp_type, comp_op1);
5223 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5224 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5225 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5226 return pedantic_non_lvalue (fold_convert (type, tem));
5233 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
/* GE/GT/UNGE/UNGT family: symmetric rewrite as MAX_EXPR.  */
5235 comp_op0 = fold_convert (comp_type, comp_op0);
5236 comp_op1 = fold_convert (comp_type, comp_op1);
5237 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5238 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5239 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5240 return pedantic_non_lvalue (fold_convert (type, tem));
5244 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5245 return pedantic_non_lvalue (fold_convert (type, arg2));
5248 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5249 return pedantic_non_lvalue (fold_convert (type, arg1));
5252 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5257 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5258 we might still be able to simplify this. For example,
5259 if C1 is one less or one more than C2, this might have started
5260 out as a MIN or MAX and been transformed by this function.
5261 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5263 if (INTEGRAL_TYPE_P (type)
5264 && TREE_CODE (arg01) == INTEGER_CST
5265 && TREE_CODE (arg2) == INTEGER_CST)
5269 /* We can replace A with C1 in this case. */
5270 arg1 = fold_convert (type, arg01);
5271 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5274 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5275 MIN_EXPR, to preserve the signedness of the comparison. */
5276 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5278 && operand_equal_p (arg01,
5279 const_binop (PLUS_EXPR, arg2,
5280 build_int_cst (type, 1), 0),
5283 tem = fold_build2 (MIN_EXPR, TREE_TYPE (arg00), arg00,
5284 fold_convert (TREE_TYPE (arg00), arg2));
5285 return pedantic_non_lvalue (fold_convert (type, tem));
5290 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5292 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5294 && operand_equal_p (arg01,
5295 const_binop (MINUS_EXPR, arg2,
5296 build_int_cst (type, 1), 0),
5299 tem = fold_build2 (MIN_EXPR, TREE_TYPE (arg00), arg00,
5300 fold_convert (TREE_TYPE (arg00), arg2));
5301 return pedantic_non_lvalue (fold_convert (type, tem));
5306 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5307 MAX_EXPR, to preserve the signedness of the comparison. */
5308 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5310 && operand_equal_p (arg01,
5311 const_binop (MINUS_EXPR, arg2,
5312 build_int_cst (type, 1), 0),
5315 tem = fold_build2 (MAX_EXPR, TREE_TYPE (arg00), arg00,
5316 fold_convert (TREE_TYPE (arg00), arg2));
5317 return pedantic_non_lvalue (fold_convert (type, tem));
5322 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5323 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5325 && operand_equal_p (arg01,
5326 const_binop (PLUS_EXPR, arg2,
5327 build_int_cst (type, 1), 0),
5330 tem = fold_build2 (MAX_EXPR, TREE_TYPE (arg00), arg00,
5331 fold_convert (TREE_TYPE (arg00), arg2));
5332 return pedantic_non_lvalue (fold_convert (type, tem));
/* NOTE(review): lossy extraction — the LOGICAL_OP_NON_SHORT_CIRCUIT macro
   below is truncated (its continuation lines are missing), and interior
   lines of fold_range_test (braces, some call arguments) are absent.
   Do not insert anything between the backslash-continued macro lines.  */
5346 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5347 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5348 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5352 /* EXP is some logical combination of boolean tests. See if we can
5353 merge it into some range test. Return the new tree if so. */
5356 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
/* Decompose both operands into ranges; make_range also reports whether
   the simplification relied on signed overflow being undefined.  */
5358 int or_op = (code == TRUTH_ORIF_EXPR
5359 || code == TRUTH_OR_EXPR);
5360 int in0_p, in1_p, in_p;
5361 tree low0, low1, low, high0, high1, high;
5362 bool strict_overflow_p = false;
5363 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5364 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5366 const char * const warnmsg = G_("assuming signed overflow does not occur "
5367 "when simplifying range test");
5369 /* If this is an OR operation, invert both sides; we will invert
5370 again at the end. */
5372 in0_p = ! in0_p, in1_p = ! in1_p;
5374 /* If both expressions are the same, if we can merge the ranges, and we
5375 can build the range test, return it or it inverted. If one of the
5376 ranges is always true or always false, consider it to be the same
5377 expression as the other. */
5378 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5379 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5381 && 0 != (tem = (build_range_check (type,
5383 : rhs != 0 ? rhs : integer_zero_node,
5386 if (strict_overflow_p)
5387 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5388 return or_op ? invert_truthvalue (tem) : tem;
5391 /* On machines where the branch cost is expensive, if this is a
5392 short-circuited branch and the underlying object on both sides
5393 is the same, make a non-short-circuit operation. */
5394 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5395 && lhs != 0 && rhs != 0
5396 && (code == TRUTH_ANDIF_EXPR
5397 || code == TRUTH_ORIF_EXPR)
5398 && operand_equal_p (lhs, rhs, 0))
5400 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5401 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5402 which cases we can't do this. */
5403 if (simple_operand_p (lhs))
5404 return build2 (code == TRUTH_ANDIF_EXPR
5405 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5408 else if (lang_hooks.decls.global_bindings_p () == 0
5409 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Evaluate the common operand once via SAVE_EXPR, then build both
   range checks against that single evaluation.  */
5411 tree common = save_expr (lhs);
5413 if (0 != (lhs = build_range_check (type, common,
5414 or_op ? ! in0_p : in0_p,
5416 && (0 != (rhs = build_range_check (type, common,
5417 or_op ? ! in1_p : in1_p,
5420 if (strict_overflow_p)
5421 fold_overflow_warning (warnmsg,
5422 WARN_STRICT_OVERFLOW_COMPARISON);
5423 return build2 (code == TRUTH_ANDIF_EXPR
5424 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5433 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5434 bit value. Arrange things so the extra bits will be set to zero if and
5435 only if C is signed-extended to its full width. If MASK is nonzero,
5436 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5439 unextend (tree c, int p, int unsignedp, tree mask)
/* NOTE(review): lossy extraction — the declaration of TEMP, the function
   braces, and the early "return c" body appear to be missing here;
   confirm against the upstream fold-const.c.  */
5441 tree type = TREE_TYPE (c);
5442 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Nothing to do when C already occupies the whole mode or is unsigned.  */
5445 if (p == modesize || unsignedp)
5448 /* We work by getting just the sign bit into the low-order bit, then
5449 into the high-order bit, then sign-extend. We then XOR that value
5451 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5452 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5454 /* We must use a signed type in order to get an arithmetic right shift.
5455 However, we must also avoid introducing accidental overflows, so that
5456 a subsequent call to integer_zerop will work. Hence we must
5457 do the type conversion here. At this point, the constant is either
5458 zero or one, and the conversion to a signed type can never overflow.
5459 We could get an overflow if this conversion is done anywhere else. */
5460 if (TYPE_UNSIGNED (type))
5461 temp = fold_convert (signed_type_for (type), temp);
5463 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5464 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5466 temp = const_binop (BIT_AND_EXPR, temp,
5467 fold_convert (TREE_TYPE (c), mask), 0);
5468 /* If necessary, convert the type back to match the type of C. */
5469 if (TYPE_UNSIGNED (type))
5470 temp = fold_convert (type, temp);
5472 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
/* NOTE(review): lossy extraction — the leading numerals are the upstream
   file's line numbers; many interior lines (braces, trailing call
   arguments, goto/labels, some conditions) are missing from this view.
   Added comments describe only what the visible lines establish.  */
5475 /* Find ways of folding logical expressions of LHS and RHS:
5476 Try to merge two comparisons to the same innermost item.
5477 Look for range tests like "ch >= '0' && ch <= '9'".
5478 Look for combinations of simple terms on machines with expensive branches
5479 and evaluate the RHS unconditionally.
5481 For example, if we have p->a == 2 && p->b == 4 and we can make an
5482 object large enough to span both A and B, we can do this with a comparison
5483 against the object ANDed with the a mask.
5485 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5486 operations to do this with one comparison.
5488 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5489 function and the one above.
5491 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5492 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5494 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5497 We return the simplified tree or 0 if no optimization is possible. */
5500 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5502 /* If this is the "or" of two comparisons, we can do something if
5503 the comparisons are NE_EXPR. If this is the "and", we can do something
5504 if the comparisons are EQ_EXPR. I.e.,
5505 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5507 WANTED_CODE is this operation code. For single bit fields, we can
5508 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5509 comparison for one-bit fields. */
/* Naming convention for the locals below: the first letter selects the
   comparison (l = lhs, r = rhs) and the second selects its operand
   (l = left, r = right); e.g. rl_arg is the left operand of RHS.  */
5511 enum tree_code wanted_code;
5512 enum tree_code lcode, rcode;
5513 tree ll_arg, lr_arg, rl_arg, rr_arg;
5514 tree ll_inner, lr_inner, rl_inner, rr_inner;
5515 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5516 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5517 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5518 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5519 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5520 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5521 enum machine_mode lnmode, rnmode;
5522 tree ll_mask, lr_mask, rl_mask, rr_mask;
5523 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5524 tree l_const, r_const;
5525 tree lntype, rntype, result;
5526 HOST_WIDE_INT first_bit, end_bit;
5528 tree orig_lhs = lhs, orig_rhs = rhs;
5529 enum tree_code orig_code = code;
5531 /* Start by getting the comparison codes. Fail if anything is volatile.
5532 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5533 it were surrounded with a NE_EXPR. */
5535 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5538 lcode = TREE_CODE (lhs);
5539 rcode = TREE_CODE (rhs);
5541 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5543 lhs = build2 (NE_EXPR, truth_type, lhs,
5544 build_int_cst (TREE_TYPE (lhs), 0));
5548 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5550 rhs = build2 (NE_EXPR, truth_type, rhs,
5551 build_int_cst (TREE_TYPE (rhs), 0));
5555 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5556 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5559 ll_arg = TREE_OPERAND (lhs, 0);
5560 lr_arg = TREE_OPERAND (lhs, 1);
5561 rl_arg = TREE_OPERAND (rhs, 0);
5562 rr_arg = TREE_OPERAND (rhs, 1);
5564 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5565 if (simple_operand_p (ll_arg)
5566 && simple_operand_p (lr_arg))
5569 if (operand_equal_p (ll_arg, rl_arg, 0)
5570 && operand_equal_p (lr_arg, rr_arg, 0))
5572 result = combine_comparisons (code, lcode, rcode,
5573 truth_type, ll_arg, lr_arg);
5577 else if (operand_equal_p (ll_arg, rr_arg, 0)
5578 && operand_equal_p (lr_arg, rl_arg, 0))
/* Same operands but swapped on the RHS comparison: swap its code
   before combining.  */
5580 result = combine_comparisons (code, lcode,
5581 swap_tree_comparison (rcode),
5582 truth_type, ll_arg, lr_arg);
5588 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5589 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5591 /* If the RHS can be evaluated unconditionally and its operands are
5592 simple, it wins to evaluate the RHS unconditionally on machines
5593 with expensive branches. In this case, this isn't a comparison
5594 that can be merged. Avoid doing this if the RHS is a floating-point
5595 comparison since those can trap. */
5597 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5599 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5600 && simple_operand_p (rl_arg)
5601 && simple_operand_p (rr_arg))
5603 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5604 if (code == TRUTH_OR_EXPR
5605 && lcode == NE_EXPR && integer_zerop (lr_arg)
5606 && rcode == NE_EXPR && integer_zerop (rr_arg)
5607 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5608 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5609 return build2 (NE_EXPR, truth_type,
5610 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5612 build_int_cst (TREE_TYPE (ll_arg), 0));
5614 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5615 if (code == TRUTH_AND_EXPR
5616 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5617 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5618 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5619 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5620 return build2 (EQ_EXPR, truth_type,
5621 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5623 build_int_cst (TREE_TYPE (ll_arg), 0));
5625 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5627 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5628 return build2 (code, truth_type, lhs, rhs);
5633 /* See if the comparisons can be merged. Then get all the parameters for
5636 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5637 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decode all four comparison operands into (inner object, bit field
   position/size, mode, signedness, mask) tuples.  */
5641 ll_inner = decode_field_reference (ll_arg,
5642 &ll_bitsize, &ll_bitpos, &ll_mode,
5643 &ll_unsignedp, &volatilep, &ll_mask,
5645 lr_inner = decode_field_reference (lr_arg,
5646 &lr_bitsize, &lr_bitpos, &lr_mode,
5647 &lr_unsignedp, &volatilep, &lr_mask,
5649 rl_inner = decode_field_reference (rl_arg,
5650 &rl_bitsize, &rl_bitpos, &rl_mode,
5651 &rl_unsignedp, &volatilep, &rl_mask,
5653 rr_inner = decode_field_reference (rr_arg,
5654 &rr_bitsize, &rr_bitpos, &rr_mode,
5655 &rr_unsignedp, &volatilep, &rr_mask,
5658 /* It must be true that the inner operation on the lhs of each
5659 comparison must be the same if we are to be able to do anything.
5660 Then see if we have constants. If not, the same must be true for
5662 if (volatilep || ll_inner == 0 || rl_inner == 0
5663 || ! operand_equal_p (ll_inner, rl_inner, 0))
5666 if (TREE_CODE (lr_arg) == INTEGER_CST
5667 && TREE_CODE (rr_arg) == INTEGER_CST)
5668 l_const = lr_arg, r_const = rr_arg;
5669 else if (lr_inner == 0 || rr_inner == 0
5670 || ! operand_equal_p (lr_inner, rr_inner, 0))
5673 l_const = r_const = 0;
5675 /* If either comparison code is not correct for our logical operation,
5676 fail. However, we can convert a one-bit comparison against zero into
5677 the opposite comparison against that bit being set in the field. */
5679 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5680 if (lcode != wanted_code)
5682 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5684 /* Make the left operand unsigned, since we are only interested
5685 in the value of one bit. Otherwise we are doing the wrong
5694 /* This is analogous to the code for l_const above. */
5695 if (rcode != wanted_code)
5697 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5706 /* See if we can find a mode that contains both fields being compared on
5707 the left. If we can't, fail. Otherwise, update all constants and masks
5708 to be relative to a field of that size. */
5709 first_bit = MIN (ll_bitpos, rl_bitpos);
5710 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5711 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5712 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5714 if (lnmode == VOIDmode)
5717 lnbitsize = GET_MODE_BITSIZE (lnmode);
5718 lnbitpos = first_bit & ~ (lnbitsize - 1);
5719 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5720 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5722 if (BYTES_BIG_ENDIAN)
5724 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5725 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5728 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5729 size_int (xll_bitpos), 0);
5730 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5731 size_int (xrl_bitpos), 0);
/* Position the constants inside the wider field; if set bits fall
   outside the field the comparison has a known constant result.  */
5735 l_const = fold_convert (lntype, l_const);
5736 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5737 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5738 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5739 fold_build1 (BIT_NOT_EXPR,
5743 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5745 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5750 r_const = fold_convert (lntype, r_const);
5751 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5752 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5753 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5754 fold_build1 (BIT_NOT_EXPR,
5758 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5760 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5764 /* If the right sides are not constant, do the same for it. Also,
5765 disallow this optimization if a size or signedness mismatch occurs
5766 between the left and right sides. */
5769 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5770 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5771 /* Make sure the two fields on the right
5772 correspond to the left without being swapped. */
5773 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5776 first_bit = MIN (lr_bitpos, rr_bitpos);
5777 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5778 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5779 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5781 if (rnmode == VOIDmode)
5784 rnbitsize = GET_MODE_BITSIZE (rnmode);
5785 rnbitpos = first_bit & ~ (rnbitsize - 1);
5786 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5787 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5789 if (BYTES_BIG_ENDIAN)
5791 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5792 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5795 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5796 size_int (xlr_bitpos), 0);
5797 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5798 size_int (xrr_bitpos), 0);
5800 /* Make a mask that corresponds to both fields being compared.
5801 Do this for both items being compared. If the operands are the
5802 same size and the bits being compared are in the same position
5803 then we can do this by masking both and comparing the masked
5805 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5806 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5807 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5809 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5810 ll_unsignedp || rl_unsignedp);
5811 if (! all_ones_mask_p (ll_mask, lnbitsize))
5812 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5814 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5815 lr_unsignedp || rr_unsignedp);
5816 if (! all_ones_mask_p (lr_mask, rnbitsize))
5817 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5819 return build2 (wanted_code, truth_type, lhs, rhs);
5822 /* There is still another way we can do something: If both pairs of
5823 fields being compared are adjacent, we may be able to make a wider
5824 field containing them both.
5826 Note that we still must mask the lhs/rhs expressions. Furthermore,
5827 the mask must be shifted to account for the shift done by
5828 make_bit_field_ref. */
5829 if ((ll_bitsize + ll_bitpos == rl_bitpos
5830 && lr_bitsize + lr_bitpos == rr_bitpos)
5831 || (ll_bitpos == rl_bitpos + rl_bitsize
5832 && lr_bitpos == rr_bitpos + rr_bitsize))
5836 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5837 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5838 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5839 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5841 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5842 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5843 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5844 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5846 /* Convert to the smaller type before masking out unwanted bits. */
5848 if (lntype != rntype)
5850 if (lnbitsize > rnbitsize)
5852 lhs = fold_convert (rntype, lhs);
5853 ll_mask = fold_convert (rntype, ll_mask);
5856 else if (lnbitsize < rnbitsize)
5858 rhs = fold_convert (lntype, rhs);
5859 lr_mask = fold_convert (lntype, lr_mask);
5864 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5865 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5867 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5868 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5870 return build2 (wanted_code, truth_type, lhs, rhs);
5876 /* Handle the case of comparisons with constants. If there is something in
5877 common between the masks, those bits of the constants must be the same.
5878 If not, the condition is always false. Test for this to avoid generating
5879 incorrect code below. */
5880 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5881 if (! integer_zerop (result)
5882 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5883 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5885 if (wanted_code == NE_EXPR)
5887 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5888 return constant_boolean_node (true, truth_type);
5892 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5893 return constant_boolean_node (false, truth_type);
5897 /* Construct the expression we will return. First get the component
5898 reference we will make. Unless the mask is all ones the width of
5899 that field, perform the mask operation. Then compare with the
5901 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5902 ll_unsignedp || rl_unsignedp);
5904 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5905 if (! all_ones_mask_p (ll_mask, lnbitsize))
5906 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5908 return build2 (wanted_code, truth_type, result,
5909 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
/* NOTE(review): lossy extraction — declarations of ARG0/INNER/COMP_CONST/
   MINMAX_CONST, the enclosing "switch (code)" statement, and several case
   labels and braces are missing from this view; the visible "case" labels
   below imply the switch.  Confirm against the upstream fold-const.c.  */
5912 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5916 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5919 enum tree_code op_code;
5922 int consts_equal, consts_lt;
5925 STRIP_SIGN_NOPS (arg0);
/* Pull apart MIN/MAX (inner, minmax_const) and compare its constant
   against the comparison constant COMP_CONST.  */
5927 op_code = TREE_CODE (arg0);
5928 minmax_const = TREE_OPERAND (arg0, 1);
5929 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5930 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5931 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5932 inner = TREE_OPERAND (arg0, 0);
5934 /* If something does not permit us to optimize, return the original tree. */
5935 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5936 || TREE_CODE (comp_const) != INTEGER_CST
5937 || TREE_OVERFLOW (comp_const)
5938 || TREE_CODE (minmax_const) != INTEGER_CST
5939 || TREE_OVERFLOW (minmax_const))
5942 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5943 and GT_EXPR, doing the rest with recursive calls using logical
5947 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* Reduce to the inverted comparison, then invert the result.  */
5949 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5952 return invert_truthvalue (tem);
/* GE_EXPR is handled as (== or >), built from two recursive calls.  */
5958 fold_build2 (TRUTH_ORIF_EXPR, type,
5959 optimize_minmax_comparison
5960 (EQ_EXPR, type, arg0, comp_const),
5961 optimize_minmax_comparison
5962 (GT_EXPR, type, arg0, comp_const));
5965 if (op_code == MAX_EXPR && consts_equal)
5966 /* MAX (X, 0) == 0 -> X <= 0 */
5967 return fold_build2 (LE_EXPR, type, inner, comp_const);
5969 else if (op_code == MAX_EXPR && consts_lt)
5970 /* MAX (X, 0) == 5 -> X == 5 */
5971 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5973 else if (op_code == MAX_EXPR)
5974 /* MAX (X, 0) == -1 -> false */
5975 return omit_one_operand (type, integer_zero_node, inner);
5977 else if (consts_equal)
5978 /* MIN (X, 0) == 0 -> X >= 0 */
5979 return fold_build2 (GE_EXPR, type, inner, comp_const);
5982 /* MIN (X, 0) == 5 -> false */
5983 return omit_one_operand (type, integer_zero_node, inner);
5986 /* MIN (X, 0) == -1 -> X == -1 */
5987 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5990 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5991 /* MAX (X, 0) > 0 -> X > 0
5992 MAX (X, 0) > 5 -> X > 5 */
5993 return fold_build2 (GT_EXPR, type, inner, comp_const);
5995 else if (op_code == MAX_EXPR)
5996 /* MAX (X, 0) > -1 -> true */
5997 return omit_one_operand (type, integer_one_node, inner);
5999 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6000 /* MIN (X, 0) > 0 -> false
6001 MIN (X, 0) > 5 -> false */
6002 return omit_one_operand (type, integer_zero_node, inner);
6005 /* MIN (X, 0) > -1 -> X > -1 */
6006 return fold_build2 (GT_EXPR, type, inner, comp_const);
6013 /* T is an integer expression that is being multiplied, divided, or taken a
6014 modulus (CODE says which and what kind of divide or modulus) by a
6015 constant C. See if we can eliminate that operation by folding it with
6016 other operations already in T. WIDE_TYPE, if non-null, is a type that
6017 should be used for the computation if wider than our type.
6019 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6020 (X * 2) + (Y * 4). We must, however, be assured that either the original
6021 expression would not overflow or that overflow is undefined for the type
6022 in the language in question.
6024 If we return a non-null expression, it is an equivalent form of the
6025 original computation, but need not be in the original type.
6027 We set *STRICT_OVERFLOW_P to true if the return values depends on
6028 signed overflow being undefined. Otherwise we do not change
6029 *STRICT_OVERFLOW_P. */
/* Entry point that wraps extract_muldiv_1 below: try to fold the
   operation (T CODE C), distributing CODE over T's sub-expressions.
   Returns an equivalent expression or NULL_TREE; may set
   *STRICT_OVERFLOW_P.  NOTE(review): interior lines of this listing
   are elided -- the static depth counter guarding the recursion is
   not visible here.  */
6032 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6033 bool *strict_overflow_p)
6035 /* To avoid exponential search depth, refuse to allow recursion past
6036 three levels. Beyond that (1) it's highly unlikely that we'll find
6037 something interesting and (2) we've probably processed it before
6038 when we built the inner expression. */
6047 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
/* Worker for extract_muldiv.  Attempt to distribute the operation
   (CODE by constant C) over the tree T, returning an equivalent
   expression or NULL_TREE.  CTYPE is the computation type: WIDE_TYPE
   when it is wider than T's type, else T's type.  Sets
   *STRICT_OVERFLOW_P when the result relies on signed overflow being
   undefined.  NOTE(review): this listing is elided -- the
   `switch (tcode)' header, several case labels, braces and early
   returns are not visible here.  */
6054 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6055 bool *strict_overflow_p)
6057 tree type = TREE_TYPE (t);
6058 enum tree_code tcode = TREE_CODE (t);
/* Compute in the wider of TYPE and WIDE_TYPE.  */
6059 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6060 > GET_MODE_SIZE (TYPE_MODE (type)))
6061 ? wide_type : type);
6063 int same_p = tcode == code;
6064 tree op0 = NULL_TREE, op1 = NULL_TREE;
6065 bool sub_strict_overflow_p;
6067 /* Don't deal with constants of zero here; they confuse the code below. */
6068 if (integer_zerop (c))
/* Pick up T's operand(s) for the cases below.  */
6071 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6072 op0 = TREE_OPERAND (t, 0);
6074 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6075 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6077 /* Note that we need not handle conditional operations here since fold
6078 already handles those cases. So just do arithmetic here. */
/* T is an INTEGER_CST here (case label elided from this listing).  */
6082 /* For a constant, we can always simplify if we are a multiply
6083 or (for divide and modulus) if it is a multiple of our constant. */
6084 if (code == MULT_EXPR
6085 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6086 return const_binop (code, fold_convert (ctype, t),
6087 fold_convert (ctype, c), 0);
6090 CASE_CONVERT: case NON_LVALUE_EXPR:
6091 /* If op0 is an expression ... */
6092 if ((COMPARISON_CLASS_P (op0)
6093 || UNARY_CLASS_P (op0)
6094 || BINARY_CLASS_P (op0)
6095 || VL_EXP_CLASS_P (op0)
6096 || EXPRESSION_CLASS_P (op0))
6097 /* ... and has wrapping overflow, and its type is smaller
6098 than ctype, then we cannot pass through as widening. */
6099 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6100 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6101 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6102 && (TYPE_PRECISION (ctype)
6103 > TYPE_PRECISION (TREE_TYPE (op0))))
6104 /* ... or this is a truncation (t is narrower than op0),
6105 then we cannot pass through this narrowing. */
6106 || (TYPE_PRECISION (type)
6107 < TYPE_PRECISION (TREE_TYPE (op0)))
6108 /* ... or signedness changes for division or modulus,
6109 then we cannot pass through this conversion. */
6110 || (code != MULT_EXPR
6111 && (TYPE_UNSIGNED (ctype)
6112 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6113 /* ... or has undefined overflow while the converted to
6114 type has not, we cannot do the operation in the inner type
6115 as that would introduce undefined overflow. */
6116 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6117 && !TYPE_OVERFLOW_UNDEFINED (type))))
6120 /* Pass the constant down and see if we can make a simplification. If
6121 we can, replace this expression with the inner simplification for
6122 possible later conversion to our or some other type. */
6123 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6124 && TREE_CODE (t2) == INTEGER_CST
6125 && !TREE_OVERFLOW (t2)
6126 && (0 != (t1 = extract_muldiv (op0, t2, code,
6128 ? ctype : NULL_TREE,
6129 strict_overflow_p))))
/* case ABS_EXPR (label elided from this listing).  */
6134 /* If widening the type changes it from signed to unsigned, then we
6135 must avoid building ABS_EXPR itself as unsigned. */
6136 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6138 tree cstype = (*signed_type_for) (ctype);
6139 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6142 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6143 return fold_convert (ctype, t1);
6147 /* If the constant is negative, we cannot simplify this. */
6148 if (tree_int_cst_sgn (c) == -1)
/* case NEGATE_EXPR (label elided): distribute into the negation.  */
6152 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6154 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6157 case MIN_EXPR: case MAX_EXPR:
6158 /* If widening the type changes the signedness, then we can't perform
6159 this optimization as that changes the result. */
6160 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6163 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6164 sub_strict_overflow_p = false;
6165 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6166 &sub_strict_overflow_p)) != 0
6167 && (t2 = extract_muldiv (op1, c, code, wide_type,
6168 &sub_strict_overflow_p)) != 0)
/* Applying a negative constant swaps MIN and MAX.  */
6170 if (tree_int_cst_sgn (c) < 0)
6171 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6172 if (sub_strict_overflow_p)
6173 *strict_overflow_p = true;
6174 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6175 fold_convert (ctype, t2));
6179 case LSHIFT_EXPR: case RSHIFT_EXPR:
6180 /* If the second operand is constant, this is a multiplication
6181 or floor division, by a power of two, so we can treat it that
6182 way unless the multiplier or divisor overflows. Signed
6183 left-shift overflow is implementation-defined rather than
6184 undefined in C90, so do not convert signed left shift into
6186 if (TREE_CODE (op1) == INTEGER_CST
6187 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6188 /* const_binop may not detect overflow correctly,
6189 so check for it explicitly here. */
6190 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6191 && TREE_INT_CST_HIGH (op1) == 0
6192 && 0 != (t1 = fold_convert (ctype,
6193 const_binop (LSHIFT_EXPR,
6196 && !TREE_OVERFLOW (t1))
/* Re-express the shift as *2^N or /2^N and retry.  */
6197 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6198 ? MULT_EXPR : FLOOR_DIV_EXPR,
6199 ctype, fold_convert (ctype, op0), t1),
6200 c, code, wide_type, strict_overflow_p);
6203 case PLUS_EXPR: case MINUS_EXPR:
6204 /* See if we can eliminate the operation on both sides. If we can, we
6205 can return a new PLUS or MINUS. If we can't, the only remaining
6206 cases where we can do anything are if the second operand is a
6208 sub_strict_overflow_p = false;
6209 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6210 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6211 if (t1 != 0 && t2 != 0
6212 && (code == MULT_EXPR
6213 /* If not multiplication, we can only do this if both operands
6214 are divisible by c. */
6215 || (multiple_of_p (ctype, op0, c)
6216 && multiple_of_p (ctype, op1, c))))
6218 if (sub_strict_overflow_p)
6219 *strict_overflow_p = true;
6220 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6221 fold_convert (ctype, t2));
6224 /* If this was a subtraction, negate OP1 and set it to be an addition.
6225 This simplifies the logic below. */
6226 if (tcode == MINUS_EXPR)
6227 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6229 if (TREE_CODE (op1) != INTEGER_CST)
6232 /* If either OP1 or C are negative, this optimization is not safe for
6233 some of the division and remainder types while for others we need
6234 to change the code. */
6235 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6237 if (code == CEIL_DIV_EXPR)
6238 code = FLOOR_DIV_EXPR;
6239 else if (code == FLOOR_DIV_EXPR)
6240 code = CEIL_DIV_EXPR;
6241 else if (code != MULT_EXPR
6242 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6246 /* If it's a multiply or a division/modulus operation of a multiple
6247 of our constant, do the operation and verify it doesn't overflow. */
6248 if (code == MULT_EXPR
6249 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6251 op1 = const_binop (code, fold_convert (ctype, op1),
6252 fold_convert (ctype, c), 0);
6253 /* We allow the constant to overflow with wrapping semantics. */
6255 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6261 /* If we have an unsigned type that is not a sizetype, we cannot widen
6262 the operation since it will change the result if the original
6263 computation overflowed. */
6264 if (TYPE_UNSIGNED (ctype)
6265 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6269 /* If we were able to eliminate our operation from the first side,
6270 apply our operation to the second side and reform the PLUS. */
6271 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6272 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6274 /* The last case is if we are a multiply. In that case, we can
6275 apply the distributive law to commute the multiply and addition
6276 if the multiplication of the constants doesn't overflow. */
6277 if (code == MULT_EXPR)
6278 return fold_build2 (tcode, ctype,
6279 fold_build2 (code, ctype,
6280 fold_convert (ctype, op0),
6281 fold_convert (ctype, c)),
/* case MULT_EXPR (label elided from this listing).  */
6287 /* We have a special case here if we are doing something like
6288 (C * 8) % 4 since we know that's zero. */
6289 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6290 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6291 /* If the multiplication can overflow we cannot optimize this.
6292 ??? Until we can properly mark individual operations as
6293 not overflowing we need to treat sizetype special here as
6294 stor-layout relies on this optimization to make
6295 DECL_FIELD_BIT_OFFSET always a constant. */
6296 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6297 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6298 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6299 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6300 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6302 *strict_overflow_p = true;
6303 return omit_one_operand (type, integer_zero_node, op0);
6306 /* ... fall through ... */
6308 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6309 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6310 /* If we can extract our operation from the LHS, do so and return a
6311 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6312 do something only if the second operand is a constant. */
6314 && (t1 = extract_muldiv (op0, c, code, wide_type,
6315 strict_overflow_p)) != 0)
6316 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6317 fold_convert (ctype, op1));
6318 else if (tcode == MULT_EXPR && code == MULT_EXPR
6319 && (t1 = extract_muldiv (op1, c, code, wide_type,
6320 strict_overflow_p)) != 0)
6321 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6322 fold_convert (ctype, t1));
6323 else if (TREE_CODE (op1) != INTEGER_CST)
6326 /* If these are the same operation types, we can associate them
6327 assuming no overflow. */
6329 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
6330 fold_convert (ctype, c), 1))
6331 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6332 TREE_INT_CST_HIGH (t1),
6333 (TYPE_UNSIGNED (ctype)
6334 && tcode != MULT_EXPR) ? -1 : 1,
6335 TREE_OVERFLOW (t1)))
6336 && !TREE_OVERFLOW (t1))
6337 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6339 /* If these operations "cancel" each other, we have the main
6340 optimizations of this pass, which occur when either constant is a
6341 multiple of the other, in which case we replace this with either an
6342 operation or CODE or TCODE.
6344 If we have an unsigned type that is not a sizetype, we cannot do
6345 this since it will change the result if the original computation
6347 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6348 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6349 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6350 || (tcode == MULT_EXPR
6351 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6352 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6353 && code != MULT_EXPR)))
/* OP1 is a multiple of C: keep TCODE with constant OP1/C.  */
6355 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6357 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6358 *strict_overflow_p = true;
6359 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6360 fold_convert (ctype,
6361 const_binop (TRUNC_DIV_EXPR,
/* C is a multiple of OP1: keep CODE with constant C/OP1.  */
6364 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6366 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6367 *strict_overflow_p = true;
6368 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6369 fold_convert (ctype,
6370 const_binop (TRUNC_DIV_EXPR,
6383 /* Return a node which has the indicated constant VALUE (either 0 or
6384 1), and is of the indicated TYPE. */
/* Build the boolean constant VALUE (0 or 1) in TYPE, reusing the
   shared nodes for the two common result types.  */
6387 constant_boolean_node (int value, tree type)
6389 if (type == integer_type_node)
6390 return value ? integer_one_node : integer_zero_node;
6391 else if (type == boolean_type_node)
6392 return value ? boolean_true_node : boolean_false_node;
/* Any other type: build a fresh integer constant of that type.  */
6394 return build_int_cst (type, value);
6398 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6399 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6400 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6401 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6402 COND is the first argument to CODE; otherwise (as in the example
6403 given here), it is the second argument. TYPE is the type of the
6404 original expression. Return NULL_TREE if no simplification is
/* Push binary operation CODE inside the conditional COND; ARG is the
   non-conditional operand (must be TREE_CONSTANT to be worthwhile).
   COND_FIRST_P says whether COND was the first operand of CODE.
   NOTE(review): several early-exit `return NULL_TREE;' lines and
   braces are elided from this listing.  */
6408 fold_binary_op_with_conditional_arg (enum tree_code code,
6409 tree type, tree op0, tree op1,
6410 tree cond, tree arg, int cond_first_p)
6412 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6413 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6414 tree test, true_value, false_value;
6415 tree lhs = NULL_TREE;
6416 tree rhs = NULL_TREE;
6418 /* This transformation is only worthwhile if we don't have to wrap
6419 arg in a SAVE_EXPR, and the operation can be simplified on at least
6420 one of the branches once it's pushed inside the COND_EXPR. */
6421 if (!TREE_CONSTANT (arg))
6424 if (TREE_CODE (cond) == COND_EXPR)
6426 test = TREE_OPERAND (cond, 0);
6427 true_value = TREE_OPERAND (cond, 1);
6428 false_value = TREE_OPERAND (cond, 2);
6429 /* If this operand throws an expression, then it does not make
6430 sense to try to perform a logical or arithmetic operation
6432 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6434 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* COND is a bare comparison: treat it as COND ? true : false.  */
6439 tree testtype = TREE_TYPE (cond);
6441 true_value = constant_boolean_node (true, testtype);
6442 false_value = constant_boolean_node (false, testtype);
6445 arg = fold_convert (arg_type, arg);
/* Apply CODE to each arm, honoring the original operand order.  */
6448 true_value = fold_convert (cond_type, true_value);
6450 lhs = fold_build2 (code, type, true_value, arg);
6452 lhs = fold_build2 (code, type, arg, true_value);
6456 false_value = fold_convert (cond_type, false_value);
6458 rhs = fold_build2 (code, type, false_value, arg);
6460 rhs = fold_build2 (code, type, arg, false_value);
6463 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6464 return fold_convert (type, test);
6468 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6470 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6471 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6472 ADDEND is the same as X.
6474 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6475 and finite. The problematic cases are when X is zero, and its mode
6476 has signed zeros. In the case of rounding towards -infinity,
6477 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6478 modes, X + 0 is not the same as X because -0 + 0 is 0. */
/* See the block comment above: decide whether adding (or, with
   NEGATE, subtracting) the real constant ADDEND of TYPE is a no-op.
   NOTE(review): the `return false;'/`return true;' lines after each
   test are elided from this listing.  */
6481 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6483 if (!real_zerop (addend))
6486 /* Don't allow the fold with -fsignaling-nans. */
6487 if (HONOR_SNANS (TYPE_MODE (type)))
6490 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6491 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6494 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6495 if (TREE_CODE (addend) == REAL_CST
6496 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6499 /* The mode has signed zeros, and we have to honor their sign.
6500 In this situation, there is only one case we can return true for.
6501 X - 0 is the same as X unless rounding towards -infinity is
6503 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6506 /* Subroutine of fold() that checks comparisons of built-in math
6507 functions against real constants.
6509 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6510 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6511 is the type of the result and ARG0 and ARG1 are the operands of the
6512 comparison. ARG1 must be a TREE_REAL_CST.
6514 The function returns the constant folded tree if a simplification
6515 can be made, and NULL_TREE otherwise. */
/* Fold a comparison of a math builtin call against a real constant;
   only the sqrt family is handled here (see the block comment above).
   NOTE(review): braces, some local declarations and the final
   `return NULL_TREE;' are elided from this listing.  */
6518 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6519 tree type, tree arg0, tree arg1)
6523 if (BUILTIN_SQRT_P (fcode))
6525 tree arg = CALL_EXPR_ARG (arg0, 0);
6526 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6528 c = TREE_REAL_CST (arg1);
6529 if (REAL_VALUE_NEGATIVE (c))
6531 /* sqrt(x) < y is always false, if y is negative. */
6532 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6533 return omit_one_operand (type, integer_zero_node, arg);
6535 /* sqrt(x) > y is always true, if y is negative and we
6536 don't care about NaNs, i.e. negative values of x. */
6537 if (code == NE_EXPR || !HONOR_NANS (mode))
6538 return omit_one_operand (type, integer_one_node, arg);
6540 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6541 return fold_build2 (GE_EXPR, type, arg,
6542 build_real (TREE_TYPE (arg), dconst0));
6544 else if (code == GT_EXPR || code == GE_EXPR)
/* Square the bound: sqrt(x) OP c becomes x OP c*c.  */
6548 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6549 real_convert (&c2, mode, &c2);
6551 if (REAL_VALUE_ISINF (c2))
6553 /* sqrt(x) > y is x == +Inf, when y is very large. */
6554 if (HONOR_INFINITIES (mode))
6555 return fold_build2 (EQ_EXPR, type, arg,
6556 build_real (TREE_TYPE (arg), c2));
6558 /* sqrt(x) > y is always false, when y is very large
6559 and we don't care about infinities. */
6560 return omit_one_operand (type, integer_zero_node, arg);
6563 /* sqrt(x) > c is the same as x > c*c. */
6564 return fold_build2 (code, type, arg,
6565 build_real (TREE_TYPE (arg), c2));
6567 else if (code == LT_EXPR || code == LE_EXPR)
6571 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6572 real_convert (&c2, mode, &c2);
6574 if (REAL_VALUE_ISINF (c2))
6576 /* sqrt(x) < y is always true, when y is a very large
6577 value and we don't care about NaNs or Infinities. */
6578 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6579 return omit_one_operand (type, integer_one_node, arg);
6581 /* sqrt(x) < y is x != +Inf when y is very large and we
6582 don't care about NaNs. */
6583 if (! HONOR_NANS (mode))
6584 return fold_build2 (NE_EXPR, type, arg,
6585 build_real (TREE_TYPE (arg), c2));
6587 /* sqrt(x) < y is x >= 0 when y is very large and we
6588 don't care about Infinities. */
6589 if (! HONOR_INFINITIES (mode))
6590 return fold_build2 (GE_EXPR, type, arg,
6591 build_real (TREE_TYPE (arg), dconst0));
6593 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6594 if (lang_hooks.decls.global_bindings_p () != 0
6595 || CONTAINS_PLACEHOLDER_P (arg))
/* Safe to use save_expr: ARG is evaluated twice below.  */
6598 arg = save_expr (arg);
6599 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6600 fold_build2 (GE_EXPR, type, arg,
6601 build_real (TREE_TYPE (arg),
6603 fold_build2 (NE_EXPR, type, arg,
6604 build_real (TREE_TYPE (arg),
6608 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6609 if (! HONOR_NANS (mode))
6610 return fold_build2 (code, type, arg,
6611 build_real (TREE_TYPE (arg), c2));
6613 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6614 if (lang_hooks.decls.global_bindings_p () == 0
6615 && ! CONTAINS_PLACEHOLDER_P (arg))
6617 arg = save_expr (arg);
6618 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6619 fold_build2 (GE_EXPR, type, arg,
6620 build_real (TREE_TYPE (arg),
6622 fold_build2 (code, type, arg,
6623 build_real (TREE_TYPE (arg),
6632 /* Subroutine of fold() that optimizes comparisons against Infinities,
6633 either +Inf or -Inf.
6635 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6636 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6637 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6639 The function returns the constant folded tree if a simplification
6640 can be made, and NULL_TREE otherwise. */
/* Fold comparison ARG0 CODE ARG1 where ARG1 is +Inf or -Inf (see the
   block comment above).  NOTE(review): the `switch (code)' header,
   case labels, braces and the final default return are elided from
   this listing.  */
6643 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6645 enum machine_mode mode;
6646 REAL_VALUE_TYPE max;
6650 mode = TYPE_MODE (TREE_TYPE (arg0));
6652 /* For negative infinity swap the sense of the comparison. */
6653 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6655 code = swap_tree_comparison (code);
6660 /* x > +Inf is always false, if we ignore sNaNs. */
6661 if (HONOR_SNANS (mode))
6663 return omit_one_operand (type, integer_zero_node, arg0);
6666 /* x <= +Inf is always true, if we don't care about NaNs. */
6667 if (! HONOR_NANS (mode))
6668 return omit_one_operand (type, integer_one_node, arg0);
6670 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6671 if (lang_hooks.decls.global_bindings_p () == 0
6672 && ! CONTAINS_PLACEHOLDER_P (arg0))
6674 arg0 = save_expr (arg0);
6675 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6681 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6682 real_maxval (&max, neg, mode);
6683 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6684 arg0, build_real (TREE_TYPE (arg0), max));
6687 /* x < +Inf is always equal to x <= DBL_MAX. */
6688 real_maxval (&max, neg, mode);
6689 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6690 arg0, build_real (TREE_TYPE (arg0), max));
6693 /* x != +Inf is always equal to !(x > DBL_MAX). */
6694 real_maxval (&max, neg, mode);
6695 if (! HONOR_NANS (mode))
6696 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6697 arg0, build_real (TREE_TYPE (arg0), max));
/* With NaNs we must keep the comparison and negate it.  */
6699 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6700 arg0, build_real (TREE_TYPE (arg0), max));
6701 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6710 /* Subroutine of fold() that optimizes comparisons of a division by
6711 a nonzero integer constant against an integer constant, i.e.
6714 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6715 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6716 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6718 The function returns the constant folded tree if a simplification
6719 can be made, and NULL_TREE otherwise. */
/* Fold X/C1 CODE C2 into a range check on X (see the block comment
   above).  LO/HI bound the set of X for which X/C1 == C2.
   NOTE(review): the `switch' headers, several case labels, braces and
   gcc_unreachable () defaults are elided from this listing.  */
6722 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6724 tree prod, tmp, hi, lo;
6725 tree arg00 = TREE_OPERAND (arg0, 0);
6726 tree arg01 = TREE_OPERAND (arg0, 1);
6727 unsigned HOST_WIDE_INT lpart;
6728 HOST_WIDE_INT hpart;
6729 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6733 /* We have to do this the hard way to detect unsigned overflow.
6734 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6735 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6736 TREE_INT_CST_HIGH (arg01),
6737 TREE_INT_CST_LOW (arg1),
6738 TREE_INT_CST_HIGH (arg1),
6739 &lpart, &hpart, unsigned_p);
6740 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6742 neg_overflow = false;
/* Unsigned case (branch header elided): lo = prod, hi = prod + C1-1.  */
6746 tmp = int_const_binop (MINUS_EXPR, arg01,
6747 build_int_cst (TREE_TYPE (arg01), 1), 0);
6750 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6751 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6752 TREE_INT_CST_HIGH (prod),
6753 TREE_INT_CST_LOW (tmp),
6754 TREE_INT_CST_HIGH (tmp),
6755 &lpart, &hpart, unsigned_p);
6756 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6757 -1, overflow | TREE_OVERFLOW (prod));
6759 else if (tree_int_cst_sgn (arg01) >= 0)
6761 tmp = int_const_binop (MINUS_EXPR, arg01,
6762 build_int_cst (TREE_TYPE (arg01), 1), 0);
/* Positive divisor: bounds depend on the sign of C2.  */
6763 switch (tree_int_cst_sgn (arg1))
6766 neg_overflow = true;
6767 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6772 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6777 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6787 /* A negative divisor reverses the relational operators. */
6788 code = swap_tree_comparison (code);
6790 tmp = int_const_binop (PLUS_EXPR, arg01,
6791 build_int_cst (TREE_TYPE (arg01), 1), 0);
6792 switch (tree_int_cst_sgn (arg1))
6795 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6800 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6805 neg_overflow = true;
6806 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Emit the comparison; an overflowed bound means the range is open
   on that side (switch (code) header elided).  */
6818 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6819 return omit_one_operand (type, integer_zero_node, arg00);
6820 if (TREE_OVERFLOW (hi))
6821 return fold_build2 (GE_EXPR, type, arg00, lo);
6822 if (TREE_OVERFLOW (lo))
6823 return fold_build2 (LE_EXPR, type, arg00, hi);
6824 return build_range_check (type, arg00, 1, lo, hi);
6827 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6828 return omit_one_operand (type, integer_one_node, arg00);
6829 if (TREE_OVERFLOW (hi))
6830 return fold_build2 (LT_EXPR, type, arg00, lo);
6831 if (TREE_OVERFLOW (lo))
6832 return fold_build2 (GT_EXPR, type, arg00, hi);
6833 return build_range_check (type, arg00, 0, lo, hi);
6836 if (TREE_OVERFLOW (lo))
6838 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6839 return omit_one_operand (type, tmp, arg00);
6841 return fold_build2 (LT_EXPR, type, arg00, lo);
6844 if (TREE_OVERFLOW (hi))
6846 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6847 return omit_one_operand (type, tmp, arg00);
6849 return fold_build2 (LE_EXPR, type, arg00, hi);
6852 if (TREE_OVERFLOW (hi))
6854 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6855 return omit_one_operand (type, tmp, arg00);
6857 return fold_build2 (GT_EXPR, type, arg00, hi);
6860 if (TREE_OVERFLOW (lo))
6862 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6863 return omit_one_operand (type, tmp, arg00);
6865 return fold_build2 (GE_EXPR, type, arg00, lo);
6875 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6876 equality/inequality test, then return a simplified form of the test
6877 using a sign test. Otherwise return NULL. TYPE is the desired
/* Turn (A & SIGN_BIT) != 0 / == 0 into a signed comparison with zero
   (see the block comment above).  NOTE(review): the final
   `return NULL_TREE;' and closing braces are elided from this
   listing.  */
6881 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6884 /* If this is testing a single bit, we can optimize the test. */
6885 if ((code == NE_EXPR || code == EQ_EXPR)
6886 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6887 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6889 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6890 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6891 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6893 if (arg00 != NULL_TREE
6894 /* This is only a win if casting to a signed type is cheap,
6895 i.e. when arg00's type is not a partial mode. */
6896 && TYPE_PRECISION (TREE_TYPE (arg00))
6897 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6899 tree stype = signed_type_for (TREE_TYPE (arg00));
6900 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6901 result_type, fold_convert (stype, arg00),
6902 build_int_cst (stype, 0));
6909 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6910 equality/inequality test, then return a simplified form of
6911 the test using shifts and logical operations. Otherwise return
6912 NULL. TYPE is the desired result type. */
/* Turn (A & C) != 0 / == 0, C a single bit, into shift/and/xor form
   (see the block comment above).  NOTE(review): the `#else'/`#endif'
   of the LOAD_EXTEND_OP conditional, the final `return inner;' /
   `return NULL_TREE;' and some braces are elided from this
   listing.  */
6915 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6918 /* If this is testing a single bit, we can optimize the test. */
6919 if ((code == NE_EXPR || code == EQ_EXPR)
6920 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6921 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6923 tree inner = TREE_OPERAND (arg0, 0);
6924 tree type = TREE_TYPE (arg0);
6925 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6926 enum machine_mode operand_mode = TYPE_MODE (type);
6928 tree signed_type, unsigned_type, intermediate_type;
6931 /* First, see if we can fold the single bit test into a sign-bit
6933 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6938 /* Otherwise we have (A & C) != 0 where C is a single bit,
6939 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6940 Similarly for (A & C) == 0. */
6942 /* If INNER is a right shift of a constant and it plus BITNUM does
6943 not overflow, adjust BITNUM and INNER. */
6944 if (TREE_CODE (inner) == RSHIFT_EXPR
6945 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6946 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6947 && bitnum < TYPE_PRECISION (type)
6948 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6949 bitnum - TYPE_PRECISION (type)))
6951 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6952 inner = TREE_OPERAND (inner, 0);
6955 /* If we are going to be able to omit the AND below, we must do our
6956 operations as unsigned. If we must use the AND, we have a choice.
6957 Normally unsigned is faster, but for some machines signed is. */
6958 #ifdef LOAD_EXTEND_OP
6959 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6960 && !flag_syntax_only) ? 0 : 1;
6965 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6966 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6967 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6968 inner = fold_convert (intermediate_type, inner);
/* Shift the tested bit down to bit 0.  */
6971 inner = build2 (RSHIFT_EXPR, intermediate_type,
6972 inner, size_int (bitnum));
6974 one = build_int_cst (intermediate_type, 1);
/* For == 0 flip the bit so the final AND yields the truth value.  */
6976 if (code == EQ_EXPR)
6977 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6979 /* Put the AND last so it can combine with more things. */
6980 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6982 /* Make sure to return the proper type. */
6983 inner = fold_convert (result_type, inner);
6990 /* Check whether we are allowed to reorder operands arg0 and arg1,
6991 such that the evaluation of arg1 occurs before arg0. */
/* Return true if evaluating ARG1 before ARG0 is permissible.
   NOTE(review): the `return true;' lines after the first two tests
   are elided from this listing.  */
6994 reorder_operands_p (const_tree arg0, const_tree arg1)
6996 if (! flag_evaluation_order)
6998 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
/* Neither is constant: only safe when neither has side effects.  */
7000 return ! TREE_SIDE_EFFECTS (arg0)
7001 && ! TREE_SIDE_EFFECTS (arg1);
7004 /* Test whether it is preferable two swap two operands, ARG0 and
7005 ARG1, for example because ARG0 is an integer constant and ARG1
7006 isn't. If REORDER is true, only recommend swapping if we can
7007 evaluate the operands in reverse order. */
/* Canonical operand order: constants last, SSA_NAMEs ordered by
   version, variables last (see the block comment above).
   NOTE(review): the `return 0;'/`return 1;' lines after each test
   and the final default return are elided from this listing.  */
7010 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7012 STRIP_SIGN_NOPS (arg0);
7013 STRIP_SIGN_NOPS (arg1);
/* Prefer constants (most-constant kind first) on the ARG1 side.  */
7015 if (TREE_CODE (arg1) == INTEGER_CST)
7017 if (TREE_CODE (arg0) == INTEGER_CST)
7020 if (TREE_CODE (arg1) == REAL_CST)
7022 if (TREE_CODE (arg0) == REAL_CST)
7025 if (TREE_CODE (arg1) == FIXED_CST)
7027 if (TREE_CODE (arg0) == FIXED_CST)
7030 if (TREE_CODE (arg1) == COMPLEX_CST)
7032 if (TREE_CODE (arg0) == COMPLEX_CST)
7035 if (TREE_CONSTANT (arg1))
7037 if (TREE_CONSTANT (arg0))
7040 if (optimize_function_for_size_p (cfun))
/* Respect mandated evaluation order when operands have effects.  */
7043 if (reorder && flag_evaluation_order
7044 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7047 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7048 for commutative and comparison operators. Ensuring a canonical
7049 form allows the optimizers to find additional redundancies without
7050 having to explicitly check for both orderings. */
7051 if (TREE_CODE (arg0) == SSA_NAME
7052 && TREE_CODE (arg1) == SSA_NAME
7053 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7056 /* Put SSA_NAMEs last. */
7057 if (TREE_CODE (arg1) == SSA_NAME)
7059 if (TREE_CODE (arg0) == SSA_NAME)
7062 /* Put variables last. */
7071 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7072    ARG0 is extended to a wider type.  */
/* NOTE(review): several declarations and the final comparison-direction
   switch are elided from this excerpt; the omit_one_operand calls below
   belong to branches whose guarding conditions are not all visible.  */
7075 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7077   tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7079   tree shorter_type, outer_type;
/* If stripping the widening conversion changed nothing, there is
   nothing to fold.  */
7083   if (arg0_unw == arg0)
7085   shorter_type = TREE_TYPE (arg0_unw);
7087 #ifdef HAVE_canonicalize_funcptr_for_compare
7088   /* Disable this optimization if we're casting a function pointer
7089      type on targets that require function pointer canonicalization.  */
7090   if (HAVE_canonicalize_funcptr_for_compare
7091       && TREE_CODE (shorter_type) == POINTER_TYPE
7092       && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7096   if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7099   arg1_unw = get_unwidened (arg1, NULL_TREE);
7101   /* If possible, express the comparison in the shorter mode.  */
7102   if ((code == EQ_EXPR || code == NE_EXPR
7103        || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7104       && (TREE_TYPE (arg1_unw) == shorter_type
7105 	  || ((TYPE_PRECISION (shorter_type)
7106 	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7107 	      && (TYPE_UNSIGNED (shorter_type)
7108 		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7109 	  || (TREE_CODE (arg1_unw) == INTEGER_CST
7110 	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
7111 		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7112 	      && int_fits_type_p (arg1_unw, shorter_type))))
7113     return fold_build2 (code, type, arg0_unw,
7114 		       fold_convert (shorter_type, arg1_unw));
7116   if (TREE_CODE (arg1_unw) != INTEGER_CST
7117       || TREE_CODE (shorter_type) != INTEGER_TYPE
7118       || !int_fits_type_p (arg1_unw, shorter_type))
7121   /* If we are comparing with the integer that does not fit into the range
7122      of the shorter type, the result is known.  */
7123   outer_type = TREE_TYPE (arg1_unw);
7124   min = lower_bound_in_type (outer_type, shorter_type);
7125   max = upper_bound_in_type (outer_type, shorter_type);
/* 'above'/'below' classify the constant relative to the shorter type's
   range; the relational constants compared here are elided in this view.  */
7127   above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7129   below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
/* Each omit_one_operand below replaces the whole comparison with a known
   0/1 result while preserving arg0's side effects.  The switch cases
   selecting among them are not visible in this excerpt.  */
7136 	  return omit_one_operand (type, integer_zero_node, arg0);
7141 	  return omit_one_operand (type, integer_one_node, arg0);
7147 	  return omit_one_operand (type, integer_one_node, arg0);
7149 	  return omit_one_operand (type, integer_zero_node, arg0);
7154 	  return omit_one_operand (type, integer_zero_node, arg0);
7156 	  return omit_one_operand (type, integer_one_node, arg0);
7165 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7166    ARG0 just the signedness is changed.  */
/* NOTE(review): some declarations and early-return lines are elided
   from this excerpt.  */
7169 fold_sign_changed_comparison (enum tree_code code, tree type,
7170 			      tree arg0, tree arg1)
7173   tree inner_type, outer_type;
7175   if (!CONVERT_EXPR_P (arg0))
7178   outer_type = TREE_TYPE (arg0);
7179   arg0_inner = TREE_OPERAND (arg0, 0);
7180   inner_type = TREE_TYPE (arg0_inner);
7182 #ifdef HAVE_canonicalize_funcptr_for_compare
7183   /* Disable this optimization if we're casting a function pointer
7184      type on targets that require function pointer canonicalization.  */
7185   if (HAVE_canonicalize_funcptr_for_compare
7186       && TREE_CODE (inner_type) == POINTER_TYPE
7187       && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
/* Only a pure signedness change keeps the precision identical.  */
7191   if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7194   /* If the conversion is from an integral subtype to its basetype
7196   if (TREE_TYPE (inner_type) == outer_type)
7199   if (TREE_CODE (arg1) != INTEGER_CST
7200       && !(CONVERT_EXPR_P (arg1)
7201 	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7204   if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7205        || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
/* Re-express the constant in the inner type, preserving any recorded
   overflow flag.  */
7210   if (TREE_CODE (arg1) == INTEGER_CST)
7211     arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7212 				  TREE_INT_CST_HIGH (arg1), 0,
7213 				  TREE_OVERFLOW (arg1));
7215     arg1 = fold_convert (inner_type, arg1);
7217   return fold_build2 (code, type, arg0_inner, arg1);
7220 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7221    step of the array.  Reconstructs s and delta in the case of s * delta
7222    being an integer constant (and thus already folded).
7223    ADDR is the address.  MULT is the multiplicative expression.
7224    If the function succeeds, the new address expression is returned.  Otherwise
7225    NULL_TREE is returned.  */
/* NOTE(review): several lines (declarations, STRIP_NOPS, some branch
   bodies and NULL_TREE returns) are elided from this excerpt.  */
7228 try_move_mult_to_index (tree addr, tree op1)
7230   tree s, delta, step;
7231   tree ref = TREE_OPERAND (addr, 0), pref;
7236   /*  Strip the nops that might be added when converting op1 to sizetype. */
7239   /* Canonicalize op1 into a possibly non-constant delta
7240      and an INTEGER_CST s.  */
7241   if (TREE_CODE (op1) == MULT_EXPR)
7243       tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7248       if (TREE_CODE (arg0) == INTEGER_CST)
7253       else if (TREE_CODE (arg1) == INTEGER_CST)
7261   else if (TREE_CODE (op1) == INTEGER_CST)
7268       /* Simulate we are delta * 1.  */
7270       s = integer_one_node;
/* Walk down the reference chain looking for an ARRAY_REF whose element
   size matches s (or divides the folded constant).  */
7273   for (;; ref = TREE_OPERAND (ref, 0))
7275       if (TREE_CODE (ref) == ARRAY_REF)
7277 	  /* Remember if this was a multi-dimensional array.  */
7278 	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7281 	  itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7285 	  step = array_ref_element_size (ref);
7286 	  if (TREE_CODE (step) != INTEGER_CST)
7291 	      if (! tree_int_cst_equal (step, s))
7296 	      /* Try if delta is a multiple of step.  */
7297 	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7303 	  /* Only fold here if we can verify we do not overflow one
7304 	     dimension of a multi-dimensional array.  */
7309 	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7310 		  || !INTEGRAL_TYPE_P (itype)
7311 		  || !TYPE_MAX_VALUE (itype)
7312 		  || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7315 	      tmp = fold_binary (PLUS_EXPR, itype,
7316 				 fold_convert (itype,
7317 					       TREE_OPERAND (ref, 1)),
7318 				 fold_convert (itype, delta));
7320 		  || TREE_CODE (tmp) != INTEGER_CST
7321 		  || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7330       if (!handled_component_p (ref))
7334   /* We found the suitable array reference.  So copy everything up to it,
7335      and replace the index.  */
7337   pref = TREE_OPERAND (addr, 0);
7338   ret = copy_node (pref);
/* Copy the component chain node-by-node so the original tree is not
   mutated, then rebuild the index with delta added.  */
7343       pref = TREE_OPERAND (pref, 0);
7344       TREE_OPERAND (pos, 0) = copy_node (pref);
7345       pos = TREE_OPERAND (pos, 0);
7348   TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7349 				       fold_convert (itype,
7350 						     TREE_OPERAND (pos, 1)),
7351 				       fold_convert (itype, delta));
7353   return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7357 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
7358    means A >= Y && A != MAX, but in this case we know that
7359    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
/* NOTE(review): the NULL_TREE returns of the rejecting branches are
   elided from this excerpt.  */
7362 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7364   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from whichever side of BOUND it sits on.  */
7366   if (TREE_CODE (bound) == LT_EXPR)
7367     a = TREE_OPERAND (bound, 0);
7368   else if (TREE_CODE (bound) == GT_EXPR)
7369     a = TREE_OPERAND (bound, 1);
7373   typea = TREE_TYPE (a);
7374   if (!INTEGRAL_TYPE_P (typea)
7375       && !POINTER_TYPE_P (typea))
/* Extract A+1 (a1) and Y from INEQ, which may be written either way.  */
7378   if (TREE_CODE (ineq) == LT_EXPR)
7380       a1 = TREE_OPERAND (ineq, 1);
7381       y = TREE_OPERAND (ineq, 0);
7383   else if (TREE_CODE (ineq) == GT_EXPR)
7385       a1 = TREE_OPERAND (ineq, 0);
7386       y = TREE_OPERAND (ineq, 1);
7391   if (TREE_TYPE (a1) != typea)
7394   if (POINTER_TYPE_P (typea))
7396       /* Convert the pointer types into integer before taking the difference.  */
7397       tree ta = fold_convert (ssizetype, a);
7398       tree ta1 = fold_convert (ssizetype, a1);
7399       diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7402     diff = fold_binary (MINUS_EXPR, typea, a1, a);
/* The transformation is valid only when a1 is exactly A + 1.  */
7404   if (!diff || !integer_onep (diff))
7407   return fold_build2 (GE_EXPR, type, a, y);
7410 /* Fold a sum or difference of at least one multiplication.
7411    Returns the folded tree or NULL if no simplification could be made.  */
/* NOTE(review): several branch bodies, NULL_TREE returns and the
   'same'/'maybe_same' bookkeeping lines are elided from this excerpt.  */
7414 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7416   tree arg00, arg01, arg10, arg11;
7417   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7419   /* (A * C) +- (B * C) -> (A+-B) * C.
7420      (A * C) +- A -> A * (C+-1).
7421      We are most concerned about the case where C is a constant,
7422      but other combinations show up during loop reduction.  Since
7423      it is not difficult, try all four possibilities.  */
/* Decompose arg0 into arg00 * arg01, treating a lone value as X * 1.  */
7425   if (TREE_CODE (arg0) == MULT_EXPR)
7427       arg00 = TREE_OPERAND (arg0, 0);
7428       arg01 = TREE_OPERAND (arg0, 1);
7430   else if (TREE_CODE (arg0) == INTEGER_CST)
7432       arg00 = build_one_cst (type);
7437       /* We cannot generate constant 1 for fract.  */
7438       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7441       arg01 = build_one_cst (type);
/* Same decomposition for arg1.  */
7443   if (TREE_CODE (arg1) == MULT_EXPR)
7445       arg10 = TREE_OPERAND (arg1, 0);
7446       arg11 = TREE_OPERAND (arg1, 1);
7448   else if (TREE_CODE (arg1) == INTEGER_CST)
7450       arg10 = build_one_cst (type);
7451       /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7452 	 the purpose of this canonicalization.  */
7453       if (TREE_INT_CST_HIGH (arg1) == -1
7454 	  && negate_expr_p (arg1)
7455 	  && code == PLUS_EXPR)
7457 	  arg11 = negate_expr (arg1);
7465       /* We cannot generate constant 1 for fract.  */
7466       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7469       arg11 = build_one_cst (type);
/* Look for a factor common to both products.  */
7473   if (operand_equal_p (arg01, arg11, 0))
7474     same = arg01, alt0 = arg00, alt1 = arg10;
7475   else if (operand_equal_p (arg00, arg10, 0))
7476     same = arg00, alt0 = arg01, alt1 = arg11;
7477   else if (operand_equal_p (arg00, arg11, 0))
7478     same = arg00, alt0 = arg01, alt1 = arg10;
7479   else if (operand_equal_p (arg01, arg10, 0))
7480     same = arg01, alt0 = arg00, alt1 = arg11;
7482   /* No identical multiplicands; see if we can find a common
7483      power-of-two factor in non-power-of-two multiplies.  This
7484      can help in multi-dimensional array access.  */
7485   else if (host_integerp (arg01, 0)
7486 	   && host_integerp (arg11, 0))
7488       HOST_WIDE_INT int01, int11, tmp;
7491       int01 = TREE_INT_CST_LOW (arg01);
7492       int11 = TREE_INT_CST_LOW (arg11);
7494       /* Move min of absolute values to int11.  */
7495       if ((int01 >= 0 ? int01 : -int01)
7496 	  < (int11 >= 0 ? int11 : -int11))
7498 	  tmp = int01, int01 = int11, int11 = tmp;
7499 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
7506       if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7508 	  alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7509 			      build_int_cst (TREE_TYPE (arg00),
7514 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* Rebuild as (alt0 +- alt1) * same.  */
7519     return fold_build2 (MULT_EXPR, type,
7520 			fold_build2 (code, type,
7521 				     fold_convert (type, alt0),
7522 				     fold_convert (type, alt1)),
7523 			fold_convert (type, same));
7528 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7529    specified by EXPR into the buffer PTR of length LEN bytes.
7530    Return the number of bytes placed in the buffer, or zero
/* NOTE(review): the "upon failure" tail of this comment, the return
   type line and the final return are elided from this excerpt.  */
7534 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7536   tree type = TREE_TYPE (expr);
7537   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7538   int byte, offset, word, words;
7539   unsigned char value;
/* Fail (elided return) if the buffer cannot hold the whole value.  */
7541   if (total_bytes > len)
7543   words = total_bytes / UNITS_PER_WORD;
7545   for (byte = 0; byte < total_bytes; byte++)
7547       int bitpos = byte * BITS_PER_UNIT;
/* Pull each byte from the low or high HOST_WIDE_INT half of the CST.  */
7548       if (bitpos < HOST_BITS_PER_WIDE_INT)
7549 	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7551 	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7552 				 >> (bitpos - HOST_BITS_PER_WIDE_INT));
/* Map the logical byte index to the target's word/byte endianness.  */
7554       if (total_bytes > UNITS_PER_WORD)
7556 	  word = byte / UNITS_PER_WORD;
7557 	  if (WORDS_BIG_ENDIAN)
7558 	    word = (words - 1) - word;
7559 	  offset = word * UNITS_PER_WORD;
7560 	  if (BYTES_BIG_ENDIAN)
7561 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7563 	    offset += byte % UNITS_PER_WORD;
7566 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7567       ptr[offset] = value;
7573 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7574    specified by EXPR into the buffer PTR of length LEN bytes.
7575    Return the number of bytes placed in the buffer, or zero
/* NOTE(review): the comment tail, the tmp[] declaration and the final
   return are elided from this excerpt.  */
7579 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7581   tree type = TREE_TYPE (expr);
7582   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7583   int byte, offset, word, words, bitpos;
7584   unsigned char value;
7586   /* There are always 32 bits in each long, no matter the size of
7587      the hosts long.  We handle floating point representations with
7591   if (total_bytes > len)
7593   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
/* real_to_target fills tmp with the target-format image, 32 bits per
   element.  */
7595   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7597   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7598        bitpos += BITS_PER_UNIT)
7600       byte = (bitpos / BITS_PER_UNIT) & 3;
7601       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
/* Place each byte according to target endianness, within 4-byte groups.  */
7603       if (UNITS_PER_WORD < 4)
7605 	  word = byte / UNITS_PER_WORD;
7606 	  if (WORDS_BIG_ENDIAN)
7607 	    word = (words - 1) - word;
7608 	  offset = word * UNITS_PER_WORD;
7609 	  if (BYTES_BIG_ENDIAN)
7610 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7612 	    offset += byte % UNITS_PER_WORD;
7615 	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7616       ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7621 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7622    specified by EXPR into the buffer PTR of length LEN bytes.
7623    Return the number of bytes placed in the buffer, or zero
/* NOTE(review): declarations and the failure checks on rsize/isize are
   elided from this excerpt.  */
7627 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
/* Encode the real part first, then the imaginary part after it.  */
7632   part = TREE_REALPART (expr);
7633   rsize = native_encode_expr (part, ptr, len);
7636   part = TREE_IMAGPART (expr);
7637   isize = native_encode_expr (part, ptr+rsize, len-rsize);
7640   return rsize + isize;
7644 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7645    specified by EXPR into the buffer PTR of length LEN bytes.
7646    Return the number of bytes placed in the buffer, or zero
/* NOTE(review): the offset initialization, loop body framing and final
   return are elided from this excerpt.  */
7650 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7652   int i, size, offset, count;
7653   tree itype, elem, elements;
7656   elements = TREE_VECTOR_CST_ELTS (expr);
7657   count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7658   itype = TREE_TYPE (TREE_TYPE (expr));
7659   size = GET_MODE_SIZE (TYPE_MODE (itype));
7660   for (i = 0; i < count; i++)
/* Walk the element TREE_LIST; presumably trailing missing elements are
   zero-filled by the memset branch below -- confirm against full source.  */
7664 	  elem = TREE_VALUE (elements);
7665 	  elements = TREE_CHAIN (elements);
7672 	  if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7677 	  if (offset + size > len)
7679 	  memset (ptr+offset, 0, size);
7687 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7688    specified by EXPR into the buffer PTR of length LEN bytes.
7689    Return the number of bytes placed in the buffer, or zero
/* NOTE(review): failure returns and the final return of total_bytes are
   elided from this excerpt.  */
7693 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7695   tree type = TREE_TYPE (expr);
7696   HOST_WIDE_INT total_bytes;
/* Only plain byte-element arrays with a known constant size are handled.  */
7698   if (TREE_CODE (type) != ARRAY_TYPE
7699       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7700       || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7701       || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7703   total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7704   if (total_bytes > len)
/* A string shorter than its array type is zero-padded to full size.  */
7706   if (TREE_STRING_LENGTH (expr) < total_bytes)
7708       memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7709       memset (ptr + TREE_STRING_LENGTH (expr), 0,
7710 	      total_bytes - TREE_STRING_LENGTH (expr));
7713     memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7718 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7719    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7720    buffer PTR of length LEN bytes.  Return the number of bytes
7721    placed in the buffer, or zero upon failure.  */
/* NOTE(review): the case labels and the default (failure) case are
   elided from this excerpt; only the dispatch calls are visible.  */
7724 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7726   switch (TREE_CODE (expr))
7729       return native_encode_int (expr, ptr, len);
7732       return native_encode_real (expr, ptr, len);
7735       return native_encode_complex (expr, ptr, len);
7738       return native_encode_vector (expr, ptr, len);
7741       return native_encode_string (expr, ptr, len);
7749 /* Subroutine of native_interpret_expr.  Interpret the contents of
7750    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7751    If the buffer cannot be interpreted, return NULL_TREE.  */
/* NOTE(review): the NULL_TREE failure returns are elided from this
   excerpt.  */
7754 native_interpret_int (tree type, const unsigned char *ptr, int len)
7756   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7757   int byte, offset, word, words;
7758   unsigned char value;
7759   unsigned int HOST_WIDE_INT lo = 0;
7760   HOST_WIDE_INT hi = 0;
7762   if (total_bytes > len)
/* Values wider than two HOST_WIDE_INTs cannot be represented.  */
7764   if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7766   words = total_bytes / UNITS_PER_WORD;
7768   for (byte = 0; byte < total_bytes; byte++)
7770       int bitpos = byte * BITS_PER_UNIT;
/* Inverse of native_encode_int's endianness mapping.  */
7771       if (total_bytes > UNITS_PER_WORD)
7773 	  word = byte / UNITS_PER_WORD;
7774 	  if (WORDS_BIG_ENDIAN)
7775 	    word = (words - 1) - word;
7776 	  offset = word * UNITS_PER_WORD;
7777 	  if (BYTES_BIG_ENDIAN)
7778 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7780 	    offset += byte % UNITS_PER_WORD;
7783 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7784       value = ptr[offset];
7786       if (bitpos < HOST_BITS_PER_WIDE_INT)
7787 	lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7789 	hi |= (unsigned HOST_WIDE_INT) value
7790 	      << (bitpos - HOST_BITS_PER_WIDE_INT);
7793   return build_int_cst_wide_type (type, lo, hi);
7797 /* Subroutine of native_interpret_expr.  Interpret the contents of
7798    the buffer PTR of length LEN as a REAL_CST of type TYPE.
7799    If the buffer cannot be interpreted, return NULL_TREE.  */
/* NOTE(review): the tmp[]/r declarations and the failure return are
   elided from this excerpt.  */
7802 native_interpret_real (tree type, const unsigned char *ptr, int len)
7804   enum machine_mode mode = TYPE_MODE (type);
7805   int total_bytes = GET_MODE_SIZE (mode);
7806   int byte, offset, word, words, bitpos;
7807   unsigned char value;
7808   /* There are always 32 bits in each long, no matter the size of
7809      the hosts long.  We handle floating point representations with
7814   total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
/* 24 bytes (192 bits) is the largest float image handled here.  */
7815   if (total_bytes > len || total_bytes > 24)
7817   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7819   memset (tmp, 0, sizeof (tmp));
7820   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7821        bitpos += BITS_PER_UNIT)
7823       byte = (bitpos / BITS_PER_UNIT) & 3;
/* Inverse of native_encode_real's endianness mapping.  */
7824       if (UNITS_PER_WORD < 4)
7826 	  word = byte / UNITS_PER_WORD;
7827 	  if (WORDS_BIG_ENDIAN)
7828 	    word = (words - 1) - word;
7829 	  offset = word * UNITS_PER_WORD;
7830 	  if (BYTES_BIG_ENDIAN)
7831 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7833 	    offset += byte % UNITS_PER_WORD;
7836 	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7837       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7839       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7842   real_from_target (&r, tmp, mode);
7843   return build_real (type, r);
7847 /* Subroutine of native_interpret_expr.  Interpret the contents of
7848    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7849    If the buffer cannot be interpreted, return NULL_TREE.  */
/* NOTE(review): the length check and NULL-part failure returns are
   elided from this excerpt.  */
7852 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7854   tree etype, rpart, ipart;
7857   etype = TREE_TYPE (type);
7858   size = GET_MODE_SIZE (TYPE_MODE (etype));
/* Real part occupies the first SIZE bytes, imaginary part the next.  */
7861   rpart = native_interpret_expr (etype, ptr, size);
7864   ipart = native_interpret_expr (etype, ptr+size, size);
7867   return build_complex (type, rpart, ipart);
7871 /* Subroutine of native_interpret_expr.  Interpret the contents of
7872    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7873    If the buffer cannot be interpreted, return NULL_TREE.  */
/* NOTE(review): declarations and the NULL-element failure return are
   elided from this excerpt.  */
7876 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7878   tree etype, elem, elements;
7881   etype = TREE_TYPE (type);
7882   size = GET_MODE_SIZE (TYPE_MODE (etype));
7883   count = TYPE_VECTOR_SUBPARTS (type);
7884   if (size * count > len)
7887   elements = NULL_TREE;
/* Build the element list back-to-front so it ends up in order.  */
7888   for (i = count - 1; i >= 0; i--)
7890       elem = native_interpret_expr (etype, ptr+(i*size), size);
7893       elements = tree_cons (NULL_TREE, elem, elements);
7895   return build_vector (type, elements);
7899 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7900    the buffer PTR of length LEN as a constant of type TYPE.  For
7901    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7902    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7903    return NULL_TREE.  */
/* NOTE(review): the case labels and default (NULL_TREE) case are elided
   from this excerpt; only the dispatch calls are visible.  */
7906 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7908   switch (TREE_CODE (type))
7913       return native_interpret_int (type, ptr, len);
7916       return native_interpret_real (type, ptr, len);
7919       return native_interpret_complex (type, ptr, len);
7922       return native_interpret_vector (type, ptr, len);
7930 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7931    TYPE at compile-time.  If we're unable to perform the conversion
7932    return NULL_TREE.  */
/* NOTE(review): the len declaration and the encode-failure return are
   elided from this excerpt.  */
7935 fold_view_convert_expr (tree type, tree expr)
7937   /* We support up to 512-bit values (for V8DFmode).  */
7938   unsigned char buffer[64];
7941   /* Check that the host and target are sane.  */
7942   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
/* Round-trip: serialize EXPR to target bytes, then reinterpret those
   bytes as TYPE.  */
7945   len = native_encode_expr (expr, buffer, sizeof (buffer));
7949   return native_interpret_expr (type, buffer, len);
7952 /* Build an expression for the address of T.  Folds away INDIRECT_REF
7953    to avoid confusing the gimplify process.  When IN_FOLD is true
7954    avoid modifications of T.  */
/* NOTE(review): the in_fold branch framing and the base declaration are
   elided from this excerpt.  */
7957 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7959   /* The size of the object is not relevant when talking about its address.  */
7960   if (TREE_CODE (t) == WITH_SIZE_EXPR)
7961     t = TREE_OPERAND (t, 0);
7963   /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7964   if (TREE_CODE (t) == INDIRECT_REF
7965       || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
/* &*p folds to p (with a cast if the pointer type differs).  */
7967       t = TREE_OPERAND (t, 0);
7969       if (TREE_TYPE (t) != ptrtype)
7970 	t = build1 (NOP_EXPR, ptrtype, t);
/* Otherwise take the address; when not folding, mark the ultimate base
   object addressable first.  */
7976       while (handled_component_p (base))
7977 	base = TREE_OPERAND (base, 0);
7980 	TREE_ADDRESSABLE (base) = 1;
7982       t = build1 (ADDR_EXPR, ptrtype, t);
7985     t = build1 (ADDR_EXPR, ptrtype, t);
7990 /* Build an expression for the address of T with type PTRTYPE.  This
7991    function modifies the input parameter 'T' by sometimes setting the
7992    TREE_ADDRESSABLE flag.  */
/* Thin wrapper: delegates with in_fold == false.  */
7995 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7997   return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
8000 /* Build an expression for the address of T.  This function modifies
8001    the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
8002    flag.  When called from fold functions, use fold_addr_expr instead.  */
/* Convenience wrapper that synthesizes the pointer type from T's type.
   NOTE(review): the trailing 'false' argument line is elided here.  */
8005 build_fold_addr_expr (tree t)
8007   return build_fold_addr_expr_with_type_1 (t,
8008 					   build_pointer_type (TREE_TYPE (t)),
8012 /* Same as build_fold_addr_expr, builds an expression for the address
8013    of T, but avoids touching the input node 't'.  Fold functions
8014    should use this version.  */
/* Non-mutating variant: passes in_fold == true so T is left unchanged.  */
8017 fold_addr_expr (tree t)
8019   tree ptrtype = build_pointer_type (TREE_TYPE (t));
8021   return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
8024 /* Fold a unary expression of code CODE and type TYPE with operand
8025 OP0. Return the folded expression if folding is successful.
8026 Otherwise, return NULL_TREE. */
8029 fold_unary (enum tree_code code, tree type, tree op0)
8033 enum tree_code_class kind = TREE_CODE_CLASS (code);
8035 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8036 && TREE_CODE_LENGTH (code) == 1);
8041 if (CONVERT_EXPR_CODE_P (code)
8042 || code == FLOAT_EXPR || code == ABS_EXPR)
8044 /* Don't use STRIP_NOPS, because signedness of argument type
8046 STRIP_SIGN_NOPS (arg0);
8050 /* Strip any conversions that don't change the mode. This
8051 is safe for every expression, except for a comparison
8052 expression because its signedness is derived from its
8055 Note that this is done as an internal manipulation within
8056 the constant folder, in order to find the simplest
8057 representation of the arguments so that their form can be
8058 studied. In any cases, the appropriate type conversions
8059 should be put back in the tree that will get out of the
8065 if (TREE_CODE_CLASS (code) == tcc_unary)
8067 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8068 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8069 fold_build1 (code, type,
8070 fold_convert (TREE_TYPE (op0),
8071 TREE_OPERAND (arg0, 1))));
8072 else if (TREE_CODE (arg0) == COND_EXPR)
8074 tree arg01 = TREE_OPERAND (arg0, 1);
8075 tree arg02 = TREE_OPERAND (arg0, 2);
8076 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8077 arg01 = fold_build1 (code, type,
8078 fold_convert (TREE_TYPE (op0), arg01));
8079 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8080 arg02 = fold_build1 (code, type,
8081 fold_convert (TREE_TYPE (op0), arg02));
8082 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
8085 /* If this was a conversion, and all we did was to move into
8086 inside the COND_EXPR, bring it back out. But leave it if
8087 it is a conversion from integer to integer and the
8088 result precision is no wider than a word since such a
8089 conversion is cheap and may be optimized away by combine,
8090 while it couldn't if it were outside the COND_EXPR. Then return
8091 so we don't get into an infinite recursion loop taking the
8092 conversion out and then back in. */
8094 if ((CONVERT_EXPR_CODE_P (code)
8095 || code == NON_LVALUE_EXPR)
8096 && TREE_CODE (tem) == COND_EXPR
8097 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8098 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8099 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8100 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8101 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8102 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8103 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8105 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8106 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8107 || flag_syntax_only))
8108 tem = build1 (code, type,
8110 TREE_TYPE (TREE_OPERAND
8111 (TREE_OPERAND (tem, 1), 0)),
8112 TREE_OPERAND (tem, 0),
8113 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8114 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8117 else if (COMPARISON_CLASS_P (arg0))
8119 if (TREE_CODE (type) == BOOLEAN_TYPE)
8121 arg0 = copy_node (arg0);
8122 TREE_TYPE (arg0) = type;
8125 else if (TREE_CODE (type) != INTEGER_TYPE)
8126 return fold_build3 (COND_EXPR, type, arg0,
8127 fold_build1 (code, type,
8129 fold_build1 (code, type,
8130 integer_zero_node));
8137 /* Re-association barriers around constants and other re-association
8138 barriers can be removed. */
8139 if (CONSTANT_CLASS_P (op0)
8140 || TREE_CODE (op0) == PAREN_EXPR)
8141 return fold_convert (type, op0);
8146 case FIX_TRUNC_EXPR:
8147 if (TREE_TYPE (op0) == type)
8150 /* If we have (type) (a CMP b) and type is an integral type, return
8151 new expression involving the new type. */
8152 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8153 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8154 TREE_OPERAND (op0, 1));
8156 /* Handle cases of two conversions in a row. */
8157 if (CONVERT_EXPR_P (op0))
8159 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8160 tree inter_type = TREE_TYPE (op0);
8161 int inside_int = INTEGRAL_TYPE_P (inside_type);
8162 int inside_ptr = POINTER_TYPE_P (inside_type);
8163 int inside_float = FLOAT_TYPE_P (inside_type);
8164 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8165 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8166 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8167 int inter_int = INTEGRAL_TYPE_P (inter_type);
8168 int inter_ptr = POINTER_TYPE_P (inter_type);
8169 int inter_float = FLOAT_TYPE_P (inter_type);
8170 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8171 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8172 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8173 int final_int = INTEGRAL_TYPE_P (type);
8174 int final_ptr = POINTER_TYPE_P (type);
8175 int final_float = FLOAT_TYPE_P (type);
8176 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8177 unsigned int final_prec = TYPE_PRECISION (type);
8178 int final_unsignedp = TYPE_UNSIGNED (type);
8180 /* In addition to the cases of two conversions in a row
8181 handled below, if we are converting something to its own
8182 type via an object of identical or wider precision, neither
8183 conversion is needed. */
8184 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8185 && (((inter_int || inter_ptr) && final_int)
8186 || (inter_float && final_float))
8187 && inter_prec >= final_prec)
8188 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8190 /* Likewise, if the intermediate and initial types are either both
8191 float or both integer, we don't need the middle conversion if the
8192 former is wider than the latter and doesn't change the signedness
8193 (for integers). Avoid this if the final type is a pointer since
8194 then we sometimes need the middle conversion. Likewise if the
8195 final type has a precision not equal to the size of its mode. */
8196 if (((inter_int && inside_int)
8197 || (inter_float && inside_float)
8198 || (inter_vec && inside_vec))
8199 && inter_prec >= inside_prec
8200 && (inter_float || inter_vec
8201 || inter_unsignedp == inside_unsignedp)
8202 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8203 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8205 && (! final_vec || inter_prec == inside_prec))
8206 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8208 /* If we have a sign-extension of a zero-extended value, we can
8209 replace that by a single zero-extension. */
8210 if (inside_int && inter_int && final_int
8211 && inside_prec < inter_prec && inter_prec < final_prec
8212 && inside_unsignedp && !inter_unsignedp)
8213 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8215 /* Two conversions in a row are not needed unless:
8216 - some conversion is floating-point (overstrict for now), or
8217 - some conversion is a vector (overstrict for now), or
8218 - the intermediate type is narrower than both initial and
8220 - the intermediate type and innermost type differ in signedness,
8221 and the outermost type is wider than the intermediate, or
8222 - the initial type is a pointer type and the precisions of the
8223 intermediate and final types differ, or
8224 - the final type is a pointer type and the precisions of the
8225 initial and intermediate types differ. */
8226 if (! inside_float && ! inter_float && ! final_float
8227 && ! inside_vec && ! inter_vec && ! final_vec
8228 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8229 && ! (inside_int && inter_int
8230 && inter_unsignedp != inside_unsignedp
8231 && inter_prec < final_prec)
8232 && ((inter_unsignedp && inter_prec > inside_prec)
8233 == (final_unsignedp && final_prec > inter_prec))
8234 && ! (inside_ptr && inter_prec != final_prec)
8235 && ! (final_ptr && inside_prec != inter_prec)
8236 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8237 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8238 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8241 /* Handle (T *)&A.B.C for A being of type T and B and C
8242 living at offset zero. This occurs frequently in
8243 C++ upcasting and then accessing the base. */
8244 if (TREE_CODE (op0) == ADDR_EXPR
8245 && POINTER_TYPE_P (type)
8246 && handled_component_p (TREE_OPERAND (op0, 0)))
8248 HOST_WIDE_INT bitsize, bitpos;
8250 enum machine_mode mode;
8251 int unsignedp, volatilep;
8252 tree base = TREE_OPERAND (op0, 0);
8253 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8254 &mode, &unsignedp, &volatilep, false);
8255 /* If the reference was to a (constant) zero offset, we can use
8256 the address of the base if it has the same base type
8257 as the result type. */
8258 if (! offset && bitpos == 0
8259 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8260 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8261 return fold_convert (type, fold_addr_expr (base));
8264 if (TREE_CODE (op0) == MODIFY_EXPR
8265 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8266 /* Detect assigning a bitfield. */
8267 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8269 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8271 /* Don't leave an assignment inside a conversion
8272 unless assigning a bitfield. */
8273 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
8274 /* First do the assignment, then return converted constant. */
8275 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8276 TREE_NO_WARNING (tem) = 1;
8277 TREE_USED (tem) = 1;
8281 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8282 constants (if x has signed type, the sign bit cannot be set
8283 in c). This folds extension into the BIT_AND_EXPR.
8284 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8285 very likely don't have maximal range for their precision and this
8286 transformation effectively doesn't preserve non-maximal ranges. */
8287 if (TREE_CODE (type) == INTEGER_TYPE
8288 && TREE_CODE (op0) == BIT_AND_EXPR
8289 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
8290 /* Not if the conversion is to the sub-type. */
8291 && TREE_TYPE (type) != TREE_TYPE (op0))
8294 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8297 if (TYPE_UNSIGNED (TREE_TYPE (and))
8298 || (TYPE_PRECISION (type)
8299 <= TYPE_PRECISION (TREE_TYPE (and))))
8301 else if (TYPE_PRECISION (TREE_TYPE (and1))
8302 <= HOST_BITS_PER_WIDE_INT
8303 && host_integerp (and1, 1))
8305 unsigned HOST_WIDE_INT cst;
8307 cst = tree_low_cst (and1, 1);
8308 cst &= (HOST_WIDE_INT) -1
8309 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8310 change = (cst == 0);
8311 #ifdef LOAD_EXTEND_OP
8313 && !flag_syntax_only
8314 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8317 tree uns = unsigned_type_for (TREE_TYPE (and0));
8318 and0 = fold_convert (uns, and0);
8319 and1 = fold_convert (uns, and1);
8325 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8326 TREE_INT_CST_HIGH (and1), 0,
8327 TREE_OVERFLOW (and1));
8328 return fold_build2 (BIT_AND_EXPR, type,
8329 fold_convert (type, and0), tem);
8333 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8334 when one of the new casts will fold away. Conservatively we assume
8335 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8336 if (POINTER_TYPE_P (type)
8337 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8338 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8339 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8340 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8342 tree arg00 = TREE_OPERAND (arg0, 0);
8343 tree arg01 = TREE_OPERAND (arg0, 1);
8345 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8346 fold_convert (sizetype, arg01));
8349 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8350 of the same precision, and X is an integer type not narrower than
8351 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8352 if (INTEGRAL_TYPE_P (type)
8353 && TREE_CODE (op0) == BIT_NOT_EXPR
8354 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8355 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8356 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8358 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8359 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8360 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8361 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8364 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8365 type of X and Y (integer types only). */
8366 if (INTEGRAL_TYPE_P (type)
8367 && TREE_CODE (op0) == MULT_EXPR
8368 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8369 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8371 /* Be careful not to introduce new overflows. */
8373 if (TYPE_OVERFLOW_WRAPS (type))
8376 mult_type = unsigned_type_for (type);
8378 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8380 tem = fold_build2 (MULT_EXPR, mult_type,
8381 fold_convert (mult_type,
8382 TREE_OPERAND (op0, 0)),
8383 fold_convert (mult_type,
8384 TREE_OPERAND (op0, 1)));
8385 return fold_convert (type, tem);
8389 tem = fold_convert_const (code, type, op0);
8390 return tem ? tem : NULL_TREE;
8392 case FIXED_CONVERT_EXPR:
8393 tem = fold_convert_const (code, type, arg0);
8394 return tem ? tem : NULL_TREE;
8396 case VIEW_CONVERT_EXPR:
8397 if (TREE_TYPE (op0) == type)
8399 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8400 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8402 /* For integral conversions with the same precision or pointer
8403 conversions use a NOP_EXPR instead. */
8404 if ((INTEGRAL_TYPE_P (type)
8405 || POINTER_TYPE_P (type))
8406 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8407 || POINTER_TYPE_P (TREE_TYPE (op0)))
8408 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
8409 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
8410 a sub-type to its base type as generated by the Ada FE. */
8411 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
8412 && TREE_TYPE (TREE_TYPE (op0))))
8413 return fold_convert (type, op0);
8415 /* Strip inner integral conversions that do not change the precision. */
8416 if (CONVERT_EXPR_P (op0)
8417 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8418 || POINTER_TYPE_P (TREE_TYPE (op0)))
8419 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8420 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8421 && (TYPE_PRECISION (TREE_TYPE (op0))
8422 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8423 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8425 return fold_view_convert_expr (type, op0);
8428 tem = fold_negate_expr (arg0);
8430 return fold_convert (type, tem);
8434 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8435 return fold_abs_const (arg0, type);
8436 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8437 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8438 /* Convert fabs((double)float) into (double)fabsf(float). */
8439 else if (TREE_CODE (arg0) == NOP_EXPR
8440 && TREE_CODE (type) == REAL_TYPE)
8442 tree targ0 = strip_float_extensions (arg0);
8444 return fold_convert (type, fold_build1 (ABS_EXPR,
8448 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8449 else if (TREE_CODE (arg0) == ABS_EXPR)
8451 else if (tree_expr_nonnegative_p (arg0))
8454 /* Strip sign ops from argument. */
8455 if (TREE_CODE (type) == REAL_TYPE)
8457 tem = fold_strip_sign_ops (arg0);
8459 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8464 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8465 return fold_convert (type, arg0);
8466 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8468 tree itype = TREE_TYPE (type);
8469 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8470 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8471 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8473 if (TREE_CODE (arg0) == COMPLEX_CST)
8475 tree itype = TREE_TYPE (type);
8476 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8477 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8478 return build_complex (type, rpart, negate_expr (ipart));
8480 if (TREE_CODE (arg0) == CONJ_EXPR)
8481 return fold_convert (type, TREE_OPERAND (arg0, 0));
8485 if (TREE_CODE (arg0) == INTEGER_CST)
8486 return fold_not_const (arg0, type);
8487 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8488 return fold_convert (type, TREE_OPERAND (arg0, 0));
8489 /* Convert ~ (-A) to A - 1. */
8490 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8491 return fold_build2 (MINUS_EXPR, type,
8492 fold_convert (type, TREE_OPERAND (arg0, 0)),
8493 build_int_cst (type, 1));
8494 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8495 else if (INTEGRAL_TYPE_P (type)
8496 && ((TREE_CODE (arg0) == MINUS_EXPR
8497 && integer_onep (TREE_OPERAND (arg0, 1)))
8498 || (TREE_CODE (arg0) == PLUS_EXPR
8499 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8500 return fold_build1 (NEGATE_EXPR, type,
8501 fold_convert (type, TREE_OPERAND (arg0, 0)));
8502 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8503 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8504 && (tem = fold_unary (BIT_NOT_EXPR, type,
8506 TREE_OPERAND (arg0, 0)))))
8507 return fold_build2 (BIT_XOR_EXPR, type, tem,
8508 fold_convert (type, TREE_OPERAND (arg0, 1)));
8509 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8510 && (tem = fold_unary (BIT_NOT_EXPR, type,
8512 TREE_OPERAND (arg0, 1)))))
8513 return fold_build2 (BIT_XOR_EXPR, type,
8514 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8515 /* Perform BIT_NOT_EXPR on each element individually. */
8516 else if (TREE_CODE (arg0) == VECTOR_CST)
8518 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8519 int count = TYPE_VECTOR_SUBPARTS (type), i;
8521 for (i = 0; i < count; i++)
8525 elem = TREE_VALUE (elements);
8526 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8527 if (elem == NULL_TREE)
8529 elements = TREE_CHAIN (elements);
8532 elem = build_int_cst (TREE_TYPE (type), -1);
8533 list = tree_cons (NULL_TREE, elem, list);
8536 return build_vector (type, nreverse (list));
8541 case TRUTH_NOT_EXPR:
8542 /* The argument to invert_truthvalue must have Boolean type. */
8543 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8544 arg0 = fold_convert (boolean_type_node, arg0);
8546 /* Note that the operand of this must be an int
8547 and its values must be 0 or 1.
8548 ("true" is a fixed value perhaps depending on the language,
8549 but we don't handle values other than 1 correctly yet.) */
8550 tem = fold_truth_not_expr (arg0);
8553 return fold_convert (type, tem);
8556 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8557 return fold_convert (type, arg0);
8558 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8559 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8560 TREE_OPERAND (arg0, 1));
8561 if (TREE_CODE (arg0) == COMPLEX_CST)
8562 return fold_convert (type, TREE_REALPART (arg0));
8563 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8565 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8566 tem = fold_build2 (TREE_CODE (arg0), itype,
8567 fold_build1 (REALPART_EXPR, itype,
8568 TREE_OPERAND (arg0, 0)),
8569 fold_build1 (REALPART_EXPR, itype,
8570 TREE_OPERAND (arg0, 1)));
8571 return fold_convert (type, tem);
8573 if (TREE_CODE (arg0) == CONJ_EXPR)
8575 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8576 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8577 return fold_convert (type, tem);
8579 if (TREE_CODE (arg0) == CALL_EXPR)
8581 tree fn = get_callee_fndecl (arg0);
8582 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8583 switch (DECL_FUNCTION_CODE (fn))
8585 CASE_FLT_FN (BUILT_IN_CEXPI):
8586 fn = mathfn_built_in (type, BUILT_IN_COS);
8588 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8598 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8599 return fold_convert (type, integer_zero_node);
8600 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8601 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8602 TREE_OPERAND (arg0, 0));
8603 if (TREE_CODE (arg0) == COMPLEX_CST)
8604 return fold_convert (type, TREE_IMAGPART (arg0));
8605 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8607 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8608 tem = fold_build2 (TREE_CODE (arg0), itype,
8609 fold_build1 (IMAGPART_EXPR, itype,
8610 TREE_OPERAND (arg0, 0)),
8611 fold_build1 (IMAGPART_EXPR, itype,
8612 TREE_OPERAND (arg0, 1)));
8613 return fold_convert (type, tem);
8615 if (TREE_CODE (arg0) == CONJ_EXPR)
8617 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8618 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8619 return fold_convert (type, negate_expr (tem));
8621 if (TREE_CODE (arg0) == CALL_EXPR)
8623 tree fn = get_callee_fndecl (arg0);
8624 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8625 switch (DECL_FUNCTION_CODE (fn))
8627 CASE_FLT_FN (BUILT_IN_CEXPI):
8628 fn = mathfn_built_in (type, BUILT_IN_SIN);
8630 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8641 } /* switch (code) */
8645 /* If the operation was a conversion do _not_ mark a resulting constant
8646 with TREE_OVERFLOW if the original constant was not. These conversions
8647 have implementation defined behavior and retaining the TREE_OVERFLOW
8648 flag here would confuse later passes such as VRP. */
/* NOTE(review): several original lines are elided from this excerpt
   (the storage-class/return-type line, braces, the head of the `if'
   whose continued conditions appear below, and the final return).
   Comments document only what is visible.  */
8650 fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0)
/* Fold normally first; the overflow flag is patched up afterwards.  */
8652 tree res = fold_unary (code, type, op0);
/* Tail of an elided `if' head: the fix-up applies only when both the
   folded result and the operand are integer constants and CODE is a
   conversion (CONVERT_EXPR_CODE_P).  */
8654 && TREE_CODE (res) == INTEGER_CST
8655 && TREE_CODE (op0) == INTEGER_CST
8656 && CONVERT_EXPR_CODE_P (code))
/* Propagate the operand's overflow bit verbatim so that folding a
   conversion never introduces a TREE_OVERFLOW marking the original
   constant did not carry.  */
8657 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8662 /* Fold a binary expression of code CODE and type TYPE with operands
8663 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8664 Return the folded expression if folding is successful. Otherwise,
8665 return NULL_TREE. */
/* NOTE(review): this excerpt is missing lines (return type, braces,
   the else-branch after the COMPL_CODE selection, the final return).  */
8668 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
/* COMPL_CODE is the complementary extremum of CODE (MAX for MIN and
   vice versa); the four patterns below look for it in OP0 or OP1.  */
8670 enum tree_code compl_code;
8672 if (code == MIN_EXPR)
8673 compl_code = MAX_EXPR;
8674 else if (code == MAX_EXPR)
8675 compl_code = MIN_EXPR;
/* NOTE(review): the branch handling any other CODE is elided here;
   presumably it bails out -- TODO confirm against the full source.  */
8679 /* MIN (MAX (a, b), b) == b. */
8680 if (TREE_CODE (op0) == compl_code
8681 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8682 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8684 /* MIN (MAX (b, a), b) == b. */
8685 if (TREE_CODE (op0) == compl_code
8686 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
/* reorder_operands_p presumably verifies the dropped operand may be
   evaluated in a different order (no side effects) -- TODO confirm.  */
8687 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8688 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8690 /* MIN (a, MAX (a, b)) == a. */
8691 if (TREE_CODE (op1) == compl_code
8692 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8693 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8694 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8696 /* MIN (a, MAX (b, a)) == a. */
8697 if (TREE_CODE (op1) == compl_code
8698 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8699 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8700 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8705 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8706 by changing CODE to reduce the magnitude of constants involved in
8707 ARG0 of the comparison.
8708 Returns a canonicalized comparison tree if a simplification was
8709 possible, otherwise returns NULL_TREE.
8710 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8711 valid if signed overflow is undefined. */
/* NOTE(review): many lines of this function are elided in this excerpt
   (declarations such as sgn0 and the swap flag, braces, early returns,
   and the `code = ...' assignment inside each canonicalization branch).
   Comments below describe only the visible fragments.  */
8714 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8715 tree arg0, tree arg1,
8716 bool *strict_overflow_p)
8718 enum tree_code code0 = TREE_CODE (arg0);
8719 tree t, cst0 = NULL_TREE;
8723 /* Match A +- CST code arg1 and CST code arg1. We can change the
8724 first form only if overflow is undefined. */
8725 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8726 /* In principle pointers also have undefined overflow behavior,
8727 but that causes problems elsewhere. */
8728 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8729 && (code0 == MINUS_EXPR
8730 || code0 == PLUS_EXPR)
8731 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8732 || code0 == INTEGER_CST))
8735 /* Identify the constant in arg0 and its sign. */
8736 if (code0 == INTEGER_CST)
/* Non-INTEGER_CST case (elided `else' head presumed): the constant is
   the second operand of the PLUS/MINUS in arg0.  */
8739 cst0 = TREE_OPERAND (arg0, 1);
8740 sgn0 = tree_int_cst_sgn (cst0);
8742 /* Overflowed constants and zero will cause problems. */
8743 if (integer_zerop (cst0)
8744 || TREE_OVERFLOW (cst0))
8747 /* See if we can reduce the magnitude of the constant in
8748 arg0 by changing the comparison code. */
8749 if (code0 == INTEGER_CST)
/* Each branch below selects a strictness-flipped comparison code;
   the assignments themselves are elided from this excerpt.  */
8751 /* CST <= arg1 -> CST-1 < arg1. */
8752 if (code == LE_EXPR && sgn0 == 1)
8754 /* -CST < arg1 -> -CST-1 <= arg1. */
8755 else if (code == LT_EXPR && sgn0 == -1)
8757 /* CST > arg1 -> CST-1 >= arg1. */
8758 else if (code == GT_EXPR && sgn0 == 1)
8760 /* -CST >= arg1 -> -CST-1 > arg1. */
8761 else if (code == GE_EXPR && sgn0 == -1)
8765 /* arg1 code' CST' might be more canonical. */
8770 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8772 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8774 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8775 else if (code == GT_EXPR
8776 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8778 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8779 else if (code == LE_EXPR
8780 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8782 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8783 else if (code == GE_EXPR
8784 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
/* Any A +- CST transformation relies on signed overflow being
   undefined; tell the caller so it can warn under -Wstrict-overflow.  */
8788 *strict_overflow_p = true;
8791 /* Now build the constant reduced in magnitude. But not if that
8792 would produce one outside of its types range. */
8793 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8795 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8796 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8798 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8799 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8800 /* We cannot swap the comparison here as that would cause us to
8801 endlessly recurse. */
/* Reduce |CST0| by one: add 1 to a negative constant, subtract 1 from
   a positive one.  */
8804 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8805 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
/* For the A +- CST form, rebuild the arithmetic with the reduced
   constant as the new left-hand side.  */
8806 if (code0 != INTEGER_CST)
8807 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8809 /* If swapping might yield to a more canonical form, do so. */
8811 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8813 return fold_build2 (code, type, t, arg1);
8816 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8817 overflow further. Try to decrease the magnitude of constants involved
8818 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8819 and put sole constants at the second argument position.
8820 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
/* NOTE(review): the declaration-specifier line, braces, `tree t;' and
   the returns after each attempt are elided from this excerpt.  */
8823 maybe_canonicalize_comparison (enum tree_code code, tree type,
8824 tree arg0, tree arg1)
8827 bool strict_overflow_p;
/* Shared -Wstrict-overflow message for both canonicalization attempts.  */
8828 const char * const warnmsg = G_("assuming signed overflow does not occur "
8829 "when reducing constant in comparison");
8831 /* Try canonicalization by simplifying arg0. */
8832 strict_overflow_p = false;
8833 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8834 &strict_overflow_p);
/* Success path (the surrounding `if (t)' is elided): warn only when the
   helper reported reliance on undefined signed overflow.  */
8837 if (strict_overflow_p)
8838 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8842 /* Try canonicalization by simplifying arg1 using the swapped
/* Second attempt: swap the comparison so the helper can operate on
   ARG1, passing the operands in reversed order.  */
8844 code = swap_tree_comparison (code);
8845 strict_overflow_p = false;
8846 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8847 &strict_overflow_p);
8848 if (t && strict_overflow_p)
8849 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8853 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8854 space. This is used to avoid issuing overflow warnings for
8855 expressions like &p->x which can not wrap. */
/* NOTE(review): this excerpt is missing the declaration-specifier line,
   braces and several early `return' statements (after the POINTER_TYPE_P,
   NULL offset, non-constant offset, add_double overflow, total_high and
   negative-size checks).  Comments describe only the visible fragments.  */
8858 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
/* Double-word accumulators: OFFSET split into low/high words plus the
   byte offset derived from BITPOS.  */
8860 unsigned HOST_WIDE_INT offset_low, total_low;
8861 HOST_WIDE_INT size, offset_high, total_high;
8863 if (!POINTER_TYPE_P (TREE_TYPE (base)))
/* A missing offset contributes nothing; only the bit position matters.  */
8869 if (offset == NULL_TREE)
/* Non-constant or overflowed offsets cannot be analyzed here.  */
8874 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8878 offset_low = TREE_INT_CST_LOW (offset);
8879 offset_high = TREE_INT_CST_HIGH (offset);
/* Add the byte equivalent of BITPOS with double-word precision; the
   elided argument presumably receives an unsigned-overflow flag --
   TODO confirm add_double_with_sign's trailing parameters.  */
8882 if (add_double_with_sign (offset_low, offset_high,
8883 bitpos / BITS_PER_UNIT, 0,
8884 &total_low, &total_high,
/* Any carry into the high word means the total exceeds a single word.  */
8888 if (total_high != 0)
/* SIZE is the size of the pointed-to object type in bytes.  */
8891 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8895 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8897 if (TREE_CODE (base) == ADDR_EXPR)
8899 HOST_WIDE_INT base_size;
8901 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
/* Prefer the larger size of the actual object over the pointed-to
   type's size (the assignment to SIZE is elided here).  */
8902 if (base_size > 0 && size < base_size)
/* The access may wrap iff the combined offset exceeds the object size.  */
8906 return total_low > (unsigned HOST_WIDE_INT) size;
8909 /* Subroutine of fold_binary. This routine performs all of the
8910 transformations that are common to the equality/inequality
8911 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8912 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8913 fold_binary should call fold_binary. Fold a comparison with
8914 tree code CODE and type TYPE with operands OP0 and OP1. Return
8915 the folded comparison or NULL_TREE. */
8918 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8920 tree arg0, arg1, tem;
8925 STRIP_SIGN_NOPS (arg0);
8926 STRIP_SIGN_NOPS (arg1);
8928 tem = fold_relational_const (code, type, arg0, arg1);
8929 if (tem != NULL_TREE)
8932 /* If one arg is a real or integer constant, put it last. */
8933 if (tree_swap_operands_p (arg0, arg1, true))
8934 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8936 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8937 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8938 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8939 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8940 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8941 && (TREE_CODE (arg1) == INTEGER_CST
8942 && !TREE_OVERFLOW (arg1)))
8944 tree const1 = TREE_OPERAND (arg0, 1);
8946 tree variable = TREE_OPERAND (arg0, 0);
8949 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8951 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8952 TREE_TYPE (arg1), const2, const1);
8954 /* If the constant operation overflowed this can be
8955 simplified as a comparison against INT_MAX/INT_MIN. */
8956 if (TREE_CODE (lhs) == INTEGER_CST
8957 && TREE_OVERFLOW (lhs))
8959 int const1_sgn = tree_int_cst_sgn (const1);
8960 enum tree_code code2 = code;
8962 /* Get the sign of the constant on the lhs if the
8963 operation were VARIABLE + CONST1. */
8964 if (TREE_CODE (arg0) == MINUS_EXPR)
8965 const1_sgn = -const1_sgn;
8967 /* The sign of the constant determines if we overflowed
8968 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8969 Canonicalize to the INT_MIN overflow by swapping the comparison
8971 if (const1_sgn == -1)
8972 code2 = swap_tree_comparison (code);
8974 /* We now can look at the canonicalized case
8975 VARIABLE + 1 CODE2 INT_MIN
8976 and decide on the result. */
8977 if (code2 == LT_EXPR
8979 || code2 == EQ_EXPR)
8980 return omit_one_operand (type, boolean_false_node, variable);
8981 else if (code2 == NE_EXPR
8983 || code2 == GT_EXPR)
8984 return omit_one_operand (type, boolean_true_node, variable);
8987 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8988 && (TREE_CODE (lhs) != INTEGER_CST
8989 || !TREE_OVERFLOW (lhs)))
8991 fold_overflow_warning (("assuming signed overflow does not occur "
8992 "when changing X +- C1 cmp C2 to "
8994 WARN_STRICT_OVERFLOW_COMPARISON);
8995 return fold_build2 (code, type, variable, lhs);
8999 /* For comparisons of pointers we can decompose it to a compile time
9000 comparison of the base objects and the offsets into the object.
9001 This requires at least one operand being an ADDR_EXPR or a
9002 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9003 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9004 && (TREE_CODE (arg0) == ADDR_EXPR
9005 || TREE_CODE (arg1) == ADDR_EXPR
9006 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9007 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9009 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9010 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9011 enum machine_mode mode;
9012 int volatilep, unsignedp;
9013 bool indirect_base0 = false, indirect_base1 = false;
9015 /* Get base and offset for the access. Strip ADDR_EXPR for
9016 get_inner_reference, but put it back by stripping INDIRECT_REF
9017 off the base object if possible. indirect_baseN will be true
9018 if baseN is not an address but refers to the object itself. */
9020 if (TREE_CODE (arg0) == ADDR_EXPR)
9022 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9023 &bitsize, &bitpos0, &offset0, &mode,
9024 &unsignedp, &volatilep, false);
9025 if (TREE_CODE (base0) == INDIRECT_REF)
9026 base0 = TREE_OPERAND (base0, 0);
9028 indirect_base0 = true;
9030 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9032 base0 = TREE_OPERAND (arg0, 0);
9033 offset0 = TREE_OPERAND (arg0, 1);
9037 if (TREE_CODE (arg1) == ADDR_EXPR)
9039 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9040 &bitsize, &bitpos1, &offset1, &mode,
9041 &unsignedp, &volatilep, false);
9042 if (TREE_CODE (base1) == INDIRECT_REF)
9043 base1 = TREE_OPERAND (base1, 0);
9045 indirect_base1 = true;
9047 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9049 base1 = TREE_OPERAND (arg1, 0);
9050 offset1 = TREE_OPERAND (arg1, 1);
9053 /* If we have equivalent bases we might be able to simplify. */
9054 if (indirect_base0 == indirect_base1
9055 && operand_equal_p (base0, base1, 0))
9057 /* We can fold this expression to a constant if the non-constant
9058 offset parts are equal. */
9059 if ((offset0 == offset1
9060 || (offset0 && offset1
9061 && operand_equal_p (offset0, offset1, 0)))
9064 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9069 && bitpos0 != bitpos1
9070 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9071 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9072 fold_overflow_warning (("assuming pointer wraparound does not "
9073 "occur when comparing P +- C1 with "
9075 WARN_STRICT_OVERFLOW_CONDITIONAL);
9080 return constant_boolean_node (bitpos0 == bitpos1, type);
9082 return constant_boolean_node (bitpos0 != bitpos1, type);
9084 return constant_boolean_node (bitpos0 < bitpos1, type);
9086 return constant_boolean_node (bitpos0 <= bitpos1, type);
9088 return constant_boolean_node (bitpos0 >= bitpos1, type);
9090 return constant_boolean_node (bitpos0 > bitpos1, type);
9094 /* We can simplify the comparison to a comparison of the variable
9095 offset parts if the constant offset parts are equal.
9096 Be careful to use signed size type here because otherwise we
9097 mess with array offsets in the wrong way. This is possible
9098 because pointer arithmetic is restricted to retain within an
9099 object and overflow on pointer differences is undefined as of
9100 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9101 else if (bitpos0 == bitpos1
9102 && ((code == EQ_EXPR || code == NE_EXPR)
9103 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9105 tree signed_size_type_node;
9106 signed_size_type_node = signed_type_for (size_type_node);
9108 /* By converting to signed size type we cover middle-end pointer
9109 arithmetic which operates on unsigned pointer types of size
9110 type size and ARRAY_REF offsets which are properly sign or
9111 zero extended from their type in case it is narrower than
9113 if (offset0 == NULL_TREE)
9114 offset0 = build_int_cst (signed_size_type_node, 0);
9116 offset0 = fold_convert (signed_size_type_node, offset0);
9117 if (offset1 == NULL_TREE)
9118 offset1 = build_int_cst (signed_size_type_node, 0);
9120 offset1 = fold_convert (signed_size_type_node, offset1);
9124 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9125 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9126 fold_overflow_warning (("assuming pointer wraparound does not "
9127 "occur when comparing P +- C1 with "
9129 WARN_STRICT_OVERFLOW_COMPARISON);
9131 return fold_build2 (code, type, offset0, offset1);
9134 /* For non-equal bases we can simplify if they are addresses
9135 of local binding decls or constants. */
9136 else if (indirect_base0 && indirect_base1
9137 /* We know that !operand_equal_p (base0, base1, 0)
9138 because the if condition was false. But make
9139 sure two decls are not the same. */
9141 && TREE_CODE (arg0) == ADDR_EXPR
9142 && TREE_CODE (arg1) == ADDR_EXPR
9143 && (((TREE_CODE (base0) == VAR_DECL
9144 || TREE_CODE (base0) == PARM_DECL)
9145 && (targetm.binds_local_p (base0)
9146 || CONSTANT_CLASS_P (base1)))
9147 || CONSTANT_CLASS_P (base0))
9148 && (((TREE_CODE (base1) == VAR_DECL
9149 || TREE_CODE (base1) == PARM_DECL)
9150 && (targetm.binds_local_p (base1)
9151 || CONSTANT_CLASS_P (base0)))
9152 || CONSTANT_CLASS_P (base1)))
9154 if (code == EQ_EXPR)
9155 return omit_two_operands (type, boolean_false_node, arg0, arg1);
9156 else if (code == NE_EXPR)
9157 return omit_two_operands (type, boolean_true_node, arg0, arg1);
9159 /* For equal offsets we can simplify to a comparison of the
9161 else if (bitpos0 == bitpos1
9163 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9165 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9166 && ((offset0 == offset1)
9167 || (offset0 && offset1
9168 && operand_equal_p (offset0, offset1, 0))))
9171 base0 = fold_addr_expr (base0);
9173 base1 = fold_addr_expr (base1);
9174 return fold_build2 (code, type, base0, base1);
9178 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9179 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9180 the resulting offset is smaller in absolute value than the
9182 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9183 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9184 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9185 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9186 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9187 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9188 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9190 tree const1 = TREE_OPERAND (arg0, 1);
9191 tree const2 = TREE_OPERAND (arg1, 1);
9192 tree variable1 = TREE_OPERAND (arg0, 0);
9193 tree variable2 = TREE_OPERAND (arg1, 0);
9195 const char * const warnmsg = G_("assuming signed overflow does not "
9196 "occur when combining constants around "
9199 /* Put the constant on the side where it doesn't overflow and is
9200 of lower absolute value than before. */
9201 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9202 ? MINUS_EXPR : PLUS_EXPR,
9204 if (!TREE_OVERFLOW (cst)
9205 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9207 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9208 return fold_build2 (code, type,
9210 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
9214 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9215 ? MINUS_EXPR : PLUS_EXPR,
9217 if (!TREE_OVERFLOW (cst)
9218 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9220 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9221 return fold_build2 (code, type,
9222 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
9228 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9229 signed arithmetic case. That form is created by the compiler
9230 often enough for folding it to be of value. One example is in
9231 computing loop trip counts after Operator Strength Reduction. */
9232 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9233 && TREE_CODE (arg0) == MULT_EXPR
9234 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9235 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9236 && integer_zerop (arg1))
9238 tree const1 = TREE_OPERAND (arg0, 1);
9239 tree const2 = arg1; /* zero */
9240 tree variable1 = TREE_OPERAND (arg0, 0);
9241 enum tree_code cmp_code = code;
9243 gcc_assert (!integer_zerop (const1));
9245 fold_overflow_warning (("assuming signed overflow does not occur when "
9246 "eliminating multiplication in comparison "
9248 WARN_STRICT_OVERFLOW_COMPARISON);
9250 /* If const1 is negative we swap the sense of the comparison. */
9251 if (tree_int_cst_sgn (const1) < 0)
9252 cmp_code = swap_tree_comparison (cmp_code);
9254 return fold_build2 (cmp_code, type, variable1, const2);
9257 tem = maybe_canonicalize_comparison (code, type, op0, op1);
9261 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9263 tree targ0 = strip_float_extensions (arg0);
9264 tree targ1 = strip_float_extensions (arg1);
9265 tree newtype = TREE_TYPE (targ0);
9267 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9268 newtype = TREE_TYPE (targ1);
9270 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9271 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9272 return fold_build2 (code, type, fold_convert (newtype, targ0),
9273 fold_convert (newtype, targ1));
9275 /* (-a) CMP (-b) -> b CMP a */
9276 if (TREE_CODE (arg0) == NEGATE_EXPR
9277 && TREE_CODE (arg1) == NEGATE_EXPR)
9278 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9279 TREE_OPERAND (arg0, 0));
9281 if (TREE_CODE (arg1) == REAL_CST)
9283 REAL_VALUE_TYPE cst;
9284 cst = TREE_REAL_CST (arg1);
9286 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9287 if (TREE_CODE (arg0) == NEGATE_EXPR)
9288 return fold_build2 (swap_tree_comparison (code), type,
9289 TREE_OPERAND (arg0, 0),
9290 build_real (TREE_TYPE (arg1),
9291 REAL_VALUE_NEGATE (cst)));
9293 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9294 /* a CMP (-0) -> a CMP 0 */
9295 if (REAL_VALUE_MINUS_ZERO (cst))
9296 return fold_build2 (code, type, arg0,
9297 build_real (TREE_TYPE (arg1), dconst0));
9299 /* x != NaN is always true, other ops are always false. */
9300 if (REAL_VALUE_ISNAN (cst)
9301 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9303 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9304 return omit_one_operand (type, tem, arg0);
9307 /* Fold comparisons against infinity. */
9308 if (REAL_VALUE_ISINF (cst)
9309 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9311 tem = fold_inf_compare (code, type, arg0, arg1);
9312 if (tem != NULL_TREE)
9317 /* If this is a comparison of a real constant with a PLUS_EXPR
9318 or a MINUS_EXPR of a real constant, we can convert it into a
9319 comparison with a revised real constant as long as no overflow
9320 occurs when unsafe_math_optimizations are enabled. */
9321 if (flag_unsafe_math_optimizations
9322 && TREE_CODE (arg1) == REAL_CST
9323 && (TREE_CODE (arg0) == PLUS_EXPR
9324 || TREE_CODE (arg0) == MINUS_EXPR)
9325 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9326 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9327 ? MINUS_EXPR : PLUS_EXPR,
9328 arg1, TREE_OPERAND (arg0, 1), 0))
9329 && !TREE_OVERFLOW (tem))
9330 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9332 /* Likewise, we can simplify a comparison of a real constant with
9333 a MINUS_EXPR whose first operand is also a real constant, i.e.
9334 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9335 floating-point types only if -fassociative-math is set. */
9336 if (flag_associative_math
9337 && TREE_CODE (arg1) == REAL_CST
9338 && TREE_CODE (arg0) == MINUS_EXPR
9339 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9340 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9342 && !TREE_OVERFLOW (tem))
9343 return fold_build2 (swap_tree_comparison (code), type,
9344 TREE_OPERAND (arg0, 1), tem);
9346 /* Fold comparisons against built-in math functions. */
9347 if (TREE_CODE (arg1) == REAL_CST
9348 && flag_unsafe_math_optimizations
9349 && ! flag_errno_math)
9351 enum built_in_function fcode = builtin_mathfn_code (arg0);
9353 if (fcode != END_BUILTINS)
9355 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9356 if (tem != NULL_TREE)
9362 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9363 && CONVERT_EXPR_P (arg0))
9365 /* If we are widening one operand of an integer comparison,
9366 see if the other operand is similarly being widened. Perhaps we
9367 can do the comparison in the narrower type. */
9368 tem = fold_widened_comparison (code, type, arg0, arg1);
9372 /* Or if we are changing signedness. */
9373 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9378 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9379 constant, we can simplify it. */
9380 if (TREE_CODE (arg1) == INTEGER_CST
9381 && (TREE_CODE (arg0) == MIN_EXPR
9382 || TREE_CODE (arg0) == MAX_EXPR)
9383 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9385 tem = optimize_minmax_comparison (code, type, op0, op1);
9390 /* Simplify comparison of something with itself. (For IEEE
9391 floating-point, we can only do some of these simplifications.) */
9392 if (operand_equal_p (arg0, arg1, 0))
9397 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9398 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9399 return constant_boolean_node (1, type);
9404 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9405 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9406 return constant_boolean_node (1, type);
9407 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9410 /* For NE, we can only do this simplification if integer
9411 or we don't honor IEEE floating point NaNs. */
9412 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9413 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9415 /* ... fall through ... */
9418 return constant_boolean_node (0, type);
9424 /* If we are comparing an expression that just has comparisons
9425 of two integer values, arithmetic expressions of those comparisons,
9426 and constants, we can simplify it. There are only three cases
9427 to check: the two values can either be equal, the first can be
9428 greater, or the second can be greater. Fold the expression for
9429 those three values. Since each value must be 0 or 1, we have
9430 eight possibilities, each of which corresponds to the constant 0
9431 or 1 or one of the six possible comparisons.
9433 This handles common cases like (a > b) == 0 but also handles
9434 expressions like ((x > y) - (y > x)) > 0, which supposedly
9435 occur in macroized code. */
9437 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9439 tree cval1 = 0, cval2 = 0;
9442 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9443 /* Don't handle degenerate cases here; they should already
9444 have been handled anyway. */
9445 && cval1 != 0 && cval2 != 0
9446 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9447 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9448 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9449 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9450 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9451 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9452 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9454 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9455 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9457 /* We can't just pass T to eval_subst in case cval1 or cval2
9458 was the same as ARG1. */
9461 = fold_build2 (code, type,
9462 eval_subst (arg0, cval1, maxval,
9466 = fold_build2 (code, type,
9467 eval_subst (arg0, cval1, maxval,
9471 = fold_build2 (code, type,
9472 eval_subst (arg0, cval1, minval,
9476 /* All three of these results should be 0 or 1. Confirm they are.
9477 Then use those values to select the proper code to use. */
9479 if (TREE_CODE (high_result) == INTEGER_CST
9480 && TREE_CODE (equal_result) == INTEGER_CST
9481 && TREE_CODE (low_result) == INTEGER_CST)
9483 /* Make a 3-bit mask with the high-order bit being the
9484 value for `>', the next for '=', and the low for '<'. */
9485 switch ((integer_onep (high_result) * 4)
9486 + (integer_onep (equal_result) * 2)
9487 + integer_onep (low_result))
9491 return omit_one_operand (type, integer_zero_node, arg0);
9512 return omit_one_operand (type, integer_one_node, arg0);
9516 return save_expr (build2 (code, type, cval1, cval2));
9517 return fold_build2 (code, type, cval1, cval2);
9522 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9523 into a single range test. */
9524 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9525 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9526 && TREE_CODE (arg1) == INTEGER_CST
9527 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9528 && !integer_zerop (TREE_OPERAND (arg0, 1))
9529 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9530 && !TREE_OVERFLOW (arg1))
9532 tem = fold_div_compare (code, type, arg0, arg1);
9533 if (tem != NULL_TREE)
9537 /* Fold ~X op ~Y as Y op X. */
9538 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9539 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9541 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9542 return fold_build2 (code, type,
9543 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9544 TREE_OPERAND (arg0, 0));
9547 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9548 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9549 && TREE_CODE (arg1) == INTEGER_CST)
9551 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9552 return fold_build2 (swap_tree_comparison (code), type,
9553 TREE_OPERAND (arg0, 0),
9554 fold_build1 (BIT_NOT_EXPR, cmp_type,
9555 fold_convert (cmp_type, arg1)));
9562 /* Subroutine of fold_binary. Optimize complex multiplications of the
9563 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9564 argument EXPR represents the expression "z" of type TYPE. */
9567 fold_mult_zconjz (tree type, tree expr)
9569 tree itype = TREE_TYPE (type);
9570 tree rpart, ipart, tem;
9572 if (TREE_CODE (expr) == COMPLEX_EXPR)
9574 rpart = TREE_OPERAND (expr, 0);
9575 ipart = TREE_OPERAND (expr, 1);
9577 else if (TREE_CODE (expr) == COMPLEX_CST)
9579 rpart = TREE_REALPART (expr);
9580 ipart = TREE_IMAGPART (expr);
9584 expr = save_expr (expr);
9585 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9586 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9589 rpart = save_expr (rpart);
9590 ipart = save_expr (ipart);
9591 tem = fold_build2 (PLUS_EXPR, itype,
9592 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9593 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9594 return fold_build2 (COMPLEX_EXPR, type, tem,
9595 fold_convert (itype, integer_zero_node));
9599 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9600 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9601 guarantees that P and N have the same least significant log2(M) bits.
9602 N is not otherwise constrained. In particular, N is not normalized to
9603 0 <= N < M as is common. In general, the precise value of P is unknown.
9604 M is chosen as large as possible such that constant N can be determined.
9606 Returns M and sets *RESIDUE to N. */
static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
  enum tree_code code;
  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
      /* &object: the object's declared alignment bounds the modulus, and
	 any constant byte offset into it contributes to the residue.  */
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
	  HOST_WIDE_INT bitsize, bitpos;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
	  /* Strip component references down to the base object, collecting
	     the constant bit position of the reference on the way.  */
	  expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  *residue = bitpos / BITS_PER_UNIT;
	      /* A constant variable offset also folds into the residue.  */
	      if (TREE_CODE (offset) == INTEGER_CST)
		*residue += TREE_INT_CST_LOW (offset);
	      /* We don't handle more complicated offset expressions.  */
      /* The declared alignment of a decl, in bytes, is a valid modulus.
	 FUNCTION_DECL alignment is excluded here.  */
      if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
	return DECL_ALIGN_UNIT (expr);
  else if (code == POINTER_PLUS_EXPR)
      /* P p+ N: recurse on P, then fold N's contribution into the
	 residue (constant N) or the modulus (N a multiple of a
	 power of two).  */
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;
      op0 = TREE_OPERAND (expr, 0);
      modulus = get_pointer_modulus_and_residue (op0, residue);
      op1 = TREE_OPERAND (expr, 1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	  /* Adding a constant shifts the residue but keeps the modulus.  */
	  *residue += TREE_INT_CST_LOW (op1);
      else if (inner_code == MULT_EXPR)
	  /* X * CST: only the constant factor matters for alignment.  */
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	      unsigned HOST_WIDE_INT align;
	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      modulus = MIN (modulus, align);
  /* If we get here, we were unable to determine anything useful about the
9690 /* Fold a binary expression of code CODE and type TYPE with operands
9691 OP0 and OP1. Return the folded expression if folding is
9692 successful. Otherwise, return NULL_TREE. */
9695 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9697 enum tree_code_class kind = TREE_CODE_CLASS (code);
9698 tree arg0, arg1, tem;
9699 tree t1 = NULL_TREE;
9700 bool strict_overflow_p;
9702 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9703 && TREE_CODE_LENGTH (code) == 2
9705 && op1 != NULL_TREE);
9710 /* Strip any conversions that don't change the mode. This is
9711 safe for every expression, except for a comparison expression
9712 because its signedness is derived from its operands. So, in
9713 the latter case, only strip conversions that don't change the
9714 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9717 Note that this is done as an internal manipulation within the
9718 constant folder, in order to find the simplest representation
9719 of the arguments so that their form can be studied. In any
9720 cases, the appropriate type conversions should be put back in
9721 the tree that will get out of the constant folder. */
9723 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9725 STRIP_SIGN_NOPS (arg0);
9726 STRIP_SIGN_NOPS (arg1);
9734 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9735 constant but we can't do arithmetic on them. */
9736 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9737 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9738 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9739 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9740 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9741 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9743 if (kind == tcc_binary)
9745 /* Make sure type and arg0 have the same saturating flag. */
9746 gcc_assert (TYPE_SATURATING (type)
9747 == TYPE_SATURATING (TREE_TYPE (arg0)));
9748 tem = const_binop (code, arg0, arg1, 0);
9750 else if (kind == tcc_comparison)
9751 tem = fold_relational_const (code, type, arg0, arg1);
9755 if (tem != NULL_TREE)
9757 if (TREE_TYPE (tem) != type)
9758 tem = fold_convert (type, tem);
9763 /* If this is a commutative operation, and ARG0 is a constant, move it
9764 to ARG1 to reduce the number of tests below. */
9765 if (commutative_tree_code (code)
9766 && tree_swap_operands_p (arg0, arg1, true))
9767 return fold_build2 (code, type, op1, op0);
9769 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9771 First check for cases where an arithmetic operation is applied to a
9772 compound, conditional, or comparison operation. Push the arithmetic
9773 operation inside the compound or conditional to see if any folding
9774 can then be done. Convert comparison to conditional for this purpose.
9775 The also optimizes non-constant cases that used to be done in
9778 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9779 one of the operands is a comparison and the other is a comparison, a
9780 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9781 code below would make the expression more complex. Change it to a
9782 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9783 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9785 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9786 || code == EQ_EXPR || code == NE_EXPR)
9787 && ((truth_value_p (TREE_CODE (arg0))
9788 && (truth_value_p (TREE_CODE (arg1))
9789 || (TREE_CODE (arg1) == BIT_AND_EXPR
9790 && integer_onep (TREE_OPERAND (arg1, 1)))))
9791 || (truth_value_p (TREE_CODE (arg1))
9792 && (truth_value_p (TREE_CODE (arg0))
9793 || (TREE_CODE (arg0) == BIT_AND_EXPR
9794 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9796 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9797 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9800 fold_convert (boolean_type_node, arg0),
9801 fold_convert (boolean_type_node, arg1));
9803 if (code == EQ_EXPR)
9804 tem = invert_truthvalue (tem);
9806 return fold_convert (type, tem);
9809 if (TREE_CODE_CLASS (code) == tcc_binary
9810 || TREE_CODE_CLASS (code) == tcc_comparison)
9812 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9813 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9814 fold_build2 (code, type,
9815 fold_convert (TREE_TYPE (op0),
9816 TREE_OPERAND (arg0, 1)),
9818 if (TREE_CODE (arg1) == COMPOUND_EXPR
9819 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9820 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9821 fold_build2 (code, type, op0,
9822 fold_convert (TREE_TYPE (op1),
9823 TREE_OPERAND (arg1, 1))));
9825 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9827 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9829 /*cond_first_p=*/1);
9830 if (tem != NULL_TREE)
9834 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9836 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9838 /*cond_first_p=*/0);
9839 if (tem != NULL_TREE)
9846 case POINTER_PLUS_EXPR:
9847 /* 0 +p index -> (type)index */
9848 if (integer_zerop (arg0))
9849 return non_lvalue (fold_convert (type, arg1));
9851 /* PTR +p 0 -> PTR */
9852 if (integer_zerop (arg1))
9853 return non_lvalue (fold_convert (type, arg0));
9855 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9856 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9857 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9858 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9859 fold_convert (sizetype, arg1),
9860 fold_convert (sizetype, arg0)));
9862 /* index +p PTR -> PTR +p index */
9863 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9864 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9865 return fold_build2 (POINTER_PLUS_EXPR, type,
9866 fold_convert (type, arg1),
9867 fold_convert (sizetype, arg0));
9869 /* (PTR +p B) +p A -> PTR +p (B + A) */
9870 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9873 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9874 tree arg00 = TREE_OPERAND (arg0, 0);
9875 inner = fold_build2 (PLUS_EXPR, sizetype,
9876 arg01, fold_convert (sizetype, arg1));
9877 return fold_convert (type,
9878 fold_build2 (POINTER_PLUS_EXPR,
9879 TREE_TYPE (arg00), arg00, inner));
9882 /* PTR_CST +p CST -> CST1 */
9883 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9884 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9886 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9887 of the array. Loop optimizer sometimes produce this type of
9889 if (TREE_CODE (arg0) == ADDR_EXPR)
9891 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9893 return fold_convert (type, tem);
9899 /* A + (-B) -> A - B */
9900 if (TREE_CODE (arg1) == NEGATE_EXPR)
9901 return fold_build2 (MINUS_EXPR, type,
9902 fold_convert (type, arg0),
9903 fold_convert (type, TREE_OPERAND (arg1, 0)));
9904 /* (-A) + B -> B - A */
9905 if (TREE_CODE (arg0) == NEGATE_EXPR
9906 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9907 return fold_build2 (MINUS_EXPR, type,
9908 fold_convert (type, arg1),
9909 fold_convert (type, TREE_OPERAND (arg0, 0)));
9911 if (INTEGRAL_TYPE_P (type))
9913 /* Convert ~A + 1 to -A. */
9914 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9915 && integer_onep (arg1))
9916 return fold_build1 (NEGATE_EXPR, type,
9917 fold_convert (type, TREE_OPERAND (arg0, 0)));
9920 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9921 && !TYPE_OVERFLOW_TRAPS (type))
9923 tree tem = TREE_OPERAND (arg0, 0);
9926 if (operand_equal_p (tem, arg1, 0))
9928 t1 = build_int_cst_type (type, -1);
9929 return omit_one_operand (type, t1, arg1);
9934 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9935 && !TYPE_OVERFLOW_TRAPS (type))
9937 tree tem = TREE_OPERAND (arg1, 0);
9940 if (operand_equal_p (arg0, tem, 0))
9942 t1 = build_int_cst_type (type, -1);
9943 return omit_one_operand (type, t1, arg0);
9947 /* X + (X / CST) * -CST is X % CST. */
9948 if (TREE_CODE (arg1) == MULT_EXPR
9949 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9950 && operand_equal_p (arg0,
9951 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9953 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9954 tree cst1 = TREE_OPERAND (arg1, 1);
9955 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9956 if (sum && integer_zerop (sum))
9957 return fold_convert (type,
9958 fold_build2 (TRUNC_MOD_EXPR,
9959 TREE_TYPE (arg0), arg0, cst0));
9963 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9964 same or one. Make sure type is not saturating.
9965 fold_plusminus_mult_expr will re-associate. */
9966 if ((TREE_CODE (arg0) == MULT_EXPR
9967 || TREE_CODE (arg1) == MULT_EXPR)
9968 && !TYPE_SATURATING (type)
9969 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9971 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9976 if (! FLOAT_TYPE_P (type))
9978 if (integer_zerop (arg1))
9979 return non_lvalue (fold_convert (type, arg0));
9981 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9982 with a constant, and the two constants have no bits in common,
9983 we should treat this as a BIT_IOR_EXPR since this may produce more
9985 if (TREE_CODE (arg0) == BIT_AND_EXPR
9986 && TREE_CODE (arg1) == BIT_AND_EXPR
9987 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9988 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9989 && integer_zerop (const_binop (BIT_AND_EXPR,
9990 TREE_OPERAND (arg0, 1),
9991 TREE_OPERAND (arg1, 1), 0)))
9993 code = BIT_IOR_EXPR;
9997 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9998 (plus (plus (mult) (mult)) (foo)) so that we can
9999 take advantage of the factoring cases below. */
10000 if (((TREE_CODE (arg0) == PLUS_EXPR
10001 || TREE_CODE (arg0) == MINUS_EXPR)
10002 && TREE_CODE (arg1) == MULT_EXPR)
10003 || ((TREE_CODE (arg1) == PLUS_EXPR
10004 || TREE_CODE (arg1) == MINUS_EXPR)
10005 && TREE_CODE (arg0) == MULT_EXPR))
10007 tree parg0, parg1, parg, marg;
10008 enum tree_code pcode;
10010 if (TREE_CODE (arg1) == MULT_EXPR)
10011 parg = arg0, marg = arg1;
10013 parg = arg1, marg = arg0;
10014 pcode = TREE_CODE (parg);
10015 parg0 = TREE_OPERAND (parg, 0);
10016 parg1 = TREE_OPERAND (parg, 1);
10017 STRIP_NOPS (parg0);
10018 STRIP_NOPS (parg1);
10020 if (TREE_CODE (parg0) == MULT_EXPR
10021 && TREE_CODE (parg1) != MULT_EXPR)
10022 return fold_build2 (pcode, type,
10023 fold_build2 (PLUS_EXPR, type,
10024 fold_convert (type, parg0),
10025 fold_convert (type, marg)),
10026 fold_convert (type, parg1));
10027 if (TREE_CODE (parg0) != MULT_EXPR
10028 && TREE_CODE (parg1) == MULT_EXPR)
10029 return fold_build2 (PLUS_EXPR, type,
10030 fold_convert (type, parg0),
10031 fold_build2 (pcode, type,
10032 fold_convert (type, marg),
10033 fold_convert (type,
10039 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10040 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10041 return non_lvalue (fold_convert (type, arg0));
10043 /* Likewise if the operands are reversed. */
10044 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10045 return non_lvalue (fold_convert (type, arg1));
10047 /* Convert X + -C into X - C. */
10048 if (TREE_CODE (arg1) == REAL_CST
10049 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10051 tem = fold_negate_const (arg1, type);
10052 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10053 return fold_build2 (MINUS_EXPR, type,
10054 fold_convert (type, arg0),
10055 fold_convert (type, tem));
10058 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10059 to __complex__ ( x, y ). This is not the same for SNaNs or
10060 if signed zeros are involved. */
10061 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10062 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10063 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10065 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10066 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10067 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10068 bool arg0rz = false, arg0iz = false;
10069 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10070 || (arg0i && (arg0iz = real_zerop (arg0i))))
10072 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10073 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10074 if (arg0rz && arg1i && real_zerop (arg1i))
10076 tree rp = arg1r ? arg1r
10077 : build1 (REALPART_EXPR, rtype, arg1);
10078 tree ip = arg0i ? arg0i
10079 : build1 (IMAGPART_EXPR, rtype, arg0);
10080 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10082 else if (arg0iz && arg1r && real_zerop (arg1r))
10084 tree rp = arg0r ? arg0r
10085 : build1 (REALPART_EXPR, rtype, arg0);
10086 tree ip = arg1i ? arg1i
10087 : build1 (IMAGPART_EXPR, rtype, arg1);
10088 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10093 if (flag_unsafe_math_optimizations
10094 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10095 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10096 && (tem = distribute_real_division (code, type, arg0, arg1)))
10099 /* Convert x+x into x*2.0. */
10100 if (operand_equal_p (arg0, arg1, 0)
10101 && SCALAR_FLOAT_TYPE_P (type))
10102 return fold_build2 (MULT_EXPR, type, arg0,
10103 build_real (type, dconst2));
10105 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10106 We associate floats only if the user has specified
10107 -fassociative-math. */
10108 if (flag_associative_math
10109 && TREE_CODE (arg1) == PLUS_EXPR
10110 && TREE_CODE (arg0) != MULT_EXPR)
10112 tree tree10 = TREE_OPERAND (arg1, 0);
10113 tree tree11 = TREE_OPERAND (arg1, 1);
10114 if (TREE_CODE (tree11) == MULT_EXPR
10115 && TREE_CODE (tree10) == MULT_EXPR)
10118 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
10119 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
10122 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10123 We associate floats only if the user has specified
10124 -fassociative-math. */
10125 if (flag_associative_math
10126 && TREE_CODE (arg0) == PLUS_EXPR
10127 && TREE_CODE (arg1) != MULT_EXPR)
10129 tree tree00 = TREE_OPERAND (arg0, 0);
10130 tree tree01 = TREE_OPERAND (arg0, 1);
10131 if (TREE_CODE (tree01) == MULT_EXPR
10132 && TREE_CODE (tree00) == MULT_EXPR)
10135 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
10136 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
10142 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10143 is a rotate of A by C1 bits. */
10144 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10145 is a rotate of A by B bits. */
10147 enum tree_code code0, code1;
10149 code0 = TREE_CODE (arg0);
10150 code1 = TREE_CODE (arg1);
10151 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10152 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10153 && operand_equal_p (TREE_OPERAND (arg0, 0),
10154 TREE_OPERAND (arg1, 0), 0)
10155 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10156 TYPE_UNSIGNED (rtype))
10157 /* Only create rotates in complete modes. Other cases are not
10158 expanded properly. */
10159 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10161 tree tree01, tree11;
10162 enum tree_code code01, code11;
10164 tree01 = TREE_OPERAND (arg0, 1);
10165 tree11 = TREE_OPERAND (arg1, 1);
10166 STRIP_NOPS (tree01);
10167 STRIP_NOPS (tree11);
10168 code01 = TREE_CODE (tree01);
10169 code11 = TREE_CODE (tree11);
10170 if (code01 == INTEGER_CST
10171 && code11 == INTEGER_CST
10172 && TREE_INT_CST_HIGH (tree01) == 0
10173 && TREE_INT_CST_HIGH (tree11) == 0
10174 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10175 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10176 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
10177 code0 == LSHIFT_EXPR ? tree01 : tree11);
10178 else if (code11 == MINUS_EXPR)
10180 tree tree110, tree111;
10181 tree110 = TREE_OPERAND (tree11, 0);
10182 tree111 = TREE_OPERAND (tree11, 1);
10183 STRIP_NOPS (tree110);
10184 STRIP_NOPS (tree111);
10185 if (TREE_CODE (tree110) == INTEGER_CST
10186 && 0 == compare_tree_int (tree110,
10188 (TREE_TYPE (TREE_OPERAND
10190 && operand_equal_p (tree01, tree111, 0))
10191 return build2 ((code0 == LSHIFT_EXPR
10194 type, TREE_OPERAND (arg0, 0), tree01);
10196 else if (code01 == MINUS_EXPR)
10198 tree tree010, tree011;
10199 tree010 = TREE_OPERAND (tree01, 0);
10200 tree011 = TREE_OPERAND (tree01, 1);
10201 STRIP_NOPS (tree010);
10202 STRIP_NOPS (tree011);
10203 if (TREE_CODE (tree010) == INTEGER_CST
10204 && 0 == compare_tree_int (tree010,
10206 (TREE_TYPE (TREE_OPERAND
10208 && operand_equal_p (tree11, tree011, 0))
10209 return build2 ((code0 != LSHIFT_EXPR
10212 type, TREE_OPERAND (arg0, 0), tree11);
10218 /* In most languages, can't associate operations on floats through
10219 parentheses. Rather than remember where the parentheses were, we
10220 don't associate floats at all, unless the user has specified
10221 -fassociative-math.
10222 And, we need to make sure type is not saturating. */
10224 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10225 && !TYPE_SATURATING (type))
10227 tree var0, con0, lit0, minus_lit0;
10228 tree var1, con1, lit1, minus_lit1;
10231 /* Split both trees into variables, constants, and literals. Then
10232 associate each group together, the constants with literals,
10233 then the result with variables. This increases the chances of
10234 literals being recombined later and of generating relocatable
10235 expressions for the sum of a constant and literal. */
10236 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10237 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10238 code == MINUS_EXPR);
10240 /* With undefined overflow we can only associate constants
10241 with one variable. */
10242 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10243 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10249 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10250 tmp0 = TREE_OPERAND (tmp0, 0);
10251 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10252 tmp1 = TREE_OPERAND (tmp1, 0);
10253 /* The only case we can still associate with two variables
10254 is if they are the same, modulo negation. */
10255 if (!operand_equal_p (tmp0, tmp1, 0))
10259 /* Only do something if we found more than two objects. Otherwise,
10260 nothing has changed and we risk infinite recursion. */
10262 && (2 < ((var0 != 0) + (var1 != 0)
10263 + (con0 != 0) + (con1 != 0)
10264 + (lit0 != 0) + (lit1 != 0)
10265 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10267 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10268 if (code == MINUS_EXPR)
10271 var0 = associate_trees (var0, var1, code, type);
10272 con0 = associate_trees (con0, con1, code, type);
10273 lit0 = associate_trees (lit0, lit1, code, type);
10274 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10276 /* Preserve the MINUS_EXPR if the negative part of the literal is
10277 greater than the positive part. Otherwise, the multiplicative
10278 folding code (i.e extract_muldiv) may be fooled in case
10279 unsigned constants are subtracted, like in the following
10280 example: ((X*2 + 4) - 8U)/2. */
10281 if (minus_lit0 && lit0)
10283 if (TREE_CODE (lit0) == INTEGER_CST
10284 && TREE_CODE (minus_lit0) == INTEGER_CST
10285 && tree_int_cst_lt (lit0, minus_lit0))
10287 minus_lit0 = associate_trees (minus_lit0, lit0,
10293 lit0 = associate_trees (lit0, minus_lit0,
10301 return fold_convert (type,
10302 associate_trees (var0, minus_lit0,
10303 MINUS_EXPR, type));
10306 con0 = associate_trees (con0, minus_lit0,
10308 return fold_convert (type,
10309 associate_trees (var0, con0,
10314 con0 = associate_trees (con0, lit0, code, type);
10315 return fold_convert (type, associate_trees (var0, con0,
10323 /* Pointer simplifications for subtraction, simple reassociations. */
10324 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10326 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10327 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10328 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10330 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10331 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10332 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10333 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10334 return fold_build2 (PLUS_EXPR, type,
10335 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10336 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10338 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10339 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10341 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10342 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10343 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10345 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10348 /* A - (-B) -> A + B */
10349 if (TREE_CODE (arg1) == NEGATE_EXPR)
10350 return fold_build2 (PLUS_EXPR, type, op0,
10351 fold_convert (type, TREE_OPERAND (arg1, 0)));
10352 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10353 if (TREE_CODE (arg0) == NEGATE_EXPR
10354 && (FLOAT_TYPE_P (type)
10355 || INTEGRAL_TYPE_P (type))
10356 && negate_expr_p (arg1)
10357 && reorder_operands_p (arg0, arg1))
10358 return fold_build2 (MINUS_EXPR, type,
10359 fold_convert (type, negate_expr (arg1)),
10360 fold_convert (type, TREE_OPERAND (arg0, 0)));
10361 /* Convert -A - 1 to ~A. */
10362 if (INTEGRAL_TYPE_P (type)
10363 && TREE_CODE (arg0) == NEGATE_EXPR
10364 && integer_onep (arg1)
10365 && !TYPE_OVERFLOW_TRAPS (type))
10366 return fold_build1 (BIT_NOT_EXPR, type,
10367 fold_convert (type, TREE_OPERAND (arg0, 0)));
10369 /* Convert -1 - A to ~A. */
10370 if (INTEGRAL_TYPE_P (type)
10371 && integer_all_onesp (arg0))
10372 return fold_build1 (BIT_NOT_EXPR, type, op1);
10375 /* X - (X / CST) * CST is X % CST. */
10376 if (INTEGRAL_TYPE_P (type)
10377 && TREE_CODE (arg1) == MULT_EXPR
10378 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10379 && operand_equal_p (arg0,
10380 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10381 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10382 TREE_OPERAND (arg1, 1), 0))
10383 return fold_convert (type,
10384 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10385 arg0, TREE_OPERAND (arg1, 1)));
10387 if (! FLOAT_TYPE_P (type))
10389 if (integer_zerop (arg0))
10390 return negate_expr (fold_convert (type, arg1));
10391 if (integer_zerop (arg1))
10392 return non_lvalue (fold_convert (type, arg0));
10394 /* Fold A - (A & B) into ~B & A. */
10395 if (!TREE_SIDE_EFFECTS (arg0)
10396 && TREE_CODE (arg1) == BIT_AND_EXPR)
10398 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10400 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10401 return fold_build2 (BIT_AND_EXPR, type,
10402 fold_build1 (BIT_NOT_EXPR, type, arg10),
10403 fold_convert (type, arg0));
10405 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10407 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10408 return fold_build2 (BIT_AND_EXPR, type,
10409 fold_build1 (BIT_NOT_EXPR, type, arg11),
10410 fold_convert (type, arg0));
10414 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10415 any power of 2 minus 1. */
10416 if (TREE_CODE (arg0) == BIT_AND_EXPR
10417 && TREE_CODE (arg1) == BIT_AND_EXPR
10418 && operand_equal_p (TREE_OPERAND (arg0, 0),
10419 TREE_OPERAND (arg1, 0), 0))
10421 tree mask0 = TREE_OPERAND (arg0, 1);
10422 tree mask1 = TREE_OPERAND (arg1, 1);
10423 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10425 if (operand_equal_p (tem, mask1, 0))
10427 tem = fold_build2 (BIT_XOR_EXPR, type,
10428 TREE_OPERAND (arg0, 0), mask1);
10429 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10434 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10435 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10436 return non_lvalue (fold_convert (type, arg0));
10438 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10439 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10440 (-ARG1 + ARG0) reduces to -ARG1. */
10441 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10442 return negate_expr (fold_convert (type, arg1));
10444 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10445 __complex__ ( x, -y ). This is not the same for SNaNs or if
10446 signed zeros are involved. */
10447 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10448 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10449 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10451 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10452 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10453 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10454 bool arg0rz = false, arg0iz = false;
10455 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10456 || (arg0i && (arg0iz = real_zerop (arg0i))))
10458 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10459 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10460 if (arg0rz && arg1i && real_zerop (arg1i))
10462 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10464 : build1 (REALPART_EXPR, rtype, arg1));
10465 tree ip = arg0i ? arg0i
10466 : build1 (IMAGPART_EXPR, rtype, arg0);
10467 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10469 else if (arg0iz && arg1r && real_zerop (arg1r))
10471 tree rp = arg0r ? arg0r
10472 : build1 (REALPART_EXPR, rtype, arg0);
10473 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10475 : build1 (IMAGPART_EXPR, rtype, arg1));
10476 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10481 /* Fold &x - &x. This can happen from &x.foo - &x.
10482 This is unsafe for certain floats even in non-IEEE formats.
10483 In IEEE, it is unsafe because it does wrong for NaNs.
10484 Also note that operand_equal_p is always false if an operand
10487 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10488 && operand_equal_p (arg0, arg1, 0))
10489 return fold_convert (type, integer_zero_node);
10491 /* A - B -> A + (-B) if B is easily negatable. */
10492 if (negate_expr_p (arg1)
10493 && ((FLOAT_TYPE_P (type)
10494 /* Avoid this transformation if B is a positive REAL_CST. */
10495 && (TREE_CODE (arg1) != REAL_CST
10496 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10497 || INTEGRAL_TYPE_P (type)))
10498 return fold_build2 (PLUS_EXPR, type,
10499 fold_convert (type, arg0),
10500 fold_convert (type, negate_expr (arg1)));
10502 /* Try folding difference of addresses. */
10504 HOST_WIDE_INT diff;
10506 if ((TREE_CODE (arg0) == ADDR_EXPR
10507 || TREE_CODE (arg1) == ADDR_EXPR)
10508 && ptr_difference_const (arg0, arg1, &diff))
10509 return build_int_cst_type (type, diff);
10512 /* Fold &a[i] - &a[j] to i-j. */
10513 if (TREE_CODE (arg0) == ADDR_EXPR
10514 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10515 && TREE_CODE (arg1) == ADDR_EXPR
10516 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10518 tree aref0 = TREE_OPERAND (arg0, 0);
10519 tree aref1 = TREE_OPERAND (arg1, 0);
10520 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10521 TREE_OPERAND (aref1, 0), 0))
10523 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10524 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10525 tree esz = array_ref_element_size (aref0);
10526 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10527 return fold_build2 (MULT_EXPR, type, diff,
10528 fold_convert (type, esz));
10533 if (flag_unsafe_math_optimizations
10534 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10535 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10536 && (tem = distribute_real_division (code, type, arg0, arg1)))
10539 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10540 same or one. Make sure type is not saturating.
10541 fold_plusminus_mult_expr will re-associate. */
10542 if ((TREE_CODE (arg0) == MULT_EXPR
10543 || TREE_CODE (arg1) == MULT_EXPR)
10544 && !TYPE_SATURATING (type)
10545 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10547 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10555 /* (-A) * (-B) -> A * B */
10556 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10557 return fold_build2 (MULT_EXPR, type,
10558 fold_convert (type, TREE_OPERAND (arg0, 0)),
10559 fold_convert (type, negate_expr (arg1)));
10560 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10561 return fold_build2 (MULT_EXPR, type,
10562 fold_convert (type, negate_expr (arg0)),
10563 fold_convert (type, TREE_OPERAND (arg1, 0)));
10565 if (! FLOAT_TYPE_P (type))
10567 if (integer_zerop (arg1))
10568 return omit_one_operand (type, arg1, arg0);
10569 if (integer_onep (arg1))
10570 return non_lvalue (fold_convert (type, arg0));
10571 /* Transform x * -1 into -x. Make sure to do the negation
10572 on the original operand with conversions not stripped
10573 because we can only strip non-sign-changing conversions. */
10574 if (integer_all_onesp (arg1))
10575 return fold_convert (type, negate_expr (op0));
10576 /* Transform x * -C into -x * C if x is easily negatable. */
10577 if (TREE_CODE (arg1) == INTEGER_CST
10578 && tree_int_cst_sgn (arg1) == -1
10579 && negate_expr_p (arg0)
10580 && (tem = negate_expr (arg1)) != arg1
10581 && !TREE_OVERFLOW (tem))
10582 return fold_build2 (MULT_EXPR, type,
10583 fold_convert (type, negate_expr (arg0)), tem);
10585 /* (a * (1 << b)) is (a << b) */
10586 if (TREE_CODE (arg1) == LSHIFT_EXPR
10587 && integer_onep (TREE_OPERAND (arg1, 0)))
10588 return fold_build2 (LSHIFT_EXPR, type, op0,
10589 TREE_OPERAND (arg1, 1));
10590 if (TREE_CODE (arg0) == LSHIFT_EXPR
10591 && integer_onep (TREE_OPERAND (arg0, 0)))
10592 return fold_build2 (LSHIFT_EXPR, type, op1,
10593 TREE_OPERAND (arg0, 1));
10595 /* (A + A) * C -> A * 2 * C */
10596 if (TREE_CODE (arg0) == PLUS_EXPR
10597 && TREE_CODE (arg1) == INTEGER_CST
10598 && operand_equal_p (TREE_OPERAND (arg0, 0),
10599 TREE_OPERAND (arg0, 1), 0))
10600 return fold_build2 (MULT_EXPR, type,
10601 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10602 TREE_OPERAND (arg0, 1)),
10603 fold_build2 (MULT_EXPR, type,
10604 build_int_cst (type, 2) , arg1));
10606 strict_overflow_p = false;
10607 if (TREE_CODE (arg1) == INTEGER_CST
10608 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10609 &strict_overflow_p)))
10611 if (strict_overflow_p)
10612 fold_overflow_warning (("assuming signed overflow does not "
10613 "occur when simplifying "
10615 WARN_STRICT_OVERFLOW_MISC);
10616 return fold_convert (type, tem);
10619 /* Optimize z * conj(z) for integer complex numbers. */
10620 if (TREE_CODE (arg0) == CONJ_EXPR
10621 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10622 return fold_mult_zconjz (type, arg1);
10623 if (TREE_CODE (arg1) == CONJ_EXPR
10624 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10625 return fold_mult_zconjz (type, arg0);
10629 /* Maybe fold x * 0 to 0. The expressions aren't the same
10630 when x is NaN, since x * 0 is also NaN. Nor are they the
10631 same in modes with signed zeros, since multiplying a
10632 negative value by 0 gives -0, not +0. */
10633 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10634 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10635 && real_zerop (arg1))
10636 return omit_one_operand (type, arg1, arg0);
10637 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10638 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10639 && real_onep (arg1))
10640 return non_lvalue (fold_convert (type, arg0));
10642 /* Transform x * -1.0 into -x. */
10643 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10644 && real_minus_onep (arg1))
10645 return fold_convert (type, negate_expr (arg0));
10647 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10648 the result for floating point types due to rounding so it is applied
10649 only if -fassociative-math was specified. */
10650 if (flag_associative_math
10651 && TREE_CODE (arg0) == RDIV_EXPR
10652 && TREE_CODE (arg1) == REAL_CST
10653 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10655 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10658 return fold_build2 (RDIV_EXPR, type, tem,
10659 TREE_OPERAND (arg0, 1));
10662 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10663 if (operand_equal_p (arg0, arg1, 0))
10665 tree tem = fold_strip_sign_ops (arg0);
10666 if (tem != NULL_TREE)
10668 tem = fold_convert (type, tem);
10669 return fold_build2 (MULT_EXPR, type, tem, tem);
10673 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10674 This is not the same for NaNs or if signed zeros are
10676 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10677 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10678 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10679 && TREE_CODE (arg1) == COMPLEX_CST
10680 && real_zerop (TREE_REALPART (arg1)))
10682 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10683 if (real_onep (TREE_IMAGPART (arg1)))
10684 return fold_build2 (COMPLEX_EXPR, type,
10685 negate_expr (fold_build1 (IMAGPART_EXPR,
10687 fold_build1 (REALPART_EXPR, rtype, arg0));
10688 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10689 return fold_build2 (COMPLEX_EXPR, type,
10690 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10691 negate_expr (fold_build1 (REALPART_EXPR,
10695 /* Optimize z * conj(z) for floating point complex numbers.
10696 Guarded by flag_unsafe_math_optimizations as non-finite
10697 imaginary components don't produce scalar results. */
10698 if (flag_unsafe_math_optimizations
10699 && TREE_CODE (arg0) == CONJ_EXPR
10700 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10701 return fold_mult_zconjz (type, arg1);
10702 if (flag_unsafe_math_optimizations
10703 && TREE_CODE (arg1) == CONJ_EXPR
10704 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10705 return fold_mult_zconjz (type, arg0);
10707 if (flag_unsafe_math_optimizations)
10709 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10710 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10712 /* Optimizations of root(...)*root(...). */
10713 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10716 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10717 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10719 /* Optimize sqrt(x)*sqrt(x) as x. */
10720 if (BUILTIN_SQRT_P (fcode0)
10721 && operand_equal_p (arg00, arg10, 0)
10722 && ! HONOR_SNANS (TYPE_MODE (type)))
10725 /* Optimize root(x)*root(y) as root(x*y). */
10726 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10727 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10728 return build_call_expr (rootfn, 1, arg);
10731 /* Optimize expN(x)*expN(y) as expN(x+y). */
10732 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10734 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10735 tree arg = fold_build2 (PLUS_EXPR, type,
10736 CALL_EXPR_ARG (arg0, 0),
10737 CALL_EXPR_ARG (arg1, 0));
10738 return build_call_expr (expfn, 1, arg);
10741 /* Optimizations of pow(...)*pow(...). */
10742 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10743 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10744 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10746 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10747 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10748 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10749 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10751 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10752 if (operand_equal_p (arg01, arg11, 0))
10754 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10755 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10756 return build_call_expr (powfn, 2, arg, arg01);
10759 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10760 if (operand_equal_p (arg00, arg10, 0))
10762 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10763 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10764 return build_call_expr (powfn, 2, arg00, arg);
10768 /* Optimize tan(x)*cos(x) as sin(x). */
10769 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10770 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10771 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10772 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10773 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10774 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10775 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10776 CALL_EXPR_ARG (arg1, 0), 0))
10778 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10780 if (sinfn != NULL_TREE)
10781 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10784 /* Optimize x*pow(x,c) as pow(x,c+1). */
10785 if (fcode1 == BUILT_IN_POW
10786 || fcode1 == BUILT_IN_POWF
10787 || fcode1 == BUILT_IN_POWL)
10789 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10790 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10791 if (TREE_CODE (arg11) == REAL_CST
10792 && !TREE_OVERFLOW (arg11)
10793 && operand_equal_p (arg0, arg10, 0))
10795 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10799 c = TREE_REAL_CST (arg11);
10800 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10801 arg = build_real (type, c);
10802 return build_call_expr (powfn, 2, arg0, arg);
10806 /* Optimize pow(x,c)*x as pow(x,c+1). */
10807 if (fcode0 == BUILT_IN_POW
10808 || fcode0 == BUILT_IN_POWF
10809 || fcode0 == BUILT_IN_POWL)
10811 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10812 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10813 if (TREE_CODE (arg01) == REAL_CST
10814 && !TREE_OVERFLOW (arg01)
10815 && operand_equal_p (arg1, arg00, 0))
10817 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10821 c = TREE_REAL_CST (arg01);
10822 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10823 arg = build_real (type, c);
10824 return build_call_expr (powfn, 2, arg1, arg);
10828 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10829 if (optimize_function_for_speed_p (cfun)
10830 && operand_equal_p (arg0, arg1, 0))
10832 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10836 tree arg = build_real (type, dconst2);
10837 return build_call_expr (powfn, 2, arg0, arg);
10846 if (integer_all_onesp (arg1))
10847 return omit_one_operand (type, arg1, arg0);
10848 if (integer_zerop (arg1))
10849 return non_lvalue (fold_convert (type, arg0));
10850 if (operand_equal_p (arg0, arg1, 0))
10851 return non_lvalue (fold_convert (type, arg0));
10853 /* ~X | X is -1. */
10854 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10855 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10857 t1 = fold_convert (type, integer_zero_node);
10858 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10859 return omit_one_operand (type, t1, arg1);
10862 /* X | ~X is -1. */
10863 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10864 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10866 t1 = fold_convert (type, integer_zero_node);
10867 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10868 return omit_one_operand (type, t1, arg0);
10871 /* Canonicalize (X & C1) | C2. */
10872 if (TREE_CODE (arg0) == BIT_AND_EXPR
10873 && TREE_CODE (arg1) == INTEGER_CST
10874 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10876 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10877 int width = TYPE_PRECISION (type), w;
10878 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10879 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10880 hi2 = TREE_INT_CST_HIGH (arg1);
10881 lo2 = TREE_INT_CST_LOW (arg1);
10883 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10884 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10885 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10887 if (width > HOST_BITS_PER_WIDE_INT)
10889 mhi = (unsigned HOST_WIDE_INT) -1
10890 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10896 mlo = (unsigned HOST_WIDE_INT) -1
10897 >> (HOST_BITS_PER_WIDE_INT - width);
10900 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10901 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10902 return fold_build2 (BIT_IOR_EXPR, type,
10903 TREE_OPERAND (arg0, 0), arg1);
10905 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10906 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10907 mode which allows further optimizations. */
10914 for (w = BITS_PER_UNIT;
10915 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10918 unsigned HOST_WIDE_INT mask
10919 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10920 if (((lo1 | lo2) & mask) == mask
10921 && (lo1 & ~mask) == 0 && hi1 == 0)
10928 if (hi3 != hi1 || lo3 != lo1)
10929 return fold_build2 (BIT_IOR_EXPR, type,
10930 fold_build2 (BIT_AND_EXPR, type,
10931 TREE_OPERAND (arg0, 0),
10932 build_int_cst_wide (type,
10937 /* (X & Y) | Y is (X, Y). */
10938 if (TREE_CODE (arg0) == BIT_AND_EXPR
10939 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10940 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10941 /* (X & Y) | X is (Y, X). */
10942 if (TREE_CODE (arg0) == BIT_AND_EXPR
10943 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10944 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10945 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10946 /* X | (X & Y) is (Y, X). */
10947 if (TREE_CODE (arg1) == BIT_AND_EXPR
10948 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10949 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10950 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10951 /* X | (Y & X) is (Y, X). */
10952 if (TREE_CODE (arg1) == BIT_AND_EXPR
10953 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10954 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10955 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10957 t1 = distribute_bit_expr (code, type, arg0, arg1);
10958 if (t1 != NULL_TREE)
10961 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10963 This results in more efficient code for machines without a NAND
10964 instruction. Combine will canonicalize to the first form
10965 which will allow use of NAND instructions provided by the
10966 backend if they exist. */
10967 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10968 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10970 return fold_build1 (BIT_NOT_EXPR, type,
10971 build2 (BIT_AND_EXPR, type,
10972 fold_convert (type,
10973 TREE_OPERAND (arg0, 0)),
10974 fold_convert (type,
10975 TREE_OPERAND (arg1, 0))));
10978 /* See if this can be simplified into a rotate first. If that
10979 is unsuccessful continue in the association code. */
10983 if (integer_zerop (arg1))
10984 return non_lvalue (fold_convert (type, arg0));
10985 if (integer_all_onesp (arg1))
10986 return fold_build1 (BIT_NOT_EXPR, type, op0);
10987 if (operand_equal_p (arg0, arg1, 0))
10988 return omit_one_operand (type, integer_zero_node, arg0);
10990 /* ~X ^ X is -1. */
10991 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10992 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10994 t1 = fold_convert (type, integer_zero_node);
10995 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10996 return omit_one_operand (type, t1, arg1);
10999 /* X ^ ~X is -1. */
11000 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11001 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11003 t1 = fold_convert (type, integer_zero_node);
11004 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
11005 return omit_one_operand (type, t1, arg0);
11008 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11009 with a constant, and the two constants have no bits in common,
11010 we should treat this as a BIT_IOR_EXPR since this may produce more
11011 simplifications. */
11012 if (TREE_CODE (arg0) == BIT_AND_EXPR
11013 && TREE_CODE (arg1) == BIT_AND_EXPR
11014 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11015 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11016 && integer_zerop (const_binop (BIT_AND_EXPR,
11017 TREE_OPERAND (arg0, 1),
11018 TREE_OPERAND (arg1, 1), 0)))
11020 code = BIT_IOR_EXPR;
11024 /* (X | Y) ^ X -> Y & ~ X*/
11025 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11026 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11028 tree t2 = TREE_OPERAND (arg0, 1);
11029 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11031 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11032 fold_convert (type, t1));
11036 /* (Y | X) ^ X -> Y & ~ X*/
11037 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11038 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11040 tree t2 = TREE_OPERAND (arg0, 0);
11041 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11043 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11044 fold_convert (type, t1));
11048 /* X ^ (X | Y) -> Y & ~ X*/
11049 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11050 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11052 tree t2 = TREE_OPERAND (arg1, 1);
11053 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11055 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11056 fold_convert (type, t1));
11060 /* X ^ (Y | X) -> Y & ~ X*/
11061 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11062 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11064 tree t2 = TREE_OPERAND (arg1, 0);
11065 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11067 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11068 fold_convert (type, t1));
11072 /* Convert ~X ^ ~Y to X ^ Y. */
11073 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11074 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11075 return fold_build2 (code, type,
11076 fold_convert (type, TREE_OPERAND (arg0, 0)),
11077 fold_convert (type, TREE_OPERAND (arg1, 0)));
11079 /* Convert ~X ^ C to X ^ ~C. */
11080 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11081 && TREE_CODE (arg1) == INTEGER_CST)
11082 return fold_build2 (code, type,
11083 fold_convert (type, TREE_OPERAND (arg0, 0)),
11084 fold_build1 (BIT_NOT_EXPR, type, arg1));
11086 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11087 if (TREE_CODE (arg0) == BIT_AND_EXPR
11088 && integer_onep (TREE_OPERAND (arg0, 1))
11089 && integer_onep (arg1))
11090 return fold_build2 (EQ_EXPR, type, arg0,
11091 build_int_cst (TREE_TYPE (arg0), 0));
11093 /* Fold (X & Y) ^ Y as ~X & Y. */
11094 if (TREE_CODE (arg0) == BIT_AND_EXPR
11095 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11097 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11098 return fold_build2 (BIT_AND_EXPR, type,
11099 fold_build1 (BIT_NOT_EXPR, type, tem),
11100 fold_convert (type, arg1));
11102 /* Fold (X & Y) ^ X as ~Y & X. */
11103 if (TREE_CODE (arg0) == BIT_AND_EXPR
11104 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11105 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11107 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11108 return fold_build2 (BIT_AND_EXPR, type,
11109 fold_build1 (BIT_NOT_EXPR, type, tem),
11110 fold_convert (type, arg1));
11112 /* Fold X ^ (X & Y) as X & ~Y. */
11113 if (TREE_CODE (arg1) == BIT_AND_EXPR
11114 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11116 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11117 return fold_build2 (BIT_AND_EXPR, type,
11118 fold_convert (type, arg0),
11119 fold_build1 (BIT_NOT_EXPR, type, tem));
11121 /* Fold X ^ (Y & X) as ~Y & X. */
11122 if (TREE_CODE (arg1) == BIT_AND_EXPR
11123 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11124 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11126 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11127 return fold_build2 (BIT_AND_EXPR, type,
11128 fold_build1 (BIT_NOT_EXPR, type, tem),
11129 fold_convert (type, arg0));
11132 /* See if this can be simplified into a rotate first. If that
11133 is unsuccessful continue in the association code. */
11137 if (integer_all_onesp (arg1))
11138 return non_lvalue (fold_convert (type, arg0));
11139 if (integer_zerop (arg1))
11140 return omit_one_operand (type, arg1, arg0);
11141 if (operand_equal_p (arg0, arg1, 0))
11142 return non_lvalue (fold_convert (type, arg0));
11144 /* ~X & X is always zero. */
11145 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11146 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11147 return omit_one_operand (type, integer_zero_node, arg1);
11149 /* X & ~X is always zero. */
11150 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11151 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11152 return omit_one_operand (type, integer_zero_node, arg0);
11154 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11155 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11156 && TREE_CODE (arg1) == INTEGER_CST
11157 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11159 tree tmp1 = fold_convert (type, arg1);
11160 tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
11161 tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
11162 tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
11163 tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
11164 return fold_convert (type,
11165 fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
11168 /* (X | Y) & Y is (X, Y). */
11169 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11170 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11171 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
11172 /* (X | Y) & X is (Y, X). */
11173 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11174 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11175 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11176 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
11177 /* X & (X | Y) is (Y, X). */
11178 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11179 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11180 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11181 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
11182 /* X & (Y | X) is (Y, X). */
11183 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11184 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11185 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11186 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
11188 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11189 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11190 && integer_onep (TREE_OPERAND (arg0, 1))
11191 && integer_onep (arg1))
11193 tem = TREE_OPERAND (arg0, 0);
11194 return fold_build2 (EQ_EXPR, type,
11195 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11196 build_int_cst (TREE_TYPE (tem), 1)),
11197 build_int_cst (TREE_TYPE (tem), 0));
11199 /* Fold ~X & 1 as (X & 1) == 0. */
11200 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11201 && integer_onep (arg1))
11203 tem = TREE_OPERAND (arg0, 0);
11204 return fold_build2 (EQ_EXPR, type,
11205 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11206 build_int_cst (TREE_TYPE (tem), 1)),
11207 build_int_cst (TREE_TYPE (tem), 0));
11210 /* Fold (X ^ Y) & Y as ~X & Y. */
11211 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11212 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11214 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11215 return fold_build2 (BIT_AND_EXPR, type,
11216 fold_build1 (BIT_NOT_EXPR, type, tem),
11217 fold_convert (type, arg1));
11219 /* Fold (X ^ Y) & X as ~Y & X. */
11220 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11221 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11222 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11224 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11225 return fold_build2 (BIT_AND_EXPR, type,
11226 fold_build1 (BIT_NOT_EXPR, type, tem),
11227 fold_convert (type, arg1));
11229 /* Fold X & (X ^ Y) as X & ~Y. */
11230 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11231 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11233 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11234 return fold_build2 (BIT_AND_EXPR, type,
11235 fold_convert (type, arg0),
11236 fold_build1 (BIT_NOT_EXPR, type, tem));
11238 /* Fold X & (Y ^ X) as ~Y & X. */
11239 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11240 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11241 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11243 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11244 return fold_build2 (BIT_AND_EXPR, type,
11245 fold_build1 (BIT_NOT_EXPR, type, tem),
11246 fold_convert (type, arg0));
11249 t1 = distribute_bit_expr (code, type, arg0, arg1);
11250 if (t1 != NULL_TREE)
11252 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11253 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11254 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11257 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11259 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11260 && (~TREE_INT_CST_LOW (arg1)
11261 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11262 return fold_convert (type, TREE_OPERAND (arg0, 0));
11265 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11267 This results in more efficient code for machines without a NOR
11268 instruction. Combine will canonicalize to the first form
11269 which will allow use of NOR instructions provided by the
11270 backend if they exist. */
11271 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11272 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11274 return fold_build1 (BIT_NOT_EXPR, type,
11275 build2 (BIT_IOR_EXPR, type,
11276 fold_convert (type,
11277 TREE_OPERAND (arg0, 0)),
11278 fold_convert (type,
11279 TREE_OPERAND (arg1, 0))));
11282 /* If arg0 is derived from the address of an object or function, we may
11283 be able to fold this expression using the object or function's
11285 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11287 unsigned HOST_WIDE_INT modulus, residue;
11288 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11290 modulus = get_pointer_modulus_and_residue (arg0, &residue);
11292 /* This works because modulus is a power of 2. If this weren't the
11293 case, we'd have to replace it by its greatest power-of-2
11294 divisor: modulus & -modulus. */
11296 return build_int_cst (type, residue & low);
11299 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11300 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11301 if the new mask might be further optimized. */
11302 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11303 || TREE_CODE (arg0) == RSHIFT_EXPR)
11304 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11305 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11306 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11307 < TYPE_PRECISION (TREE_TYPE (arg0))
11308 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11309 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11311 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11312 unsigned HOST_WIDE_INT mask
11313 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11314 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11315 tree shift_type = TREE_TYPE (arg0);
11317 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11318 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11319 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11320 && TYPE_PRECISION (TREE_TYPE (arg0))
11321 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11323 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11324 tree arg00 = TREE_OPERAND (arg0, 0);
11325 /* See if more bits can be proven as zero because of
11327 if (TREE_CODE (arg00) == NOP_EXPR
11328 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11330 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11331 if (TYPE_PRECISION (inner_type)
11332 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11333 && TYPE_PRECISION (inner_type) < prec)
11335 prec = TYPE_PRECISION (inner_type);
11336 /* See if we can shorten the right shift. */
11338 shift_type = inner_type;
11341 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11342 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11343 zerobits <<= prec - shiftc;
11344 /* For arithmetic shift if sign bit could be set, zerobits
11345 can contain actually sign bits, so no transformation is
11346 possible, unless MASK masks them all away. In that
11347 case the shift needs to be converted into logical shift. */
11348 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11349 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11351 if ((mask & zerobits) == 0)
11352 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11358 /* ((X << 16) & 0xff00) is (X, 0). */
11359 if ((mask & zerobits) == mask)
11360 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11362 newmask = mask | zerobits;
11363 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11367 /* Only do the transformation if NEWMASK is some integer
11369 for (prec = BITS_PER_UNIT;
11370 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11371 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11373 if (prec < HOST_BITS_PER_WIDE_INT
11374 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11378 if (shift_type != TREE_TYPE (arg0))
11380 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11381 fold_convert (shift_type,
11382 TREE_OPERAND (arg0, 0)),
11383 TREE_OPERAND (arg0, 1));
11384 tem = fold_convert (type, tem);
11388 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11389 if (!tree_int_cst_equal (newmaskt, arg1))
11390 return fold_build2 (BIT_AND_EXPR, type, tem, newmaskt);
11398 /* Don't touch a floating-point divide by zero unless the mode
11399 of the constant can represent infinity. */
11400 if (TREE_CODE (arg1) == REAL_CST
11401 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11402 && real_zerop (arg1))
11405 /* Optimize A / A to 1.0 if we don't care about
11406 NaNs or Infinities. Skip the transformation
11407 for non-real operands. */
11408 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11409 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11410 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11411 && operand_equal_p (arg0, arg1, 0))
11413 tree r = build_real (TREE_TYPE (arg0), dconst1);
11415 return omit_two_operands (type, r, arg0, arg1);
11418 /* The complex version of the above A / A optimization. */
11419 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11420 && operand_equal_p (arg0, arg1, 0))
11422 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11423 if (! HONOR_NANS (TYPE_MODE (elem_type))
11424 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11426 tree r = build_real (elem_type, dconst1);
11427 /* omit_two_operands will call fold_convert for us. */
11428 return omit_two_operands (type, r, arg0, arg1);
11432 /* (-A) / (-B) -> A / B */
11433 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11434 return fold_build2 (RDIV_EXPR, type,
11435 TREE_OPERAND (arg0, 0),
11436 negate_expr (arg1));
11437 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11438 return fold_build2 (RDIV_EXPR, type,
11439 negate_expr (arg0),
11440 TREE_OPERAND (arg1, 0));
11442 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11443 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11444 && real_onep (arg1))
11445 return non_lvalue (fold_convert (type, arg0));
11447 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11448 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11449 && real_minus_onep (arg1))
11450 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11452 /* If ARG1 is a constant, we can convert this to a multiply by the
11453 reciprocal. This does not have the same rounding properties,
11454 so only do this if -freciprocal-math. We can actually
11455 always safely do it if ARG1 is a power of two, but it's hard to
11456 tell if it is or not in a portable manner. */
11457 if (TREE_CODE (arg1) == REAL_CST)
11459 if (flag_reciprocal_math
11460 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11462 return fold_build2 (MULT_EXPR, type, arg0, tem);
11463 /* Find the reciprocal if optimizing and the result is exact. */
11467 r = TREE_REAL_CST (arg1);
11468 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11470 tem = build_real (type, r);
11471 return fold_build2 (MULT_EXPR, type,
11472 fold_convert (type, arg0), tem);
11476 /* Convert A/B/C to A/(B*C). */
11477 if (flag_reciprocal_math
11478 && TREE_CODE (arg0) == RDIV_EXPR)
11479 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11480 fold_build2 (MULT_EXPR, type,
11481 TREE_OPERAND (arg0, 1), arg1));
11483 /* Convert A/(B/C) to (A/B)*C. */
11484 if (flag_reciprocal_math
11485 && TREE_CODE (arg1) == RDIV_EXPR)
11486 return fold_build2 (MULT_EXPR, type,
11487 fold_build2 (RDIV_EXPR, type, arg0,
11488 TREE_OPERAND (arg1, 0)),
11489 TREE_OPERAND (arg1, 1));
11491 /* Convert C1/(X*C2) into (C1/C2)/X. */
11492 if (flag_reciprocal_math
11493 && TREE_CODE (arg1) == MULT_EXPR
11494 && TREE_CODE (arg0) == REAL_CST
11495 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11497 tree tem = const_binop (RDIV_EXPR, arg0,
11498 TREE_OPERAND (arg1, 1), 0);
11500 return fold_build2 (RDIV_EXPR, type, tem,
11501 TREE_OPERAND (arg1, 0));
11504 if (flag_unsafe_math_optimizations)
11506 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11507 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11509 /* Optimize sin(x)/cos(x) as tan(x). */
11510 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11511 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11512 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11513 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11514 CALL_EXPR_ARG (arg1, 0), 0))
11516 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11518 if (tanfn != NULL_TREE)
11519 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11522 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11523 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11524 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11525 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11526 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11527 CALL_EXPR_ARG (arg1, 0), 0))
11529 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11531 if (tanfn != NULL_TREE)
11533 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11534 return fold_build2 (RDIV_EXPR, type,
11535 build_real (type, dconst1), tmp);
11539 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11540 NaNs or Infinities. */
11541 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11542 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11543 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11545 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11546 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11548 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11549 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11550 && operand_equal_p (arg00, arg01, 0))
11552 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11554 if (cosfn != NULL_TREE)
11555 return build_call_expr (cosfn, 1, arg00);
11559 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11560 NaNs or Infinities. */
11561 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11562 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11563 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11565 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11566 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11568 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11569 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11570 && operand_equal_p (arg00, arg01, 0))
11572 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11574 if (cosfn != NULL_TREE)
11576 tree tmp = build_call_expr (cosfn, 1, arg00);
11577 return fold_build2 (RDIV_EXPR, type,
11578 build_real (type, dconst1),
11584 /* Optimize pow(x,c)/x as pow(x,c-1). */
11585 if (fcode0 == BUILT_IN_POW
11586 || fcode0 == BUILT_IN_POWF
11587 || fcode0 == BUILT_IN_POWL)
11589 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11590 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11591 if (TREE_CODE (arg01) == REAL_CST
11592 && !TREE_OVERFLOW (arg01)
11593 && operand_equal_p (arg1, arg00, 0))
11595 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11599 c = TREE_REAL_CST (arg01);
11600 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11601 arg = build_real (type, c);
11602 return build_call_expr (powfn, 2, arg1, arg);
11606 /* Optimize a/root(b/c) into a*root(c/b). */
11607 if (BUILTIN_ROOT_P (fcode1))
11609 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11611 if (TREE_CODE (rootarg) == RDIV_EXPR)
11613 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11614 tree b = TREE_OPERAND (rootarg, 0);
11615 tree c = TREE_OPERAND (rootarg, 1);
11617 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11619 tmp = build_call_expr (rootfn, 1, tmp);
11620 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11624 /* Optimize x/expN(y) into x*expN(-y). */
11625 if (BUILTIN_EXPONENT_P (fcode1))
11627 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11628 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11629 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11630 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11633 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11634 if (fcode1 == BUILT_IN_POW
11635 || fcode1 == BUILT_IN_POWF
11636 || fcode1 == BUILT_IN_POWL)
11638 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11639 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11640 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11641 tree neg11 = fold_convert (type, negate_expr (arg11));
11642 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11643 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11648 case TRUNC_DIV_EXPR:
11649 case FLOOR_DIV_EXPR:
11650 /* Simplify A / (B << N) where A and B are positive and B is
11651 a power of 2, to A >> (N + log2(B)). */
11652 strict_overflow_p = false;
11653 if (TREE_CODE (arg1) == LSHIFT_EXPR
11654 && (TYPE_UNSIGNED (type)
11655 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11657 tree sval = TREE_OPERAND (arg1, 0);
11658 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11660 tree sh_cnt = TREE_OPERAND (arg1, 1);
11661 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11663 if (strict_overflow_p)
11664 fold_overflow_warning (("assuming signed overflow does not "
11665 "occur when simplifying A / (B << N)"),
11666 WARN_STRICT_OVERFLOW_MISC);
11668 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11669 sh_cnt, build_int_cst (NULL_TREE, pow2));
11670 return fold_build2 (RSHIFT_EXPR, type,
11671 fold_convert (type, arg0), sh_cnt);
11675 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11676 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11677 if (INTEGRAL_TYPE_P (type)
11678 && TYPE_UNSIGNED (type)
11679 && code == FLOOR_DIV_EXPR)
11680 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11684 case ROUND_DIV_EXPR:
11685 case CEIL_DIV_EXPR:
11686 case EXACT_DIV_EXPR:
11687 if (integer_onep (arg1))
11688 return non_lvalue (fold_convert (type, arg0));
11689 if (integer_zerop (arg1))
11691 /* X / -1 is -X. */
11692 if (!TYPE_UNSIGNED (type)
11693 && TREE_CODE (arg1) == INTEGER_CST
11694 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11695 && TREE_INT_CST_HIGH (arg1) == -1)
11696 return fold_convert (type, negate_expr (arg0));
11698 /* Convert -A / -B to A / B when the type is signed and overflow is
11700 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11701 && TREE_CODE (arg0) == NEGATE_EXPR
11702 && negate_expr_p (arg1))
11704 if (INTEGRAL_TYPE_P (type))
11705 fold_overflow_warning (("assuming signed overflow does not occur "
11706 "when distributing negation across "
11708 WARN_STRICT_OVERFLOW_MISC);
11709 return fold_build2 (code, type,
11710 fold_convert (type, TREE_OPERAND (arg0, 0)),
11711 fold_convert (type, negate_expr (arg1)));
11713 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11714 && TREE_CODE (arg1) == NEGATE_EXPR
11715 && negate_expr_p (arg0))
11717 if (INTEGRAL_TYPE_P (type))
11718 fold_overflow_warning (("assuming signed overflow does not occur "
11719 "when distributing negation across "
11721 WARN_STRICT_OVERFLOW_MISC);
11722 return fold_build2 (code, type,
11723 fold_convert (type, negate_expr (arg0)),
11724 fold_convert (type, TREE_OPERAND (arg1, 0)));
11727 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11728 operation, EXACT_DIV_EXPR.
11730 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11731 At one time others generated faster code, it's not clear if they do
11732 after the last round to changes to the DIV code in expmed.c. */
11733 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11734 && multiple_of_p (type, arg0, arg1))
11735 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11737 strict_overflow_p = false;
11738 if (TREE_CODE (arg1) == INTEGER_CST
11739 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11740 &strict_overflow_p)))
11742 if (strict_overflow_p)
11743 fold_overflow_warning (("assuming signed overflow does not occur "
11744 "when simplifying division"),
11745 WARN_STRICT_OVERFLOW_MISC);
11746 return fold_convert (type, tem);
11751 case CEIL_MOD_EXPR:
11752 case FLOOR_MOD_EXPR:
11753 case ROUND_MOD_EXPR:
11754 case TRUNC_MOD_EXPR:
11755 /* X % 1 is always zero, but be sure to preserve any side
11757 if (integer_onep (arg1))
11758 return omit_one_operand (type, integer_zero_node, arg0);
11760 /* X % 0, return X % 0 unchanged so that we can get the
11761 proper warnings and errors. */
11762 if (integer_zerop (arg1))
11765 /* 0 % X is always zero, but be sure to preserve any side
11766 effects in X. Place this after checking for X == 0. */
11767 if (integer_zerop (arg0))
11768 return omit_one_operand (type, integer_zero_node, arg1);
11770 /* X % -1 is zero. */
11771 if (!TYPE_UNSIGNED (type)
11772 && TREE_CODE (arg1) == INTEGER_CST
11773 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11774 && TREE_INT_CST_HIGH (arg1) == -1)
11775 return omit_one_operand (type, integer_zero_node, arg0);
11777 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11778 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11779 strict_overflow_p = false;
11780 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11781 && (TYPE_UNSIGNED (type)
11782 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11785 /* Also optimize A % (C << N) where C is a power of 2,
11786 to A & ((C << N) - 1). */
11787 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11788 c = TREE_OPERAND (arg1, 0);
11790 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11792 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11793 build_int_cst (TREE_TYPE (arg1), 1));
11794 if (strict_overflow_p)
11795 fold_overflow_warning (("assuming signed overflow does not "
11796 "occur when simplifying "
11797 "X % (power of two)"),
11798 WARN_STRICT_OVERFLOW_MISC);
11799 return fold_build2 (BIT_AND_EXPR, type,
11800 fold_convert (type, arg0),
11801 fold_convert (type, mask));
11805 /* X % -C is the same as X % C. */
11806 if (code == TRUNC_MOD_EXPR
11807 && !TYPE_UNSIGNED (type)
11808 && TREE_CODE (arg1) == INTEGER_CST
11809 && !TREE_OVERFLOW (arg1)
11810 && TREE_INT_CST_HIGH (arg1) < 0
11811 && !TYPE_OVERFLOW_TRAPS (type)
11812 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11813 && !sign_bit_p (arg1, arg1))
11814 return fold_build2 (code, type, fold_convert (type, arg0),
11815 fold_convert (type, negate_expr (arg1)));
11817 /* X % -Y is the same as X % Y. */
11818 if (code == TRUNC_MOD_EXPR
11819 && !TYPE_UNSIGNED (type)
11820 && TREE_CODE (arg1) == NEGATE_EXPR
11821 && !TYPE_OVERFLOW_TRAPS (type))
11822 return fold_build2 (code, type, fold_convert (type, arg0),
11823 fold_convert (type, TREE_OPERAND (arg1, 0)));
11825 if (TREE_CODE (arg1) == INTEGER_CST
11826 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11827 &strict_overflow_p)))
11829 if (strict_overflow_p)
11830 fold_overflow_warning (("assuming signed overflow does not occur "
11831 "when simplifying modulus"),
11832 WARN_STRICT_OVERFLOW_MISC);
11833 return fold_convert (type, tem);
11840 if (integer_all_onesp (arg0))
11841 return omit_one_operand (type, arg0, arg1);
11845 /* Optimize -1 >> x for arithmetic right shifts. */
11846 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11847 && tree_expr_nonnegative_p (arg1))
11848 return omit_one_operand (type, arg0, arg1);
11849 /* ... fall through ... */
11853 if (integer_zerop (arg1))
11854 return non_lvalue (fold_convert (type, arg0));
11855 if (integer_zerop (arg0))
11856 return omit_one_operand (type, arg0, arg1);
11858 /* Since negative shift count is not well-defined,
11859 don't try to compute it in the compiler. */
11860 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11863 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11864 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11865 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11866 && host_integerp (TREE_OPERAND (arg0, 1), false)
11867 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11869 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11870 + TREE_INT_CST_LOW (arg1));
11872 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11873 being well defined. */
11874 if (low >= TYPE_PRECISION (type))
11876 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11877 low = low % TYPE_PRECISION (type);
11878 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11879 return omit_one_operand (type, build_int_cst (type, 0),
11880 TREE_OPERAND (arg0, 0));
11882 low = TYPE_PRECISION (type) - 1;
11885 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11886 build_int_cst (type, low));
11889 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11890 into x & ((unsigned)-1 >> c) for unsigned types. */
11891 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11892 || (TYPE_UNSIGNED (type)
11893 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11894 && host_integerp (arg1, false)
11895 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11896 && host_integerp (TREE_OPERAND (arg0, 1), false)
11897 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11899 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11900 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11906 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11908 lshift = build_int_cst (type, -1);
11909 lshift = int_const_binop (code, lshift, arg1, 0);
11911 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11915 /* Rewrite an LROTATE_EXPR by a constant into an
11916 RROTATE_EXPR by a new constant. */
11917 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11919 tree tem = build_int_cst (TREE_TYPE (arg1),
11920 TYPE_PRECISION (type));
11921 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11922 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11925 /* If we have a rotate of a bit operation with the rotate count and
11926 the second operand of the bit operation both constant,
11927 permute the two operations. */
11928 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11929 && (TREE_CODE (arg0) == BIT_AND_EXPR
11930 || TREE_CODE (arg0) == BIT_IOR_EXPR
11931 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11932 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11933 return fold_build2 (TREE_CODE (arg0), type,
11934 fold_build2 (code, type,
11935 TREE_OPERAND (arg0, 0), arg1),
11936 fold_build2 (code, type,
11937 TREE_OPERAND (arg0, 1), arg1));
11939 /* Two consecutive rotates adding up to the precision of the
11940 type can be ignored. */
11941 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11942 && TREE_CODE (arg0) == RROTATE_EXPR
11943 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11944 && TREE_INT_CST_HIGH (arg1) == 0
11945 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11946 && ((TREE_INT_CST_LOW (arg1)
11947 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11948 == (unsigned int) TYPE_PRECISION (type)))
11949 return TREE_OPERAND (arg0, 0);
11951 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11952 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11953 if the latter can be further optimized. */
11954 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11955 && TREE_CODE (arg0) == BIT_AND_EXPR
11956 && TREE_CODE (arg1) == INTEGER_CST
11957 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11959 tree mask = fold_build2 (code, type,
11960 fold_convert (type, TREE_OPERAND (arg0, 1)),
11962 tree shift = fold_build2 (code, type,
11963 fold_convert (type, TREE_OPERAND (arg0, 0)),
11965 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11973 if (operand_equal_p (arg0, arg1, 0))
11974 return omit_one_operand (type, arg0, arg1);
11975 if (INTEGRAL_TYPE_P (type)
11976 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11977 return omit_one_operand (type, arg1, arg0);
11978 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11984 if (operand_equal_p (arg0, arg1, 0))
11985 return omit_one_operand (type, arg0, arg1);
11986 if (INTEGRAL_TYPE_P (type)
11987 && TYPE_MAX_VALUE (type)
11988 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11989 return omit_one_operand (type, arg1, arg0);
11990 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11995 case TRUTH_ANDIF_EXPR:
11996 /* Note that the operands of this must be ints
11997 and their values must be 0 or 1.
11998 ("true" is a fixed value perhaps depending on the language.) */
11999 /* If first arg is constant zero, return it. */
12000 if (integer_zerop (arg0))
12001 return fold_convert (type, arg0);
12002 case TRUTH_AND_EXPR:
12003 /* If either arg is constant true, drop it. */
12004 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12005 return non_lvalue (fold_convert (type, arg1));
12006 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12007 /* Preserve sequence points. */
12008 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12009 return non_lvalue (fold_convert (type, arg0));
12010 /* If second arg is constant zero, result is zero, but first arg
12011 must be evaluated. */
12012 if (integer_zerop (arg1))
12013 return omit_one_operand (type, arg1, arg0);
12014 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12015 case will be handled here. */
12016 if (integer_zerop (arg0))
12017 return omit_one_operand (type, arg0, arg1);
12019 /* !X && X is always false. */
12020 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12021 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12022 return omit_one_operand (type, integer_zero_node, arg1);
12023 /* X && !X is always false. */
12024 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12025 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12026 return omit_one_operand (type, integer_zero_node, arg0);
12028 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12029 means A >= Y && A != MAX, but in this case we know that
12032 if (!TREE_SIDE_EFFECTS (arg0)
12033 && !TREE_SIDE_EFFECTS (arg1))
12035 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
12036 if (tem && !operand_equal_p (tem, arg0, 0))
12037 return fold_build2 (code, type, tem, arg1);
12039 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
12040 if (tem && !operand_equal_p (tem, arg1, 0))
12041 return fold_build2 (code, type, arg0, tem);
12045 /* We only do these simplifications if we are optimizing. */
12049 /* Check for things like (A || B) && (A || C). We can convert this
12050 to A || (B && C). Note that either operator can be any of the four
12051 truth and/or operations and the transformation will still be
12052 valid. Also note that we only care about order for the
12053 ANDIF and ORIF operators. If B contains side effects, this
12054 might change the truth-value of A. */
12055 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12056 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12057 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12058 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12059 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12060 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12062 tree a00 = TREE_OPERAND (arg0, 0);
12063 tree a01 = TREE_OPERAND (arg0, 1);
12064 tree a10 = TREE_OPERAND (arg1, 0);
12065 tree a11 = TREE_OPERAND (arg1, 1);
12066 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12067 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12068 && (code == TRUTH_AND_EXPR
12069 || code == TRUTH_OR_EXPR));
12071 if (operand_equal_p (a00, a10, 0))
12072 return fold_build2 (TREE_CODE (arg0), type, a00,
12073 fold_build2 (code, type, a01, a11));
12074 else if (commutative && operand_equal_p (a00, a11, 0))
12075 return fold_build2 (TREE_CODE (arg0), type, a00,
12076 fold_build2 (code, type, a01, a10));
12077 else if (commutative && operand_equal_p (a01, a10, 0))
12078 return fold_build2 (TREE_CODE (arg0), type, a01,
12079 fold_build2 (code, type, a00, a11));
12081 /* This case if tricky because we must either have commutative
12082 operators or else A10 must not have side-effects. */
12084 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12085 && operand_equal_p (a01, a11, 0))
12086 return fold_build2 (TREE_CODE (arg0), type,
12087 fold_build2 (code, type, a00, a10),
12091 /* See if we can build a range comparison. */
12092 if (0 != (tem = fold_range_test (code, type, op0, op1)))
12095 /* Check for the possibility of merging component references. If our
12096 lhs is another similar operation, try to merge its rhs with our
12097 rhs. Then try to merge our lhs and rhs. */
12098 if (TREE_CODE (arg0) == code
12099 && 0 != (tem = fold_truthop (code, type,
12100 TREE_OPERAND (arg0, 1), arg1)))
12101 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12103 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
12108 case TRUTH_ORIF_EXPR:
12109 /* Note that the operands of this must be ints
12110 and their values must be 0 or true.
12111 ("true" is a fixed value perhaps depending on the language.) */
12112 /* If first arg is constant true, return it. */
12113 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12114 return fold_convert (type, arg0);
12115 case TRUTH_OR_EXPR:
12116 /* If either arg is constant zero, drop it. */
12117 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12118 return non_lvalue (fold_convert (type, arg1));
12119 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12120 /* Preserve sequence points. */
12121 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12122 return non_lvalue (fold_convert (type, arg0));
12123 /* If second arg is constant true, result is true, but we must
12124 evaluate first arg. */
12125 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12126 return omit_one_operand (type, arg1, arg0);
12127 /* Likewise for first arg, but note this only occurs here for
12129 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12130 return omit_one_operand (type, arg0, arg1);
12132 /* !X || X is always true. */
12133 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12134 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12135 return omit_one_operand (type, integer_one_node, arg1);
12136 /* X || !X is always true. */
12137 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12138 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12139 return omit_one_operand (type, integer_one_node, arg0);
12143 case TRUTH_XOR_EXPR:
12144 /* If the second arg is constant zero, drop it. */
12145 if (integer_zerop (arg1))
12146 return non_lvalue (fold_convert (type, arg0));
12147 /* If the second arg is constant true, this is a logical inversion. */
12148 if (integer_onep (arg1))
12150 /* Only call invert_truthvalue if operand is a truth value. */
12151 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12152 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12154 tem = invert_truthvalue (arg0);
12155 return non_lvalue (fold_convert (type, tem));
12157 /* Identical arguments cancel to zero. */
12158 if (operand_equal_p (arg0, arg1, 0))
12159 return omit_one_operand (type, integer_zero_node, arg0);
12161 /* !X ^ X is always true. */
12162 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12163 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12164 return omit_one_operand (type, integer_one_node, arg1);
12166 /* X ^ !X is always true. */
12167 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12168 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12169 return omit_one_operand (type, integer_one_node, arg0);
12175 tem = fold_comparison (code, type, op0, op1);
12176 if (tem != NULL_TREE)
12179 /* bool_var != 0 becomes bool_var. */
12180 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12181 && code == NE_EXPR)
12182 return non_lvalue (fold_convert (type, arg0));
12184 /* bool_var == 1 becomes bool_var. */
12185 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12186 && code == EQ_EXPR)
12187 return non_lvalue (fold_convert (type, arg0));
12189 /* bool_var != 1 becomes !bool_var. */
12190 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12191 && code == NE_EXPR)
12192 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12194 /* bool_var == 0 becomes !bool_var. */
12195 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12196 && code == EQ_EXPR)
12197 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12199 /* If this is an equality comparison of the address of two non-weak,
12200 unaliased symbols neither of which are extern (since we do not
12201 have access to attributes for externs), then we know the result. */
12202 if (TREE_CODE (arg0) == ADDR_EXPR
12203 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12204 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12205 && ! lookup_attribute ("alias",
12206 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12207 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12208 && TREE_CODE (arg1) == ADDR_EXPR
12209 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12210 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12211 && ! lookup_attribute ("alias",
12212 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12213 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12215 /* We know that we're looking at the address of two
12216 non-weak, unaliased, static _DECL nodes.
12218 It is both wasteful and incorrect to call operand_equal_p
12219 to compare the two ADDR_EXPR nodes. It is wasteful in that
12220 all we need to do is test pointer equality for the arguments
12221 to the two ADDR_EXPR nodes. It is incorrect to use
12222 operand_equal_p as that function is NOT equivalent to a
12223 C equality test. It can in fact return false for two
12224 objects which would test as equal using the C equality
12226 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12227 return constant_boolean_node (equal
12228 ? code == EQ_EXPR : code != EQ_EXPR,
12232 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12233 a MINUS_EXPR of a constant, we can convert it into a comparison with
12234 a revised constant as long as no overflow occurs. */
12235 if (TREE_CODE (arg1) == INTEGER_CST
12236 && (TREE_CODE (arg0) == PLUS_EXPR
12237 || TREE_CODE (arg0) == MINUS_EXPR)
12238 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12239 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12240 ? MINUS_EXPR : PLUS_EXPR,
12241 fold_convert (TREE_TYPE (arg0), arg1),
12242 TREE_OPERAND (arg0, 1), 0))
12243 && !TREE_OVERFLOW (tem))
12244 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12246 /* Similarly for a NEGATE_EXPR. */
12247 if (TREE_CODE (arg0) == NEGATE_EXPR
12248 && TREE_CODE (arg1) == INTEGER_CST
12249 && 0 != (tem = negate_expr (arg1))
12250 && TREE_CODE (tem) == INTEGER_CST
12251 && !TREE_OVERFLOW (tem))
12252 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12254 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12255 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12256 && TREE_CODE (arg1) == INTEGER_CST
12257 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12258 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12259 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
12260 fold_convert (TREE_TYPE (arg0), arg1),
12261 TREE_OPERAND (arg0, 1)));
12263 /* Transform comparisons of the form X +- C CMP X. */
12264 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12265 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12266 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12267 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12268 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12270 tree cst = TREE_OPERAND (arg0, 1);
12272 if (code == EQ_EXPR
12273 && !integer_zerop (cst))
12274 return omit_two_operands (type, boolean_false_node,
12275 TREE_OPERAND (arg0, 0), arg1);
12277 return omit_two_operands (type, boolean_true_node,
12278 TREE_OPERAND (arg0, 0), arg1);
12281 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12282 for !=. Don't do this for ordered comparisons due to overflow. */
12283 if (TREE_CODE (arg0) == MINUS_EXPR
12284 && integer_zerop (arg1))
12285 return fold_build2 (code, type,
12286 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12288 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12289 if (TREE_CODE (arg0) == ABS_EXPR
12290 && (integer_zerop (arg1) || real_zerop (arg1)))
12291 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12293 /* If this is an EQ or NE comparison with zero and ARG0 is
12294 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12295 two operations, but the latter can be done in one less insn
12296 on machines that have only two-operand insns or on which a
12297 constant cannot be the first operand. */
12298 if (TREE_CODE (arg0) == BIT_AND_EXPR
12299 && integer_zerop (arg1))
12301 tree arg00 = TREE_OPERAND (arg0, 0);
12302 tree arg01 = TREE_OPERAND (arg0, 1);
12303 if (TREE_CODE (arg00) == LSHIFT_EXPR
12304 && integer_onep (TREE_OPERAND (arg00, 0)))
12306 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12307 arg01, TREE_OPERAND (arg00, 1));
12308 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12309 build_int_cst (TREE_TYPE (arg0), 1));
12310 return fold_build2 (code, type,
12311 fold_convert (TREE_TYPE (arg1), tem), arg1);
12313 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12314 && integer_onep (TREE_OPERAND (arg01, 0)))
12316 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12317 arg00, TREE_OPERAND (arg01, 1));
12318 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12319 build_int_cst (TREE_TYPE (arg0), 1));
12320 return fold_build2 (code, type,
12321 fold_convert (TREE_TYPE (arg1), tem), arg1);
12325 /* If this is an NE or EQ comparison of zero against the result of a
12326 signed MOD operation whose second operand is a power of 2, make
12327 the MOD operation unsigned since it is simpler and equivalent. */
12328 if (integer_zerop (arg1)
12329 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12330 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12331 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12332 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12333 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12334 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12336 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12337 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12338 fold_convert (newtype,
12339 TREE_OPERAND (arg0, 0)),
12340 fold_convert (newtype,
12341 TREE_OPERAND (arg0, 1)));
12343 return fold_build2 (code, type, newmod,
12344 fold_convert (newtype, arg1));
12347 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12348 C1 is a valid shift constant, and C2 is a power of two, i.e.
12350 if (TREE_CODE (arg0) == BIT_AND_EXPR
12351 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12352 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12354 && integer_pow2p (TREE_OPERAND (arg0, 1))
12355 && integer_zerop (arg1))
12357 tree itype = TREE_TYPE (arg0);
12358 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12359 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12361 /* Check for a valid shift count. */
12362 if (TREE_INT_CST_HIGH (arg001) == 0
12363 && TREE_INT_CST_LOW (arg001) < prec)
12365 tree arg01 = TREE_OPERAND (arg0, 1);
12366 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12367 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12368 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12369 can be rewritten as (X & (C2 << C1)) != 0. */
12370 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12372 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12373 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12374 return fold_build2 (code, type, tem, arg1);
12376 /* Otherwise, for signed (arithmetic) shifts,
12377 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12378 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12379 else if (!TYPE_UNSIGNED (itype))
12380 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12381 arg000, build_int_cst (itype, 0));
12382 /* Otherwise, for unsigned (logical) shifts,
12383 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12384 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12386 return omit_one_operand (type,
12387 code == EQ_EXPR ? integer_one_node
12388 : integer_zero_node,
12393 /* If this is an NE comparison of zero with an AND of one, remove the
12394 comparison since the AND will give the correct value. */
12395 if (code == NE_EXPR
12396 && integer_zerop (arg1)
12397 && TREE_CODE (arg0) == BIT_AND_EXPR
12398 && integer_onep (TREE_OPERAND (arg0, 1)))
12399 return fold_convert (type, arg0);
12401 /* If we have (A & C) == C where C is a power of 2, convert this into
12402 (A & C) != 0. Similarly for NE_EXPR. */
12403 if (TREE_CODE (arg0) == BIT_AND_EXPR
12404 && integer_pow2p (TREE_OPERAND (arg0, 1))
12405 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12406 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12407 arg0, fold_convert (TREE_TYPE (arg0),
12408 integer_zero_node));
12410 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12411 bit, then fold the expression into A < 0 or A >= 0. */
12412 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12416 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12417 Similarly for NE_EXPR. */
12418 if (TREE_CODE (arg0) == BIT_AND_EXPR
12419 && TREE_CODE (arg1) == INTEGER_CST
12420 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12422 tree notc = fold_build1 (BIT_NOT_EXPR,
12423 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12424 TREE_OPERAND (arg0, 1));
12425 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12427 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12428 if (integer_nonzerop (dandnotc))
12429 return omit_one_operand (type, rslt, arg0);
12432 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12433 Similarly for NE_EXPR. */
12434 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12435 && TREE_CODE (arg1) == INTEGER_CST
12436 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12438 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12439 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12440 TREE_OPERAND (arg0, 1), notd);
12441 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12442 if (integer_nonzerop (candnotd))
12443 return omit_one_operand (type, rslt, arg0);
12446 /* If this is a comparison of a field, we may be able to simplify it. */
12447 if ((TREE_CODE (arg0) == COMPONENT_REF
12448 || TREE_CODE (arg0) == BIT_FIELD_REF)
12449 /* Handle the constant case even without -O
12450 to make sure the warnings are given. */
12451 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12453 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12458 /* Optimize comparisons of strlen vs zero to a compare of the
12459 first character of the string vs zero. To wit,
12460 strlen(ptr) == 0 => *ptr == 0
12461 strlen(ptr) != 0 => *ptr != 0
12462 Other cases should reduce to one of these two (or a constant)
12463 due to the return value of strlen being unsigned. */
12464 if (TREE_CODE (arg0) == CALL_EXPR
12465 && integer_zerop (arg1))
12467 tree fndecl = get_callee_fndecl (arg0);
12470 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12471 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12472 && call_expr_nargs (arg0) == 1
12473 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12475 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12476 return fold_build2 (code, type, iref,
12477 build_int_cst (TREE_TYPE (iref), 0));
12481 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12482 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12483 if (TREE_CODE (arg0) == RSHIFT_EXPR
12484 && integer_zerop (arg1)
12485 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12487 tree arg00 = TREE_OPERAND (arg0, 0);
12488 tree arg01 = TREE_OPERAND (arg0, 1);
12489 tree itype = TREE_TYPE (arg00);
12490 if (TREE_INT_CST_HIGH (arg01) == 0
12491 && TREE_INT_CST_LOW (arg01)
12492 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12494 if (TYPE_UNSIGNED (itype))
12496 itype = signed_type_for (itype);
12497 arg00 = fold_convert (itype, arg00);
12499 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12500 type, arg00, build_int_cst (itype, 0));
12504 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12505 if (integer_zerop (arg1)
12506 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12507 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12508 TREE_OPERAND (arg0, 1));
12510 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12511 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12512 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12513 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12514 build_int_cst (TREE_TYPE (arg1), 0));
12515 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12516 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12517 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12518 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12519 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12520 build_int_cst (TREE_TYPE (arg1), 0));
12522 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12523 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12524 && TREE_CODE (arg1) == INTEGER_CST
12525 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12526 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12527 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12528 TREE_OPERAND (arg0, 1), arg1));
12530 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12531 (X & C) == 0 when C is a single bit. */
12532 if (TREE_CODE (arg0) == BIT_AND_EXPR
12533 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12534 && integer_zerop (arg1)
12535 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12537 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12538 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12539 TREE_OPERAND (arg0, 1));
12540 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12544 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12545 constant C is a power of two, i.e. a single bit. */
12546 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12547 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12548 && integer_zerop (arg1)
12549 && integer_pow2p (TREE_OPERAND (arg0, 1))
12550 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12551 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12553 tree arg00 = TREE_OPERAND (arg0, 0);
12554 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12555 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12558 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12559 when C is a power of two, i.e. a single bit. */
12560 if (TREE_CODE (arg0) == BIT_AND_EXPR
12561 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12562 && integer_zerop (arg1)
12563 && integer_pow2p (TREE_OPERAND (arg0, 1))
12564 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12565 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12567 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12568 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12569 arg000, TREE_OPERAND (arg0, 1));
12570 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12571 tem, build_int_cst (TREE_TYPE (tem), 0));
12574 if (integer_zerop (arg1)
12575 && tree_expr_nonzero_p (arg0))
12577 tree res = constant_boolean_node (code==NE_EXPR, type);
12578 return omit_one_operand (type, res, arg0);
12581 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12582 if (TREE_CODE (arg0) == NEGATE_EXPR
12583 && TREE_CODE (arg1) == NEGATE_EXPR)
12584 return fold_build2 (code, type,
12585 TREE_OPERAND (arg0, 0),
12586 TREE_OPERAND (arg1, 0));
12588 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12589 if (TREE_CODE (arg0) == BIT_AND_EXPR
12590 && TREE_CODE (arg1) == BIT_AND_EXPR)
12592 tree arg00 = TREE_OPERAND (arg0, 0);
12593 tree arg01 = TREE_OPERAND (arg0, 1);
12594 tree arg10 = TREE_OPERAND (arg1, 0);
12595 tree arg11 = TREE_OPERAND (arg1, 1);
12596 tree itype = TREE_TYPE (arg0);
12598 if (operand_equal_p (arg01, arg11, 0))
12599 return fold_build2 (code, type,
12600 fold_build2 (BIT_AND_EXPR, itype,
12601 fold_build2 (BIT_XOR_EXPR, itype,
12604 build_int_cst (itype, 0));
12606 if (operand_equal_p (arg01, arg10, 0))
12607 return fold_build2 (code, type,
12608 fold_build2 (BIT_AND_EXPR, itype,
12609 fold_build2 (BIT_XOR_EXPR, itype,
12612 build_int_cst (itype, 0));
12614 if (operand_equal_p (arg00, arg11, 0))
12615 return fold_build2 (code, type,
12616 fold_build2 (BIT_AND_EXPR, itype,
12617 fold_build2 (BIT_XOR_EXPR, itype,
12620 build_int_cst (itype, 0));
12622 if (operand_equal_p (arg00, arg10, 0))
12623 return fold_build2 (code, type,
12624 fold_build2 (BIT_AND_EXPR, itype,
12625 fold_build2 (BIT_XOR_EXPR, itype,
12628 build_int_cst (itype, 0));
12631 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12632 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12634 tree arg00 = TREE_OPERAND (arg0, 0);
12635 tree arg01 = TREE_OPERAND (arg0, 1);
12636 tree arg10 = TREE_OPERAND (arg1, 0);
12637 tree arg11 = TREE_OPERAND (arg1, 1);
12638 tree itype = TREE_TYPE (arg0);
12640 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12641 operand_equal_p guarantees no side-effects so we don't need
12642 to use omit_one_operand on Z. */
12643 if (operand_equal_p (arg01, arg11, 0))
12644 return fold_build2 (code, type, arg00, arg10);
12645 if (operand_equal_p (arg01, arg10, 0))
12646 return fold_build2 (code, type, arg00, arg11);
12647 if (operand_equal_p (arg00, arg11, 0))
12648 return fold_build2 (code, type, arg01, arg10);
12649 if (operand_equal_p (arg00, arg10, 0))
12650 return fold_build2 (code, type, arg01, arg11);
12652 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12653 if (TREE_CODE (arg01) == INTEGER_CST
12654 && TREE_CODE (arg11) == INTEGER_CST)
12655 return fold_build2 (code, type,
12656 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12657 fold_build2 (BIT_XOR_EXPR, itype,
12662 /* Attempt to simplify equality/inequality comparisons of complex
12663 values. Only lower the comparison if the result is known or
12664 can be simplified to a single scalar comparison. */
12665 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12666 || TREE_CODE (arg0) == COMPLEX_CST)
12667 && (TREE_CODE (arg1) == COMPLEX_EXPR
12668 || TREE_CODE (arg1) == COMPLEX_CST))
12670 tree real0, imag0, real1, imag1;
12673 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12675 real0 = TREE_OPERAND (arg0, 0);
12676 imag0 = TREE_OPERAND (arg0, 1);
12680 real0 = TREE_REALPART (arg0);
12681 imag0 = TREE_IMAGPART (arg0);
12684 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12686 real1 = TREE_OPERAND (arg1, 0);
12687 imag1 = TREE_OPERAND (arg1, 1);
12691 real1 = TREE_REALPART (arg1);
12692 imag1 = TREE_IMAGPART (arg1);
12695 rcond = fold_binary (code, type, real0, real1);
12696 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12698 if (integer_zerop (rcond))
12700 if (code == EQ_EXPR)
12701 return omit_two_operands (type, boolean_false_node,
12703 return fold_build2 (NE_EXPR, type, imag0, imag1);
12707 if (code == NE_EXPR)
12708 return omit_two_operands (type, boolean_true_node,
12710 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12714 icond = fold_binary (code, type, imag0, imag1);
12715 if (icond && TREE_CODE (icond) == INTEGER_CST)
12717 if (integer_zerop (icond))
12719 if (code == EQ_EXPR)
12720 return omit_two_operands (type, boolean_false_node,
12722 return fold_build2 (NE_EXPR, type, real0, real1);
12726 if (code == NE_EXPR)
12727 return omit_two_operands (type, boolean_true_node,
12729 return fold_build2 (EQ_EXPR, type, real0, real1);
12740 tem = fold_comparison (code, type, op0, op1);
12741 if (tem != NULL_TREE)
12744 /* Transform comparisons of the form X +- C CMP X. */
12745 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12746 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12747 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12748 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12749 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12750 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12752 tree arg01 = TREE_OPERAND (arg0, 1);
12753 enum tree_code code0 = TREE_CODE (arg0);
12756 if (TREE_CODE (arg01) == REAL_CST)
12757 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12759 is_positive = tree_int_cst_sgn (arg01);
12761 /* (X - c) > X becomes false. */
12762 if (code == GT_EXPR
12763 && ((code0 == MINUS_EXPR && is_positive >= 0)
12764 || (code0 == PLUS_EXPR && is_positive <= 0)))
12766 if (TREE_CODE (arg01) == INTEGER_CST
12767 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12768 fold_overflow_warning (("assuming signed overflow does not "
12769 "occur when assuming that (X - c) > X "
12770 "is always false"),
12771 WARN_STRICT_OVERFLOW_ALL);
12772 return constant_boolean_node (0, type);
12775 /* Likewise (X + c) < X becomes false. */
12776 if (code == LT_EXPR
12777 && ((code0 == PLUS_EXPR && is_positive >= 0)
12778 || (code0 == MINUS_EXPR && is_positive <= 0)))
12780 if (TREE_CODE (arg01) == INTEGER_CST
12781 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12782 fold_overflow_warning (("assuming signed overflow does not "
12783 "occur when assuming that "
12784 "(X + c) < X is always false"),
12785 WARN_STRICT_OVERFLOW_ALL);
12786 return constant_boolean_node (0, type);
12789 /* Convert (X - c) <= X to true. */
12790 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12792 && ((code0 == MINUS_EXPR && is_positive >= 0)
12793 || (code0 == PLUS_EXPR && is_positive <= 0)))
12795 if (TREE_CODE (arg01) == INTEGER_CST
12796 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12797 fold_overflow_warning (("assuming signed overflow does not "
12798 "occur when assuming that "
12799 "(X - c) <= X is always true"),
12800 WARN_STRICT_OVERFLOW_ALL);
12801 return constant_boolean_node (1, type);
12804 /* Convert (X + c) >= X to true. */
12805 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12807 && ((code0 == PLUS_EXPR && is_positive >= 0)
12808 || (code0 == MINUS_EXPR && is_positive <= 0)))
12810 if (TREE_CODE (arg01) == INTEGER_CST
12811 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12812 fold_overflow_warning (("assuming signed overflow does not "
12813 "occur when assuming that "
12814 "(X + c) >= X is always true"),
12815 WARN_STRICT_OVERFLOW_ALL);
12816 return constant_boolean_node (1, type);
12819 if (TREE_CODE (arg01) == INTEGER_CST)
12821 /* Convert X + c > X and X - c < X to true for integers. */
12822 if (code == GT_EXPR
12823 && ((code0 == PLUS_EXPR && is_positive > 0)
12824 || (code0 == MINUS_EXPR && is_positive < 0)))
12826 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12827 fold_overflow_warning (("assuming signed overflow does "
12828 "not occur when assuming that "
12829 "(X + c) > X is always true"),
12830 WARN_STRICT_OVERFLOW_ALL);
12831 return constant_boolean_node (1, type);
12834 if (code == LT_EXPR
12835 && ((code0 == MINUS_EXPR && is_positive > 0)
12836 || (code0 == PLUS_EXPR && is_positive < 0)))
12838 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12839 fold_overflow_warning (("assuming signed overflow does "
12840 "not occur when assuming that "
12841 "(X - c) < X is always true"),
12842 WARN_STRICT_OVERFLOW_ALL);
12843 return constant_boolean_node (1, type);
12846 /* Convert X + c <= X and X - c >= X to false for integers. */
12847 if (code == LE_EXPR
12848 && ((code0 == PLUS_EXPR && is_positive > 0)
12849 || (code0 == MINUS_EXPR && is_positive < 0)))
12851 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12852 fold_overflow_warning (("assuming signed overflow does "
12853 "not occur when assuming that "
12854 "(X + c) <= X is always false"),
12855 WARN_STRICT_OVERFLOW_ALL);
12856 return constant_boolean_node (0, type);
12859 if (code == GE_EXPR
12860 && ((code0 == MINUS_EXPR && is_positive > 0)
12861 || (code0 == PLUS_EXPR && is_positive < 0)))
12863 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12864 fold_overflow_warning (("assuming signed overflow does "
12865 "not occur when assuming that "
12866 "(X - c) >= X is always false"),
12867 WARN_STRICT_OVERFLOW_ALL);
12868 return constant_boolean_node (0, type);
12873 /* Comparisons with the highest or lowest possible integer of
12874 the specified precision will have known values. */
12876 tree arg1_type = TREE_TYPE (arg1);
12877 unsigned int width = TYPE_PRECISION (arg1_type);
12879 if (TREE_CODE (arg1) == INTEGER_CST
12880 && width <= 2 * HOST_BITS_PER_WIDE_INT
12881 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12883 HOST_WIDE_INT signed_max_hi;
12884 unsigned HOST_WIDE_INT signed_max_lo;
12885 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12887 if (width <= HOST_BITS_PER_WIDE_INT)
12889 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12894 if (TYPE_UNSIGNED (arg1_type))
12896 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12902 max_lo = signed_max_lo;
12903 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12909 width -= HOST_BITS_PER_WIDE_INT;
12910 signed_max_lo = -1;
12911 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12916 if (TYPE_UNSIGNED (arg1_type))
12918 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12923 max_hi = signed_max_hi;
12924 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12928 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12929 && TREE_INT_CST_LOW (arg1) == max_lo)
12933 return omit_one_operand (type, integer_zero_node, arg0);
12936 return fold_build2 (EQ_EXPR, type, op0, op1);
12939 return omit_one_operand (type, integer_one_node, arg0);
12942 return fold_build2 (NE_EXPR, type, op0, op1);
12944 /* The GE_EXPR and LT_EXPR cases above are not normally
12945 reached because of previous transformations. */
12950 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12952 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12956 arg1 = const_binop (PLUS_EXPR, arg1,
12957 build_int_cst (TREE_TYPE (arg1), 1), 0);
12958 return fold_build2 (EQ_EXPR, type,
12959 fold_convert (TREE_TYPE (arg1), arg0),
12962 arg1 = const_binop (PLUS_EXPR, arg1,
12963 build_int_cst (TREE_TYPE (arg1), 1), 0);
12964 return fold_build2 (NE_EXPR, type,
12965 fold_convert (TREE_TYPE (arg1), arg0),
12970 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12972 && TREE_INT_CST_LOW (arg1) == min_lo)
12976 return omit_one_operand (type, integer_zero_node, arg0);
12979 return fold_build2 (EQ_EXPR, type, op0, op1);
12982 return omit_one_operand (type, integer_one_node, arg0);
12985 return fold_build2 (NE_EXPR, type, op0, op1);
12990 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12992 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12996 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12997 return fold_build2 (NE_EXPR, type,
12998 fold_convert (TREE_TYPE (arg1), arg0),
13001 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13002 return fold_build2 (EQ_EXPR, type,
13003 fold_convert (TREE_TYPE (arg1), arg0),
13009 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13010 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13011 && TYPE_UNSIGNED (arg1_type)
13012 /* We will flip the signedness of the comparison operator
13013 associated with the mode of arg1, so the sign bit is
13014 specified by this mode. Check that arg1 is the signed
13015 max associated with this sign bit. */
13016 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13017 /* signed_type does not work on pointer types. */
13018 && INTEGRAL_TYPE_P (arg1_type))
13020 /* The following case also applies to X < signed_max+1
13021 and X >= signed_max+1 because of previous transformations. */
13022 if (code == LE_EXPR || code == GT_EXPR)
13025 st = signed_type_for (TREE_TYPE (arg1));
13026 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
13027 type, fold_convert (st, arg0),
13028 build_int_cst (st, 0));
13034 /* If we are comparing an ABS_EXPR with a constant, we can
13035 convert all the cases into explicit comparisons, but they may
13036 well not be faster than doing the ABS and one comparison.
13037 But ABS (X) <= C is a range comparison, which becomes a subtraction
13038 and a comparison, and is probably faster. */
13039 if (code == LE_EXPR
13040 && TREE_CODE (arg1) == INTEGER_CST
13041 && TREE_CODE (arg0) == ABS_EXPR
13042 && ! TREE_SIDE_EFFECTS (arg0)
13043 && (0 != (tem = negate_expr (arg1)))
13044 && TREE_CODE (tem) == INTEGER_CST
13045 && !TREE_OVERFLOW (tem))
13046 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13047 build2 (GE_EXPR, type,
13048 TREE_OPERAND (arg0, 0), tem),
13049 build2 (LE_EXPR, type,
13050 TREE_OPERAND (arg0, 0), arg1));
13052 /* Convert ABS_EXPR<x> >= 0 to true. */
13053 strict_overflow_p = false;
13054 if (code == GE_EXPR
13055 && (integer_zerop (arg1)
13056 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13057 && real_zerop (arg1)))
13058 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13060 if (strict_overflow_p)
13061 fold_overflow_warning (("assuming signed overflow does not occur "
13062 "when simplifying comparison of "
13063 "absolute value and zero"),
13064 WARN_STRICT_OVERFLOW_CONDITIONAL);
13065 return omit_one_operand (type, integer_one_node, arg0);
13068 /* Convert ABS_EXPR<x> < 0 to false. */
13069 strict_overflow_p = false;
13070 if (code == LT_EXPR
13071 && (integer_zerop (arg1) || real_zerop (arg1))
13072 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13074 if (strict_overflow_p)
13075 fold_overflow_warning (("assuming signed overflow does not occur "
13076 "when simplifying comparison of "
13077 "absolute value and zero"),
13078 WARN_STRICT_OVERFLOW_CONDITIONAL);
13079 return omit_one_operand (type, integer_zero_node, arg0);
13082 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13083 and similarly for >= into !=. */
13084 if ((code == LT_EXPR || code == GE_EXPR)
13085 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13086 && TREE_CODE (arg1) == LSHIFT_EXPR
13087 && integer_onep (TREE_OPERAND (arg1, 0)))
13088 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13089 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13090 TREE_OPERAND (arg1, 1)),
13091 build_int_cst (TREE_TYPE (arg0), 0));
13093 if ((code == LT_EXPR || code == GE_EXPR)
13094 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13095 && CONVERT_EXPR_P (arg1)
13096 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13097 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13099 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13100 fold_convert (TREE_TYPE (arg0),
13101 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13102 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13104 build_int_cst (TREE_TYPE (arg0), 0));
13108 case UNORDERED_EXPR:
13116 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13118 t1 = fold_relational_const (code, type, arg0, arg1);
13119 if (t1 != NULL_TREE)
13123 /* If the first operand is NaN, the result is constant. */
13124 if (TREE_CODE (arg0) == REAL_CST
13125 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13126 && (code != LTGT_EXPR || ! flag_trapping_math))
13128 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13129 ? integer_zero_node
13130 : integer_one_node;
13131 return omit_one_operand (type, t1, arg1);
13134 /* If the second operand is NaN, the result is constant. */
13135 if (TREE_CODE (arg1) == REAL_CST
13136 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13137 && (code != LTGT_EXPR || ! flag_trapping_math))
13139 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13140 ? integer_zero_node
13141 : integer_one_node;
13142 return omit_one_operand (type, t1, arg0);
13145 /* Simplify unordered comparison of something with itself. */
13146 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13147 && operand_equal_p (arg0, arg1, 0))
13148 return constant_boolean_node (1, type);
13150 if (code == LTGT_EXPR
13151 && !flag_trapping_math
13152 && operand_equal_p (arg0, arg1, 0))
13153 return constant_boolean_node (0, type);
13155 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13157 tree targ0 = strip_float_extensions (arg0);
13158 tree targ1 = strip_float_extensions (arg1);
13159 tree newtype = TREE_TYPE (targ0);
13161 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13162 newtype = TREE_TYPE (targ1);
13164 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13165 return fold_build2 (code, type, fold_convert (newtype, targ0),
13166 fold_convert (newtype, targ1));
13171 case COMPOUND_EXPR:
13172 /* When pedantic, a compound expression can be neither an lvalue
13173 nor an integer constant expression. */
13174 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13176 /* Don't let (0, 0) be null pointer constant. */
13177 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13178 : fold_convert (type, arg1);
13179 return pedantic_non_lvalue (tem);
13182 if ((TREE_CODE (arg0) == REAL_CST
13183 && TREE_CODE (arg1) == REAL_CST)
13184 || (TREE_CODE (arg0) == INTEGER_CST
13185 && TREE_CODE (arg1) == INTEGER_CST))
13186 return build_complex (type, arg0, arg1);
13190 /* An ASSERT_EXPR should never be passed to fold_binary. */
13191 gcc_unreachable ();
13195 } /* switch (code) */
13198 /* Callback for walk_tree, looking for LABEL_EXPR.
13199 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
13200 Do not check the sub-tree of GOTO_EXPR. */
/* NOTE(review): the return-type line, the case labels and the return
   statements of this callback are elided in this excerpt; only the
   switch skeleton is visible.  */
13203 contains_label_1 (tree *tp,
13204 int *walk_subtrees,
13205 void *data ATTRIBUTE_UNUSED)
13207 switch (TREE_CODE (*tp))
/* Presumably the GOTO_EXPR arm: suppress descent into the sub-tree,
   per the header comment above — TODO confirm against full source.  */
13212 *walk_subtrees = 0;
13219 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
13220 accessible from outside the sub-tree. Returns NULL_TREE if no
13221 addressable label is found. */
13224 contains_label_p (tree st)
/* Walk ST with contains_label_1; a non-NULL walk_tree result means a
   LABEL_EXPR was found, so this yields a boolean-valued expression.  */
13226 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
13229 /* Fold a ternary expression of code CODE and type TYPE with operands
13230 OP0, OP1, and OP2. Return the folded expression if folding is
13231 successful. Otherwise, return NULL_TREE. */
13234 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
13237 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13238 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Only genuine 3-operand expression codes may reach this routine.  */
13240 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13241 && TREE_CODE_LENGTH (code) == 3);
13243 /* Strip any conversions that don't change the mode. This is safe
13244 for every expression, except for a comparison expression because
13245 its signedness is derived from its operands. So, in the latter
13246 case, only strip conversions that don't change the signedness.
13248 Note that this is done as an internal manipulation within the
13249 constant folder, in order to find the simplest representation of
13250 the arguments so that their form can be studied. In any cases,
13251 the appropriate type conversions should be put back in the tree
13252 that will get out of the constant folder. */
/* NOTE(review): the STRIP_NOPS calls and the opening of the
   switch (code) dispatch are elided in this excerpt.  */
13267 case COMPONENT_REF:
/* Fold a COMPONENT_REF of a constant CONSTRUCTOR by scanning its
   elements for the referenced field.  */
13268 if (TREE_CODE (arg0) == CONSTRUCTOR
13269 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13271 unsigned HOST_WIDE_INT idx;
13273 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
/* NOTE(review): the COND_EXPR case label is elided; everything from here
   to the CALL_EXPR case handles COND_EXPR folding.  */
13280 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13281 so all simple results must be passed through pedantic_non_lvalue. */
13282 if (TREE_CODE (arg0) == INTEGER_CST
13284 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13285 tem = integer_zerop (arg0) ? op2 : op1;
13286 /* Only optimize constant conditions when the selected branch
13287 has the same type as the COND_EXPR. This avoids optimizing
13288 away "c ? x : throw", where the throw has a void type.
13289 Avoid throwing away that operand which contains label. */
13290 if ((!TREE_SIDE_EFFECTS (unused_op)
13291 || !contains_label_p (unused_op))
13292 && (! VOID_TYPE_P (TREE_TYPE (tem))
13293 || VOID_TYPE_P (type)))
13294 return pedantic_non_lvalue (tem);
/* A ? X : X simplifies to X (keeping A for its side effects).  */
13297 if (operand_equal_p (arg1, op2, 0))
13298 return pedantic_omit_one_operand (type, arg1, arg0);
13300 /* If we have A op B ? A : C, we may be able to convert this to a
13301 simpler expression, depending on the operation and the values
13302 of B and C. Signed zeros prevent all of these transformations,
13303 for reasons given above each one.
13305 Also try swapping the arguments and inverting the conditional. */
13306 if (COMPARISON_CLASS_P (arg0)
13307 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13308 arg1, TREE_OPERAND (arg0, 1))
13309 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13311 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
/* Same transformation with the arms swapped and the comparison
   inverted (the op2-against-comparison form).  */
13316 if (COMPARISON_CLASS_P (arg0)
13317 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13319 TREE_OPERAND (arg0, 1))
13320 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13322 tem = fold_truth_not_expr (arg0);
13323 if (tem && COMPARISON_CLASS_P (tem))
13325 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13331 /* If the second operand is simpler than the third, swap them
13332 since that produces better jump optimization results. */
13333 if (truth_value_p (TREE_CODE (arg0))
13334 && tree_swap_operands_p (op1, op2, false))
13336 /* See if this can be inverted. If it can't, possibly because
13337 it was a floating-point inequality comparison, don't do
13339 tem = fold_truth_not_expr (arg0);
13341 return fold_build3 (code, type, tem, op2, op1);
13344 /* Convert A ? 1 : 0 to simply A. */
13345 if (integer_onep (op1)
13346 && integer_zerop (op2)
13347 /* If we try to convert OP0 to our type, the
13348 call to fold will try to move the conversion inside
13349 a COND, which will recurse. In that case, the COND_EXPR
13350 is probably the best choice, so leave it alone. */
13351 && type == TREE_TYPE (arg0))
13352 return pedantic_non_lvalue (arg0);
13354 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13355 over COND_EXPR in cases such as floating point comparisons. */
13356 if (integer_zerop (op1)
13357 && integer_onep (op2)
13358 && truth_value_p (TREE_CODE (arg0)))
13359 return pedantic_non_lvalue (fold_convert (type,
13360 invert_truthvalue (arg0)));
13362 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13363 if (TREE_CODE (arg0) == LT_EXPR
13364 && integer_zerop (TREE_OPERAND (arg0, 1))
13365 && integer_zerop (op2)
13366 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13368 /* sign_bit_p only checks ARG1 bits within A's precision.
13369 If <sign bit of A> has wider type than A, bits outside
13370 of A's precision in <sign bit of A> need to be checked.
13371 If they are all 0, this optimization needs to be done
13372 in unsigned A's type, if they are all 1 in signed A's type,
13373 otherwise this can't be done. */
13374 if (TYPE_PRECISION (TREE_TYPE (tem))
13375 < TYPE_PRECISION (TREE_TYPE (arg1))
13376 && TYPE_PRECISION (TREE_TYPE (tem))
13377 < TYPE_PRECISION (type))
/* Build a two-word (hi/lo) mask covering the bits of ARG1 that lie
   outside TEM's precision but inside the outer precision.  */
13379 unsigned HOST_WIDE_INT mask_lo;
13380 HOST_WIDE_INT mask_hi;
13381 int inner_width, outer_width;
13384 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13385 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13386 if (outer_width > TYPE_PRECISION (type))
13387 outer_width = TYPE_PRECISION (type);
13389 if (outer_width > HOST_BITS_PER_WIDE_INT)
13391 mask_hi = ((unsigned HOST_WIDE_INT) -1
13392 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13398 mask_lo = ((unsigned HOST_WIDE_INT) -1
13399 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13401 if (inner_width > HOST_BITS_PER_WIDE_INT)
13403 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13404 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13408 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13409 >> (HOST_BITS_PER_WIDE_INT - inner_width));
/* All excess bits 1: redo the optimization in A's signed type;
   all excess bits 0: redo it in A's unsigned type.  */
13411 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13412 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13414 tem_type = signed_type_for (TREE_TYPE (tem));
13415 tem = fold_convert (tem_type, tem);
13417 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13418 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13420 tem_type = unsigned_type_for (TREE_TYPE (tem));
13421 tem = fold_convert (tem_type, tem);
13428 return fold_convert (type,
13429 fold_build2 (BIT_AND_EXPR,
13430 TREE_TYPE (tem), tem,
13431 fold_convert (TREE_TYPE (tem),
13435 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13436 already handled above. */
13437 if (TREE_CODE (arg0) == BIT_AND_EXPR
13438 && integer_onep (TREE_OPERAND (arg0, 1))
13439 && integer_zerop (op2)
13440 && integer_pow2p (arg1))
13442 tree tem = TREE_OPERAND (arg0, 0);
13444 if (TREE_CODE (tem) == RSHIFT_EXPR
13445 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13446 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13447 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13448 return fold_build2 (BIT_AND_EXPR, type,
13449 TREE_OPERAND (tem, 0), arg1);
13452 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13453 is probably obsolete because the first operand should be a
13454 truth value (that's why we have the two cases above), but let's
13455 leave it in until we can confirm this for all front-ends. */
13456 if (integer_zerop (op2)
13457 && TREE_CODE (arg0) == NE_EXPR
13458 && integer_zerop (TREE_OPERAND (arg0, 1))
13459 && integer_pow2p (arg1)
13460 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13461 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13462 arg1, OEP_ONLY_CONST))
13463 return pedantic_non_lvalue (fold_convert (type,
13464 TREE_OPERAND (arg0, 0)));
13466 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13467 if (integer_zerop (op2)
13468 && truth_value_p (TREE_CODE (arg0))
13469 && truth_value_p (TREE_CODE (arg1)))
13470 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13471 fold_convert (type, arg0),
13474 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13475 if (integer_onep (op2)
13476 && truth_value_p (TREE_CODE (arg0))
13477 && truth_value_p (TREE_CODE (arg1)))
13479 /* Only perform transformation if ARG0 is easily inverted. */
13480 tem = fold_truth_not_expr (arg0);
13482 return fold_build2 (TRUTH_ORIF_EXPR, type,
13483 fold_convert (type, tem),
13487 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13488 if (integer_zerop (arg1)
13489 && truth_value_p (TREE_CODE (arg0))
13490 && truth_value_p (TREE_CODE (op2)))
13492 /* Only perform transformation if ARG0 is easily inverted. */
13493 tem = fold_truth_not_expr (arg0);
13495 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13496 fold_convert (type, tem),
13500 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13501 if (integer_onep (arg1)
13502 && truth_value_p (TREE_CODE (arg0))
13503 && truth_value_p (TREE_CODE (op2)))
13504 return fold_build2 (TRUTH_ORIF_EXPR, type,
13505 fold_convert (type, arg0),
13511 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13512 of fold_ternary on them. */
13513 gcc_unreachable ();
13515 case BIT_FIELD_REF:
/* Extract a single element of a constant vector: ARG1 is the field
   width, OP2 the bit offset; the offset must be an exact multiple of
   the element width.  */
13516 if ((TREE_CODE (arg0) == VECTOR_CST
13517 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13518 && type == TREE_TYPE (TREE_TYPE (arg0)))
13520 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13521 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13524 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13525 && (idx % width) == 0
13526 && (idx = idx / width)
13527 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13529 tree elements = NULL_TREE;
13531 if (TREE_CODE (arg0) == VECTOR_CST)
13532 elements = TREE_VECTOR_CST_ELTS (arg0);
13535 unsigned HOST_WIDE_INT idx;
/* CONSTRUCTOR case: build the element list in reverse so the
   idx-th element can be reached by walking the chain below.  */
13538 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13539 elements = tree_cons (NULL_TREE, value, elements);
13541 while (idx-- > 0 && elements)
13542 elements = TREE_CHAIN (elements);
13544 return TREE_VALUE (elements);
/* Missing trailing elements of a CONSTRUCTOR default to zero.  */
13546 return fold_convert (type, integer_zero_node);
13550 /* A bit-field-ref that referenced the full argument can be stripped. */
13551 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13552 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13553 && integer_zerop (op2))
13554 return fold_convert (type, arg0);
13560 } /* switch (code) */
13563 /* Perform constant folding and related simplification of EXPR.
13564 The related simplifications include x*1 => x, x*0 => 0, etc.,
13565 and application of the associative law.
13566 NOP_EXPR conversions may be removed freely (as long as we
13567 are careful not to change the type of the overall expression).
13568 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13569 but we can constant-fold them if they have constant operands. */
13571 #ifdef ENABLE_FOLD_CHECKING
13572 # define fold(x) fold_1 (x)
13573 static tree fold_1 (tree);
/* NOTE(review): the function header (fold/fold_1) and some braces are
   elided here; this is the main dispatch that hands EXPR to
   fold_unary/fold_binary/fold_ternary by operand count.  */
13579 const tree t = expr;
13580 enum tree_code code = TREE_CODE (t);
13581 enum tree_code_class kind = TREE_CODE_CLASS (code);
13584 /* Return right away if a constant. */
13585 if (kind == tcc_constant)
13588 /* CALL_EXPR-like objects with variable numbers of operands are
13589 treated specially. */
13590 if (kind == tcc_vl_exp)
13592 if (code == CALL_EXPR)
13594 tem = fold_call_expr (expr, false);
13595 return tem ? tem : expr;
13600 if (IS_EXPR_CODE_CLASS (kind))
13602 tree type = TREE_TYPE (t);
13603 tree op0, op1, op2;
/* Dispatch on arity; each case returns the folded tree or the
   original EXPR when folding produced nothing.  */
13605 switch (TREE_CODE_LENGTH (code))
13608 op0 = TREE_OPERAND (t, 0);
13609 tem = fold_unary (code, type, op0);
13610 return tem ? tem : expr;
13612 op0 = TREE_OPERAND (t, 0);
13613 op1 = TREE_OPERAND (t, 1);
13614 tem = fold_binary (code, type, op0, op1);
13615 return tem ? tem : expr;
13617 op0 = TREE_OPERAND (t, 0);
13618 op1 = TREE_OPERAND (t, 1);
13619 op2 = TREE_OPERAND (t, 2);
13620 tem = fold_ternary (code, type, op0, op1, op2);
13621 return tem ? tem : expr;
/* Presumably the ARRAY_REF case: fold indexing into a constant
   CONSTRUCTOR — TODO confirm, the case label is elided.  */
13631 tree op0 = TREE_OPERAND (t, 0);
13632 tree op1 = TREE_OPERAND (t, 1);
13634 if (TREE_CODE (op1) == INTEGER_CST
13635 && TREE_CODE (op0) == CONSTRUCTOR
13636 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13638 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13639 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13640 unsigned HOST_WIDE_INT begin = 0;
13642 /* Find a matching index by means of a binary search. */
13643 while (begin != end)
13645 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13646 tree index = VEC_index (constructor_elt, elts, middle)->index;
13648 if (TREE_CODE (index) == INTEGER_CST
13649 && tree_int_cst_lt (index, op1))
13650 begin = middle + 1;
13651 else if (TREE_CODE (index) == INTEGER_CST
13652 && tree_int_cst_lt (op1, index))
/* RANGE_EXPR indices match when OP1 falls inside [low, high].  */
13654 else if (TREE_CODE (index) == RANGE_EXPR
13655 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13656 begin = middle + 1;
13657 else if (TREE_CODE (index) == RANGE_EXPR
13658 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13661 return VEC_index (constructor_elt, elts, middle)->value;
/* Presumably CONST_DECL: fold to its initializer — TODO confirm.  */
13669 return fold (DECL_INITIAL (t));
13673 } /* switch (code) */
13676 #ifdef ENABLE_FOLD_CHECKING
13679 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13680 static void fold_check_failed (const_tree, const_tree);
13681 void print_fold_checksum (const_tree);
13683 /* When --enable-checking=fold, compute a digest of expr before
13684 and after actual fold call to see if fold did not accidentally
13685 change original expr. */
/* NOTE(review): the wrapper's signature and variable declarations for
   `ht` and `ret` are elided in this excerpt.  MD5-checksum EXPR, run the
   real fold (fold_1), checksum again, and abort on any difference.  */
13691 struct md5_ctx ctx;
13692 unsigned char checksum_before[16], checksum_after[16];
13695 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13696 md5_init_ctx (&ctx);
13697 fold_checksum_tree (expr, &ctx, ht);
13698 md5_finish_ctx (&ctx, checksum_before);
13701 ret = fold_1 (expr);
13703 md5_init_ctx (&ctx);
13704 fold_checksum_tree (expr, &ctx, ht);
13705 md5_finish_ctx (&ctx, checksum_after);
/* Any byte difference means fold_1 mutated its input in place.  */
13708 if (memcmp (checksum_before, checksum_after, 16))
13709 fold_check_failed (expr, ret);
/* Debug aid: print the MD5 checksum of EXPR as hex digits on stderr.  */
13715 print_fold_checksum (const_tree expr)
13717 struct md5_ctx ctx;
13718 unsigned char checksum[16], cnt;
13721 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13722 md5_init_ctx (&ctx);
13723 fold_checksum_tree (expr, &ctx, ht);
13724 md5_finish_ctx (&ctx, checksum);
13726 for (cnt = 0; cnt < 16; ++cnt)
13727 fprintf (stderr, "%02x", checksum[cnt]);
13728 putc ('\n', stderr);
/* Report a fold-checking failure: fold modified its input tree.
   Both parameters are currently unused by the diagnostic.  */
13732 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13734 internal_error ("fold check: original tree changed by fold");
/* Accumulate into CTX an MD5 digest of EXPR and everything reachable
   from it, using hash table HT to avoid revisiting shared nodes.
   Fields that fold is allowed to modify (assembler names, cached type
   values, variant chains) are masked out via a stack copy in BUF.  */
13738 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13741 enum tree_code code;
13742 union tree_node buf;
/* BUF must be big enough to hold a copy of any node we sanitize.  */
13747 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13748 <= sizeof (struct tree_function_decl))
13749 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
/* NOTE(review): the early-return on an already-visited slot is elided.  */
13752 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13756 code = TREE_CODE (expr);
13757 if (TREE_CODE_CLASS (code) == tcc_declaration
13758 && DECL_ASSEMBLER_NAME_SET_P (expr))
13760 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13761 memcpy ((char *) &buf, expr, tree_size (expr));
13762 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13763 expr = (tree) &buf;
13765 else if (TREE_CODE_CLASS (code) == tcc_type
13766 && (TYPE_POINTER_TO (expr)
13767 || TYPE_REFERENCE_TO (expr)
13768 || TYPE_CACHED_VALUES_P (expr)
13769 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13770 || TYPE_NEXT_VARIANT (expr)))
13772 /* Allow these fields to be modified. */
13774 memcpy ((char *) &buf, expr, tree_size (expr));
13775 expr = tmp = (tree) &buf;
13776 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13777 TYPE_POINTER_TO (tmp) = NULL;
13778 TYPE_REFERENCE_TO (tmp) = NULL;
13779 TYPE_NEXT_VARIANT (tmp) = NULL;
13780 if (TYPE_CACHED_VALUES_P (tmp))
13782 TYPE_CACHED_VALUES_P (tmp) = 0;
13783 TYPE_CACHED_VALUES (tmp) = NULL;
/* Hash the node's raw bytes, then recurse into its referenced trees.  */
13786 md5_process_bytes (expr, tree_size (expr), ctx);
13787 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13788 if (TREE_CODE_CLASS (code) != tcc_type
13789 && TREE_CODE_CLASS (code) != tcc_declaration
13790 && code != TREE_LIST
13791 && code != SSA_NAME)
13792 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13793 switch (TREE_CODE_CLASS (code))
13799 md5_process_bytes (TREE_STRING_POINTER (expr),
13800 TREE_STRING_LENGTH (expr), ctx);
13803 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13804 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13807 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13813 case tcc_exceptional:
/* TREE_LIST chains are walked iteratively via the recursive_label
   goto below to avoid deep recursion along the chain.  */
13817 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13818 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13819 expr = TREE_CHAIN (expr);
13820 goto recursive_label;
13823 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13824 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13830 case tcc_expression:
13831 case tcc_reference:
13832 case tcc_comparison:
13835 case tcc_statement:
13837 len = TREE_OPERAND_LENGTH (expr);
13838 for (i = 0; i < len; ++i)
13839 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13841 case tcc_declaration:
13842 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13843 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13844 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13846 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13847 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13848 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13849 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13850 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13852 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13853 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13855 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13857 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13858 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13859 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
/* Presumably the tcc_type case starts here — the label is elided.  */
13863 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13864 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13865 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13866 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13867 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13868 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13869 if (INTEGRAL_TYPE_P (expr)
13870 || SCALAR_FLOAT_TYPE_P (expr))
13872 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13873 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13875 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13876 if (TREE_CODE (expr) == RECORD_TYPE
13877 || TREE_CODE (expr) == UNION_TYPE
13878 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13879 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13880 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13887 /* Helper function for outputting the checksum of a tree T. When
13888 debugging with gdb, you can "define mynext" to be "next" followed
13889 by "call debug_fold_checksum (op0)", then just trace down till the
13893 debug_fold_checksum (const_tree t)
13896 unsigned char checksum[16];
13897 struct md5_ctx ctx;
13898 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13900 md5_init_ctx (&ctx);
13901 fold_checksum_tree (t, &ctx, ht);
13902 md5_finish_ctx (&ctx, checksum);
/* Unlike print_fold_checksum, bytes are printed in decimal here.  */
13905 for (i = 0; i < 16; i++)
13906 fprintf (stderr, "%d ", checksum[i]);
13908 fprintf (stderr, "\n");
13913 /* Fold a unary tree expression with code CODE of type TYPE with an
13914 operand OP0. Return a folded expression if successful. Otherwise,
13915 return a tree expression with code CODE of type TYPE with an
13919 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13922 #ifdef ENABLE_FOLD_CHECKING
13923 unsigned char checksum_before[16], checksum_after[16];
13924 struct md5_ctx ctx;
/* Checksum OP0 before folding so we can prove fold_unary did not
   modify its argument in place.  */
13927 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13928 md5_init_ctx (&ctx);
13929 fold_checksum_tree (op0, &ctx, ht);
13930 md5_finish_ctx (&ctx, checksum_before);
13934 tem = fold_unary (code, type, op0);
/* Folding failed: build the expression node unchanged.  */
13936 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13938 #ifdef ENABLE_FOLD_CHECKING
13939 md5_init_ctx (&ctx);
13940 fold_checksum_tree (op0, &ctx, ht);
13941 md5_finish_ctx (&ctx, checksum_after);
13944 if (memcmp (checksum_before, checksum_after, 16))
13945 fold_check_failed (op0, tem);
13950 /* Fold a binary tree expression with code CODE of type TYPE with
13951 operands OP0 and OP1. Return a folded expression if successful.
13952 Otherwise, return a tree expression with code CODE of type TYPE
13953 with operands OP0 and OP1. */
13956 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13960 #ifdef ENABLE_FOLD_CHECKING
13961 unsigned char checksum_before_op0[16],
13962 checksum_before_op1[16],
13963 checksum_after_op0[16],
13964 checksum_after_op1[16];
13965 struct md5_ctx ctx;
/* Checksum both operands before folding; each is verified
   independently afterwards.  */
13968 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13969 md5_init_ctx (&ctx);
13970 fold_checksum_tree (op0, &ctx, ht);
13971 md5_finish_ctx (&ctx, checksum_before_op0);
13974 md5_init_ctx (&ctx);
13975 fold_checksum_tree (op1, &ctx, ht);
13976 md5_finish_ctx (&ctx, checksum_before_op1);
13980 tem = fold_binary (code, type, op0, op1);
/* Folding failed: build the expression node unchanged.  */
13982 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13984 #ifdef ENABLE_FOLD_CHECKING
13985 md5_init_ctx (&ctx);
13986 fold_checksum_tree (op0, &ctx, ht);
13987 md5_finish_ctx (&ctx, checksum_after_op0);
13990 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13991 fold_check_failed (op0, tem);
13993 md5_init_ctx (&ctx);
13994 fold_checksum_tree (op1, &ctx, ht);
13995 md5_finish_ctx (&ctx, checksum_after_op1);
13998 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13999 fold_check_failed (op1, tem);
14004 /* Fold a ternary tree expression with code CODE of type TYPE with
14005 operands OP0, OP1, and OP2. Return a folded expression if
14006 successful. Otherwise, return a tree expression with code CODE of
14007 type TYPE with operands OP0, OP1, and OP2. */
14010 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
14014 #ifdef ENABLE_FOLD_CHECKING
14015 unsigned char checksum_before_op0[16],
14016 checksum_before_op1[16],
14017 checksum_before_op2[16],
14018 checksum_after_op0[16],
14019 checksum_after_op1[16],
14020 checksum_after_op2[16];
14021 struct md5_ctx ctx;
/* Checksum all three operands before folding; each is verified
   independently afterwards.  */
14024 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14025 md5_init_ctx (&ctx);
14026 fold_checksum_tree (op0, &ctx, ht);
14027 md5_finish_ctx (&ctx, checksum_before_op0);
14030 md5_init_ctx (&ctx);
14031 fold_checksum_tree (op1, &ctx, ht);
14032 md5_finish_ctx (&ctx, checksum_before_op1);
14035 md5_init_ctx (&ctx);
14036 fold_checksum_tree (op2, &ctx, ht);
14037 md5_finish_ctx (&ctx, checksum_before_op2);
/* Variable-length expressions (CALL_EXPR) must use fold_build_call_array.  */
14041 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14042 tem = fold_ternary (code, type, op0, op1, op2);
14044 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14046 #ifdef ENABLE_FOLD_CHECKING
14047 md5_init_ctx (&ctx);
14048 fold_checksum_tree (op0, &ctx, ht);
14049 md5_finish_ctx (&ctx, checksum_after_op0);
14052 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14053 fold_check_failed (op0, tem);
14055 md5_init_ctx (&ctx);
14056 fold_checksum_tree (op1, &ctx, ht);
14057 md5_finish_ctx (&ctx, checksum_after_op1);
14060 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14061 fold_check_failed (op1, tem);
14063 md5_init_ctx (&ctx);
14064 fold_checksum_tree (op2, &ctx, ht);
14065 md5_finish_ctx (&ctx, checksum_after_op2);
14068 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14069 fold_check_failed (op2, tem);
14074 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14075 arguments in ARGARRAY, and a null static chain.
14076 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14077 of type TYPE from the given operands as constructed by build_call_array. */
14080 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
14083 #ifdef ENABLE_FOLD_CHECKING
14084 unsigned char checksum_before_fn[16],
14085 checksum_before_arglist[16],
14086 checksum_after_fn[16],
14087 checksum_after_arglist[16];
14088 struct md5_ctx ctx;
/* Checksum the callee and the whole argument list (one combined digest
   for all NARGS arguments) before folding.  */
14092 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14093 md5_init_ctx (&ctx);
14094 fold_checksum_tree (fn, &ctx, ht);
14095 md5_finish_ctx (&ctx, checksum_before_fn);
14098 md5_init_ctx (&ctx);
14099 for (i = 0; i < nargs; i++)
14100 fold_checksum_tree (argarray[i], &ctx, ht);
14101 md5_finish_ctx (&ctx, checksum_before_arglist);
14105 tem = fold_builtin_call_array (type, fn, nargs, argarray);
14107 #ifdef ENABLE_FOLD_CHECKING
14108 md5_init_ctx (&ctx);
14109 fold_checksum_tree (fn, &ctx, ht);
14110 md5_finish_ctx (&ctx, checksum_after_fn);
14113 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14114 fold_check_failed (fn, tem);
14116 md5_init_ctx (&ctx);
14117 for (i = 0; i < nargs; i++)
14118 fold_checksum_tree (argarray[i], &ctx, ht);
14119 md5_finish_ctx (&ctx, checksum_after_arglist);
/* NULL_TREE: the failure is in the argument list, not a single tree.  */
14122 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14123 fold_check_failed (NULL_TREE, tem);
14128 /* Perform constant folding and related simplification of initializer
14129 expression EXPR. These behave identically to "fold_buildN" but ignore
14130 potential run-time traps and exceptions that fold must preserve. */
/* Save the trap-related flags, then clear them so folding may assume a
   trap-free static-initializer context; END_FOLD_INIT restores them.  */
14132 #define START_FOLD_INIT \
14133 int saved_signaling_nans = flag_signaling_nans;\
14134 int saved_trapping_math = flag_trapping_math;\
14135 int saved_rounding_math = flag_rounding_math;\
14136 int saved_trapv = flag_trapv;\
14137 int saved_folding_initializer = folding_initializer;\
14138 flag_signaling_nans = 0;\
14139 flag_trapping_math = 0;\
14140 flag_rounding_math = 0;\
14142 folding_initializer = 1;
14144 #define END_FOLD_INIT \
14145 flag_signaling_nans = saved_signaling_nans;\
14146 flag_trapping_math = saved_trapping_math;\
14147 flag_rounding_math = saved_rounding_math;\
14148 flag_trapv = saved_trapv;\
14149 folding_initializer = saved_folding_initializer;
/* Each wrapper below brackets the corresponding fold_buildN call with
   START_FOLD_INIT/END_FOLD_INIT (the bracket lines are elided here).  */
14152 fold_build1_initializer (enum tree_code code, tree type, tree op)
14157 result = fold_build1 (code, type, op);
14164 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
14169 result = fold_build2 (code, type, op0, op1);
14176 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
14182 result = fold_build3 (code, type, op0, op1, op2);
14189 fold_build_call_array_initializer (tree type, tree fn,
14190 int nargs, tree *argarray)
14195 result = fold_build_call_array (type, fn, nargs, argarray);
14201 #undef START_FOLD_INIT
14202 #undef END_FOLD_INIT
14204 /* Determine if first argument is a multiple of second argument. Return 0 if
14205 it is not, or we cannot easily determined it to be.
14207 An example of the sort of thing we care about (at this point; this routine
14208 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14209 fold cases do now) is discovering that
14211 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14217 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14219 This code also handles discovering that
14221 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14223 is a multiple of 8 so we don't have to worry about dealing with a
14224 possible remainder.
14226 Note that we *look* inside a SAVE_EXPR only to determine how it was
14227 calculated; it is not safe for fold to do much of anything else with the
14228 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14229 at run time. For example, the latter example above *cannot* be implemented
14230 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14231 evaluation time of the original SAVE_EXPR is not necessarily the same at
14232 the time the new expression is evaluated. The only optimization of this
14233 sort that would be valid is changing
14235 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14239 SAVE_EXPR (I) * SAVE_EXPR (J)
14241 (where the same SAVE_EXPR (J) is used in the original and the
14242 transformed version). */
14245 multiple_of_p (tree type, const_tree top, const_tree bottom)
/* Anything is trivially a multiple of itself.  */
14247 if (operand_equal_p (top, bottom, 0))
14250 if (TREE_CODE (type) != INTEGER_TYPE)
/* NOTE(review): case labels inside this switch are elided; the arms are
   identified by their comments and recursion structure.  */
14253 switch (TREE_CODE (top))
14256 /* Bitwise and provides a power of two multiple. If the mask is
14257 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14258 if (!integer_pow2p (bottom))
/* Either operand being a multiple suffices (MULT_EXPR-style arm).  */
14263 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14264 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* PLUS/MINUS-style arm: both operands must be multiples.  */
14268 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14269 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* LSHIFT_EXPR by a constant: rewrite as an equivalent multiplication
   and recurse, guarding against shift counts that overflow.  */
14272 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14276 op1 = TREE_OPERAND (top, 1);
14277 /* const_binop may not detect overflow correctly,
14278 so check for it explicitly here. */
14279 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14280 > TREE_INT_CST_LOW (op1)
14281 && TREE_INT_CST_HIGH (op1) == 0
14282 && 0 != (t1 = fold_convert (type,
14283 const_binop (LSHIFT_EXPR,
14286 && !TREE_OVERFLOW (t1))
14287 return multiple_of_p (type, t1, bottom);
14292 /* Can't handle conversions from non-integral or wider integral type. */
14293 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14294 || (TYPE_PRECISION (type)
14295 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14298 /* .. fall through ... */
14301 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* INTEGER_CST arm: compute TOP % BOTTOM directly, rejecting zero
   divisors and negative values in unsigned types.  */
14304 if (TREE_CODE (bottom) != INTEGER_CST
14305 || integer_zerop (bottom)
14306 || (TYPE_UNSIGNED (type)
14307 && (tree_int_cst_sgn (top) < 0
14308 || tree_int_cst_sgn (bottom) < 0)))
14310 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14318 /* Return true if CODE or TYPE is known to be non-negative. */
14321 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14323 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14324 && truth_value_p (code))
14325 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14326 have a signed:1 type (where the value is -1 and 0). */
/* NOTE(review): the return statements of this function are elided.  */
14331 /* Return true if (CODE OP0) is known to be non-negative. If the return
14332 value is based on the assumption that signed overflow is undefined,
14333 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14334 *STRICT_OVERFLOW_P. */
14337 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14338 bool *strict_overflow_p)
/* Unsigned results are trivially non-negative.  */
14340 if (TYPE_UNSIGNED (type))
/* Presumably the ABS_EXPR arm — case label elided.  */
14346 /* We can't return 1 if flag_wrapv is set because
14347 ABS_EXPR<INT_MIN> = INT_MIN. */
14348 if (!INTEGRAL_TYPE_P (type))
14350 if (TYPE_OVERFLOW_UNDEFINED (type))
/* Relying on undefined signed overflow: record it for -Wstrict-overflow.  */
14352 *strict_overflow_p = true;
14357 case NON_LVALUE_EXPR:
14359 case FIX_TRUNC_EXPR:
/* These do not change sign: defer to the operand.  */
14360 return tree_expr_nonnegative_warnv_p (op0,
14361 strict_overflow_p);
/* Conversion arm: case analysis on inner/outer REAL vs INTEGER types.  */
14365 tree inner_type = TREE_TYPE (op0);
14366 tree outer_type = type;
14368 if (TREE_CODE (outer_type) == REAL_TYPE)
14370 if (TREE_CODE (inner_type) == REAL_TYPE)
14371 return tree_expr_nonnegative_warnv_p (op0,
14372 strict_overflow_p);
14373 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14375 if (TYPE_UNSIGNED (inner_type))
14377 return tree_expr_nonnegative_warnv_p (op0,
14378 strict_overflow_p);
14381 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14383 if (TREE_CODE (inner_type) == REAL_TYPE)
14384 return tree_expr_nonnegative_warnv_p (op0,
14385 strict_overflow_p);
/* Widening from unsigned keeps the value non-negative.  */
14386 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14387 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14388 && TYPE_UNSIGNED (inner_type);
14394 return tree_simple_nonnegative_warnv_p (code, type);
14397 /* We don't know sign of `t', so be conservative and return false. */
14401 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14402 value is based on the assumption that signed overflow is undefined,
14403 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14404 *STRICT_OVERFLOW_P. */
/* NOTE(review): the switch head and several case labels/braces are elided
   from this extract; code below is kept byte-identical.  */
14407 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14408 tree op1, bool *strict_overflow_p)
14410 if (TYPE_UNSIGNED (type))
14415 case POINTER_PLUS_EXPR:
/* PLUS: for floats, nonneg + nonneg is nonneg.  */
14417 if (FLOAT_TYPE_P (type))
14418 return (tree_expr_nonnegative_warnv_p (op0,
14420 && tree_expr_nonnegative_warnv_p (op1,
14421 strict_overflow_p));
14423 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14424 both unsigned and at least 2 bits shorter than the result. */
14425 if (TREE_CODE (type) == INTEGER_TYPE
14426 && TREE_CODE (op0) == NOP_EXPR
14427 && TREE_CODE (op1) == NOP_EXPR)
14429 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14430 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14431 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14432 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
/* +1 for the possible carry: the sum needs one extra bit.  */
14434 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14435 TYPE_PRECISION (inner2)) + 1;
14436 return prec < TYPE_PRECISION (type);
/* MULT: x * x is nonnegative for floats.  */
14442 if (FLOAT_TYPE_P (type))
14444 /* x * x for floating point x is always non-negative. */
14445 if (operand_equal_p (op0, op1, 0))
14447 return (tree_expr_nonnegative_warnv_p (op0,
14449 && tree_expr_nonnegative_warnv_p (op1,
14450 strict_overflow_p));
14453 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14454 both unsigned and their total bits is shorter than the result. */
14455 if (TREE_CODE (type) == INTEGER_TYPE
14456 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14457 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14459 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14460 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14462 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14463 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14466 bool unsigned0 = TYPE_UNSIGNED (inner0);
14467 bool unsigned1 = TYPE_UNSIGNED (inner1);
/* A nonnegative INTEGER_CST counts as "unsigned" for this check.  */
14469 if (TREE_CODE (op0) == INTEGER_CST)
14470 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14472 if (TREE_CODE (op1) == INTEGER_CST)
14473 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14475 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14476 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
/* For constants use the minimal precision that can represent them.  */
14478 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14479 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14480 : TYPE_PRECISION (inner0);
14482 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14483 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14484 : TYPE_PRECISION (inner1);
14486 return precision0 + precision1 < TYPE_PRECISION (type);
/* Presumably MIN/MAX or bitwise case (labels elided): either operand
   nonnegative suffices here — TODO confirm against full source.  */
14493 return (tree_expr_nonnegative_warnv_p (op0,
14495 || tree_expr_nonnegative_warnv_p (op1,
14496 strict_overflow_p));
14502 case TRUNC_DIV_EXPR:
14503 case CEIL_DIV_EXPR:
14504 case FLOOR_DIV_EXPR:
14505 case ROUND_DIV_EXPR:
14506 return (tree_expr_nonnegative_warnv_p (op0,
14508 && tree_expr_nonnegative_warnv_p (op1,
14509 strict_overflow_p));
14511 case TRUNC_MOD_EXPR:
14512 case CEIL_MOD_EXPR:
14513 case FLOOR_MOD_EXPR:
14514 case ROUND_MOD_EXPR:
14515 return tree_expr_nonnegative_warnv_p (op0,
14516 strict_overflow_p);
14518 return tree_simple_nonnegative_warnv_p (code, type);
14521 /* We don't know sign of `t', so be conservative and return false. */
14525 /* Return true if T is known to be non-negative. If the return
14526 value is based on the assumption that signed overflow is undefined,
14527 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14528 *STRICT_OVERFLOW_P. */
/* NOTE(review): case labels for the constant/COND cases are elided from
   this extract; code below is kept byte-identical.  */
14531 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14533 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14536 switch (TREE_CODE (t))
/* Integer, real and fixed-point constants: read the sign directly.  */
14539 return tree_int_cst_sgn (t) >= 0;
14542 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14545 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
/* Presumably COND_EXPR: both arms must be nonnegative.  */
14548 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14550 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14551 strict_overflow_p));
14553 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14556 /* We don't know sign of `t', so be conservative and return false. */
14560 /* Return true if T is known to be non-negative. If the return
14561 value is based on the assumption that signed overflow is undefined,
14562 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14563 *STRICT_OVERFLOW_P. */
/* NOTE(review): some braces/returns are elided from this extract;
   code below is kept byte-identical.  Dispatches on the built-in
   function being called.  */
14566 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14567 tree arg0, tree arg1, bool *strict_overflow_p)
14569 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14570 switch (DECL_FUNCTION_CODE (fndecl))
/* Built-ins whose result is always nonnegative.  */
14572 CASE_FLT_FN (BUILT_IN_ACOS):
14573 CASE_FLT_FN (BUILT_IN_ACOSH):
14574 CASE_FLT_FN (BUILT_IN_CABS):
14575 CASE_FLT_FN (BUILT_IN_COSH):
14576 CASE_FLT_FN (BUILT_IN_ERFC):
14577 CASE_FLT_FN (BUILT_IN_EXP):
14578 CASE_FLT_FN (BUILT_IN_EXP10):
14579 CASE_FLT_FN (BUILT_IN_EXP2):
14580 CASE_FLT_FN (BUILT_IN_FABS):
14581 CASE_FLT_FN (BUILT_IN_FDIM):
14582 CASE_FLT_FN (BUILT_IN_HYPOT):
14583 CASE_FLT_FN (BUILT_IN_POW10):
14584 CASE_INT_FN (BUILT_IN_FFS):
14585 CASE_INT_FN (BUILT_IN_PARITY):
14586 CASE_INT_FN (BUILT_IN_POPCOUNT):
14587 case BUILT_IN_BSWAP32:
14588 case BUILT_IN_BSWAP64:
14592 CASE_FLT_FN (BUILT_IN_SQRT):
14593 /* sqrt(-0.0) is -0.0. */
14594 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14596 return tree_expr_nonnegative_warnv_p (arg0,
14597 strict_overflow_p);
/* Sign-preserving built-ins: result nonnegative iff ARG0 is.  */
14599 CASE_FLT_FN (BUILT_IN_ASINH):
14600 CASE_FLT_FN (BUILT_IN_ATAN):
14601 CASE_FLT_FN (BUILT_IN_ATANH):
14602 CASE_FLT_FN (BUILT_IN_CBRT):
14603 CASE_FLT_FN (BUILT_IN_CEIL):
14604 CASE_FLT_FN (BUILT_IN_ERF):
14605 CASE_FLT_FN (BUILT_IN_EXPM1):
14606 CASE_FLT_FN (BUILT_IN_FLOOR):
14607 CASE_FLT_FN (BUILT_IN_FMOD):
14608 CASE_FLT_FN (BUILT_IN_FREXP):
14609 CASE_FLT_FN (BUILT_IN_LCEIL):
14610 CASE_FLT_FN (BUILT_IN_LDEXP):
14611 CASE_FLT_FN (BUILT_IN_LFLOOR):
14612 CASE_FLT_FN (BUILT_IN_LLCEIL):
14613 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14614 CASE_FLT_FN (BUILT_IN_LLRINT):
14615 CASE_FLT_FN (BUILT_IN_LLROUND):
14616 CASE_FLT_FN (BUILT_IN_LRINT):
14617 CASE_FLT_FN (BUILT_IN_LROUND):
14618 CASE_FLT_FN (BUILT_IN_MODF):
14619 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14620 CASE_FLT_FN (BUILT_IN_RINT):
14621 CASE_FLT_FN (BUILT_IN_ROUND):
14622 CASE_FLT_FN (BUILT_IN_SCALB):
14623 CASE_FLT_FN (BUILT_IN_SCALBLN):
14624 CASE_FLT_FN (BUILT_IN_SCALBN):
14625 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14626 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14627 CASE_FLT_FN (BUILT_IN_SINH):
14628 CASE_FLT_FN (BUILT_IN_TANH):
14629 CASE_FLT_FN (BUILT_IN_TRUNC):
14630 /* True if the 1st argument is nonnegative. */
14631 return tree_expr_nonnegative_warnv_p (arg0,
14632 strict_overflow_p);
14634 CASE_FLT_FN (BUILT_IN_FMAX):
14635 /* True if the 1st OR 2nd arguments are nonnegative. */
14636 return (tree_expr_nonnegative_warnv_p (arg0,
14638 || (tree_expr_nonnegative_warnv_p (arg1,
14639 strict_overflow_p)));
14641 CASE_FLT_FN (BUILT_IN_FMIN):
14642 /* True if the 1st AND 2nd arguments are nonnegative. */
14643 return (tree_expr_nonnegative_warnv_p (arg0,
14645 && (tree_expr_nonnegative_warnv_p (arg1,
14646 strict_overflow_p)));
14648 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14649 /* True if the 2nd argument is nonnegative. */
14650 return tree_expr_nonnegative_warnv_p (arg1,
14651 strict_overflow_p);
14653 CASE_FLT_FN (BUILT_IN_POWI):
14654 /* True if the 1st argument is nonnegative or the second
14655 argument is an even integer. */
14656 if (TREE_CODE (arg1) == INTEGER_CST
14657 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14659 return tree_expr_nonnegative_warnv_p (arg0,
14660 strict_overflow_p);
14662 CASE_FLT_FN (BUILT_IN_POW):
14663 /* True if the 1st argument is nonnegative or the second
14664 argument is an even integer valued real. */
14665 if (TREE_CODE (arg1) == REAL_CST)
14670 c = TREE_REAL_CST (arg1);
14671 n = real_to_integer (&c);
/* Exponent is even (checked above the elided lines, presumably);
   verify it is an exact integer by round-tripping through
   real_from_integer.  */
14674 REAL_VALUE_TYPE cint;
14675 real_from_integer (&cint, VOIDmode, n,
14676 n < 0 ? -1 : 0, 0);
14677 if (real_identical (&c, &cint))
14681 return tree_expr_nonnegative_warnv_p (arg0,
14682 strict_overflow_p);
/* Not a recognized built-in: fall back to the generic code/type test.  */
14687 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14691 /* Return true if T is known to be non-negative. If the return
14692 value is based on the assumption that signed overflow is undefined,
14693 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14694 *STRICT_OVERFLOW_P. */
/* NOTE(review): the switch head and several case labels are elided from
   this extract; code below is kept byte-identical.  Handles expressions
   with no obvious value (TARGET_EXPR, CALL_EXPR, compound forms).  */
14697 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14699 enum tree_code code = TREE_CODE (t);
14700 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* TARGET_EXPR: analyze the initializer that fills the slot.  */
14707 tree temp = TARGET_EXPR_SLOT (t);
14708 t = TARGET_EXPR_INITIAL (t);
14710 /* If the initializer is non-void, then it's a normal expression
14711 that will be assigned to the slot. */
14712 if (!VOID_TYPE_P (t))
14713 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14715 /* Otherwise, the initializer sets the slot in some way. One common
14716 way is an assignment statement at the end of the initializer. */
14719 if (TREE_CODE (t) == BIND_EXPR)
14720 t = expr_last (BIND_EXPR_BODY (t));
14721 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14722 || TREE_CODE (t) == TRY_CATCH_EXPR)
14723 t = expr_last (TREE_OPERAND (t, 0));
14724 else if (TREE_CODE (t) == STATEMENT_LIST)
/* Found `temp = <expr>' at the end: the slot's value is <expr>.  */
14729 if (TREE_CODE (t) == MODIFY_EXPR
14730 && TREE_OPERAND (t, 0) == temp)
14731 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14732 strict_overflow_p);
/* CALL_EXPR: delegate to the built-in-function analysis.  */
14739 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14740 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14742 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14743 get_callee_fndecl (t),
14746 strict_overflow_p);
14748 case COMPOUND_EXPR:
/* Value of a COMPOUND_EXPR (and, presumably, MODIFY_EXPR — label
   elided) is its second operand.  */
14750 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14751 strict_overflow_p);
14753 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14754 strict_overflow_p);
14756 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14757 strict_overflow_p);
14760 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14764 /* We don't know sign of `t', so be conservative and return false. */
14768 /* Return true if T is known to be non-negative. If the return
14769 value is based on the assumption that signed overflow is undefined,
14770 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14771 *STRICT_OVERFLOW_P. */
/* NOTE(review): some class/case labels are elided from this extract;
   code below is kept byte-identical.  Top-level dispatcher over the
   tree-code class of T.  */
14774 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14776 enum tree_code code;
14777 if (t == error_mark_node)
14780 code = TREE_CODE (t);
14781 switch (TREE_CODE_CLASS (code))
/* Binary operators and comparisons.  */
14784 case tcc_comparison:
14785 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14787 TREE_OPERAND (t, 0),
14788 TREE_OPERAND (t, 1),
14789 strict_overflow_p);
/* Unary operators.  */
14792 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14794 TREE_OPERAND (t, 0),
14795 strict_overflow_p);
/* Constants, declarations and references: single-node analysis.  */
14798 case tcc_declaration:
14799 case tcc_reference:
14800 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14808 case TRUTH_AND_EXPR:
14809 case TRUTH_OR_EXPR:
14810 case TRUTH_XOR_EXPR:
14811 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14813 TREE_OPERAND (t, 0),
14814 TREE_OPERAND (t, 1),
14815 strict_overflow_p);
14816 case TRUTH_NOT_EXPR:
14817 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14819 TREE_OPERAND (t, 0),
14820 strict_overflow_p);
14827 case WITH_SIZE_EXPR:
14831 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
/* Anything else: the catch-all analysis for "invalid" codes.  */
14834 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14838 /* Return true if `t' is known to be non-negative. Handle warnings
14839 about undefined signed overflow. */
/* NOTE(review): a few lines (return etc.) are elided from this extract;
   code below is kept byte-identical.  Wrapper that emits the
   strict-overflow warning when the answer relied on undefined overflow.  */
14842 tree_expr_nonnegative_p (tree t)
14844 bool ret, strict_overflow_p;
14846 strict_overflow_p = false;
14847 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14848 if (strict_overflow_p)
14849 fold_overflow_warning (("assuming signed overflow does not occur when "
14850 "determining that expression is always "
14852 WARN_STRICT_OVERFLOW_MISC);
14857 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14858 For floating point we further ensure that T is not denormal.
14859 Similar logic is present in nonzero_address in rtlanal.h.
14861 If the return value is based on the assumption that signed overflow
14862 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14863 change *STRICT_OVERFLOW_P. */
/* NOTE(review): switch head and case labels are elided from this extract;
   code below is kept byte-identical.  */
14866 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14867 bool *strict_overflow_p)
/* Presumably ABS_EXPR/NEGATE_EXPR (label elided): nonzero iff operand is.  */
14872 return tree_expr_nonzero_warnv_p (op0,
14873 strict_overflow_p);
/* Conversion: a non-narrowing conversion of a nonzero value stays
   nonzero.  */
14877 tree inner_type = TREE_TYPE (op0);
14878 tree outer_type = type;
14880 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14881 && tree_expr_nonzero_warnv_p (op0,
14882 strict_overflow_p));
14886 case NON_LVALUE_EXPR:
14887 return tree_expr_nonzero_warnv_p (op0,
14888 strict_overflow_p);
14897 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14898 For floating point we further ensure that T is not denormal.
14899 Similar logic is present in nonzero_address in rtlanal.h.
14901 If the return value is based on the assumption that signed overflow
14902 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14903 change *STRICT_OVERFLOW_P. */
/* NOTE(review): the switch head and several case labels/returns are
   elided from this extract; code below is kept byte-identical.  */
14906 tree_binary_nonzero_warnv_p (enum tree_code code,
14909 tree op1, bool *strict_overflow_p)
14911 bool sub_strict_overflow_p;
14914 case POINTER_PLUS_EXPR:
/* PLUS with undefined overflow: nonneg + nonneg is zero only if both
   are zero, so one nonzero operand suffices.  */
14916 if (TYPE_OVERFLOW_UNDEFINED (type))
14918 /* With the presence of negative values it is hard
14919 to say something. */
14920 sub_strict_overflow_p = false;
14921 if (!tree_expr_nonnegative_warnv_p (op0,
14922 &sub_strict_overflow_p)
14923 || !tree_expr_nonnegative_warnv_p (op1,
14924 &sub_strict_overflow_p))
14926 /* One of operands must be positive and the other non-negative. */
14927 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14928 overflows, on a twos-complement machine the sum of two
14929 nonnegative numbers can never be zero. */
14930 return (tree_expr_nonzero_warnv_p (op0,
14932 || tree_expr_nonzero_warnv_p (op1,
14933 strict_overflow_p));
/* MULT with undefined overflow: nonzero * nonzero is nonzero, and
   relying on that sets the strict-overflow flag.  */
14938 if (TYPE_OVERFLOW_UNDEFINED (type))
14940 if (tree_expr_nonzero_warnv_p (op0,
14942 && tree_expr_nonzero_warnv_p (op1,
14943 strict_overflow_p))
14945 *strict_overflow_p = true;
/* Presumably MIN_EXPR (label elided): both operands nonzero.  */
14952 sub_strict_overflow_p = false;
14953 if (tree_expr_nonzero_warnv_p (op0,
14954 &sub_strict_overflow_p)
14955 && tree_expr_nonzero_warnv_p (op1,
14956 &sub_strict_overflow_p))
14958 if (sub_strict_overflow_p)
14959 *strict_overflow_p = true;
/* Presumably MAX_EXPR (label elided).  */
14964 sub_strict_overflow_p = false;
14965 if (tree_expr_nonzero_warnv_p (op0,
14966 &sub_strict_overflow_p))
14968 if (sub_strict_overflow_p)
14969 *strict_overflow_p = true;
14971 /* When both operands are nonzero, then MAX must be too. */
14972 if (tree_expr_nonzero_warnv_p (op1,
14973 strict_overflow_p))
14976 /* MAX where operand 0 is positive is positive. */
14977 return tree_expr_nonnegative_warnv_p (op0,
14978 strict_overflow_p);
14980 /* MAX where operand 1 is positive is positive. */
14981 else if (tree_expr_nonzero_warnv_p (op1,
14982 &sub_strict_overflow_p)
14983 && tree_expr_nonnegative_warnv_p (op1,
14984 &sub_strict_overflow_p))
14986 if (sub_strict_overflow_p)
14987 *strict_overflow_p = true;
/* Presumably BIT_IOR_EXPR (label elided): either operand nonzero.  */
14993 return (tree_expr_nonzero_warnv_p (op1,
14995 || tree_expr_nonzero_warnv_p (op0,
14996 strict_overflow_p));
15005 /* Return true when T is an address and is known to be nonzero.
15006 For floating point we further ensure that T is not denormal.
15007 Similar logic is present in nonzero_address in rtlanal.h.
15009 If the return value is based on the assumption that signed overflow
15010 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15011 change *STRICT_OVERFLOW_P. */
/* NOTE(review): several case labels/returns are elided from this extract;
   code below is kept byte-identical.  */
15014 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15016 bool sub_strict_overflow_p;
15017 switch (TREE_CODE (t))
/* Presumably INTEGER_CST (label elided).  */
15020 return !integer_zerop (t);
/* Presumably ADDR_EXPR: look at the base object being addressed.  */
15024 tree base = get_base_address (TREE_OPERAND (t, 0));
15029 /* Weak declarations may link to NULL. */
15030 if (VAR_OR_FUNCTION_DECL_P (base))
15031 return !DECL_WEAK (base);
15033 /* Constants are never weak. */
15034 if (CONSTANT_CLASS_P (base))
/* Presumably COND_EXPR: both arms must be nonzero.  */
15041 sub_strict_overflow_p = false;
15042 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15043 &sub_strict_overflow_p)
15044 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15045 &sub_strict_overflow_p))
15047 if (sub_strict_overflow_p)
15048 *strict_overflow_p = true;
15059 /* Return true when T is an address and is known to be nonzero.
15060 For floating point we further ensure that T is not denormal.
15061 Similar logic is present in nonzero_address in rtlanal.h.
15063 If the return value is based on the assumption that signed overflow
15064 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15065 change *STRICT_OVERFLOW_P. */
/* NOTE(review): some class/case labels are elided from this extract;
   code below is kept byte-identical.  Top-level dispatcher over the
   tree-code class of T.  */
15068 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15070 tree type = TREE_TYPE (t);
15071 enum tree_code code;
15073 /* Doing something useful for floating point would need more work. */
15074 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15077 code = TREE_CODE (t);
15078 switch (TREE_CODE_CLASS (code))
15081 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15082 strict_overflow_p);
15084 case tcc_comparison:
15085 return tree_binary_nonzero_warnv_p (code, type,
15086 TREE_OPERAND (t, 0),
15087 TREE_OPERAND (t, 1),
15088 strict_overflow_p);
15090 case tcc_declaration:
15091 case tcc_reference:
15092 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15100 case TRUTH_NOT_EXPR:
15101 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15102 strict_overflow_p);
15104 case TRUTH_AND_EXPR:
15105 case TRUTH_OR_EXPR:
15106 case TRUTH_XOR_EXPR:
15107 return tree_binary_nonzero_warnv_p (code, type,
15108 TREE_OPERAND (t, 0),
15109 TREE_OPERAND (t, 1),
15110 strict_overflow_p);
15117 case WITH_SIZE_EXPR:
15121 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15123 case COMPOUND_EXPR:
/* Value of a COMPOUND_EXPR (and, presumably, MODIFY_EXPR — label
   elided) is its second operand.  */
15126 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15127 strict_overflow_p);
15130 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15131 strict_overflow_p);
/* Presumably CALL_EXPR: alloca never returns NULL.  */
15134 return alloca_call_p (t);
15142 /* Return true when T is an address and is known to be nonzero.
15143 Handle warnings about undefined signed overflow. */
/* NOTE(review): a few lines (return etc.) are elided from this extract;
   code below is kept byte-identical.  Wrapper mirroring
   tree_expr_nonnegative_p's warning behavior.  */
15146 tree_expr_nonzero_p (tree t)
15148 bool ret, strict_overflow_p;
15150 strict_overflow_p = false;
15151 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15152 if (strict_overflow_p)
15153 fold_overflow_warning (("assuming signed overflow does not occur when "
15154 "determining that expression is always "
15156 WARN_STRICT_OVERFLOW_MISC);
15160 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15161 attempt to fold the expression to a constant without modifying TYPE,
15164 If the expression could be simplified to a constant, then return
15165 the constant. If the expression would not be simplified to a
15166 constant, then return NULL_TREE. */
/* Thin wrapper: fold, then keep the result only if it is constant.  */
15169 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15171 tree tem = fold_binary (code, type, op0, op1);
15172 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15175 /* Given the components of a unary expression CODE, TYPE and OP0,
15176 attempt to fold the expression to a constant without modifying
15179 If the expression could be simplified to a constant, then return
15180 the constant. If the expression would not be simplified to a
15181 constant, then return NULL_TREE. */
/* Unary counterpart of fold_binary_to_constant.  */
15184 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15186 tree tem = fold_unary (code, type, op0);
15187 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15190 /* If EXP represents referencing an element in a constant string
15191 (either via pointer arithmetic or array indexing), return the
15192 tree representing the value accessed, otherwise return NULL. */
/* NOTE(review): some lines (declarations, else-branch head, returns) are
   elided from this extract; code below is kept byte-identical.  */
15195 fold_read_from_constant_string (tree exp)
15197 if ((TREE_CODE (exp) == INDIRECT_REF
15198 || TREE_CODE (exp) == ARRAY_REF)
15199 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15201 tree exp1 = TREE_OPERAND (exp, 0);
/* INDIRECT_REF: extract string and index from the pointer expression;
   otherwise (ARRAY_REF, presumably) use the explicit index below.  */
15205 if (TREE_CODE (exp) == INDIRECT_REF)
15206 string = string_constant (exp1, &index);
15209 tree low_bound = array_ref_low_bound (exp);
15210 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
15212 /* Optimize the special-case of a zero lower bound.
15214 We convert the low_bound to sizetype to avoid some problems
15215 with constant folding. (E.g. suppose the lower bound is 1,
15216 and its mode is QI. Without the conversion,l (ARRAY
15217 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15218 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15219 if (! integer_zerop (low_bound))
15220 index = size_diffop (index, fold_convert (sizetype, low_bound));
/* Only fold when the access is a 1-byte in-bounds read of an actual
   STRING_CST with a constant index.  */
15226 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15227 && TREE_CODE (string) == STRING_CST
15228 && TREE_CODE (index) == INTEGER_CST
15229 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15230 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15232 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15233 return build_int_cst_type (TREE_TYPE (exp),
15234 (TREE_STRING_POINTER (string)
15235 [TREE_INT_CST_LOW (index)]));
15240 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15241 an integer constant, real, or fixed-point constant.
15243 TYPE is the type of the result. */
/* NOTE(review): case labels, braces and the final return are elided from
   this extract; code below is kept byte-identical.  */
15246 fold_negate_const (tree arg0, tree type)
15248 tree t = NULL_TREE;
15250 switch (TREE_CODE (arg0))
/* INTEGER_CST: negate as a double-word value; force_fit_type_double
   records overflow for signed types.  */
15254 unsigned HOST_WIDE_INT low;
15255 HOST_WIDE_INT high;
15256 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15257 TREE_INT_CST_HIGH (arg0),
15259 t = force_fit_type_double (type, low, high, 1,
15260 (overflow | TREE_OVERFLOW (arg0))
15261 && !TYPE_UNSIGNED (type));
/* REAL_CST: simple sign flip.  */
15266 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
/* FIXED_CST: use fixed-point arithmetic, honoring saturation.  */
15271 FIXED_VALUE_TYPE f;
15272 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15273 &(TREE_FIXED_CST (arg0)), NULL,
15274 TYPE_SATURATING (type));
15275 t = build_fixed (type, f);
15276 /* Propagate overflow flags. */
15277 if (overflow_p | TREE_OVERFLOW (arg0))
15279 TREE_OVERFLOW (t) = 1;
15280 TREE_CONSTANT_OVERFLOW (t) = 1;
15282 else if (TREE_CONSTANT_OVERFLOW (arg0))
15283 TREE_CONSTANT_OVERFLOW (t) = 1;
15288 gcc_unreachable ();
15294 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15295 an integer constant or real constant.
15297 TYPE is the type of the result. */
/* NOTE(review): case labels, some returns and the closing brace are
   elided from this extract; code below is kept byte-identical.  */
15300 fold_abs_const (tree arg0, tree type)
15302 tree t = NULL_TREE;
15304 switch (TREE_CODE (arg0))
15307 /* If the value is unsigned, then the absolute value is
15308 the same as the ordinary value. */
15309 if (TYPE_UNSIGNED (type))
15311 /* Similarly, if the value is non-negative. */
15312 else if (INT_CST_LT (integer_minus_one_node, arg0))
15314 /* If the value is negative, then the absolute value is
/* Negative INTEGER_CST: negate via neg_double, propagating overflow.  */
15318 unsigned HOST_WIDE_INT low;
15319 HOST_WIDE_INT high;
15320 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15321 TREE_INT_CST_HIGH (arg0),
15323 t = force_fit_type_double (type, low, high, -1,
15324 overflow | TREE_OVERFLOW (arg0));
/* REAL_CST: flip the sign only when negative.  */
15329 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15330 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15336 gcc_unreachable ();
15342 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15343 constant. TYPE is the type of the result. */
/* Bitwise complement of a double-word integer constant; the overflow
   flag of ARG0 is carried over.  */
15346 fold_not_const (tree arg0, tree type)
15348 tree t = NULL_TREE;
15350 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15352 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15353 ~TREE_INT_CST_HIGH (arg0), 0,
15354 TREE_OVERFLOW (arg0));
15359 /* Given CODE, a relational operator, the target type, TYPE and two
15360 constant operands OP0 and OP1, return the result of the
15361 relational operation. If the result is not a compile time
15362 constant, then return NULL_TREE. */
/* NOTE(review): a switch over comparison codes (NaN handling) and some
   braces/returns are elided from this extract; code below is kept
   byte-identical.  */
15365 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15367 int result, invert;
15369 /* From here on, the only cases we handle are when the result is
15370 known to be a constant. */
15372 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15374 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15375 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15377 /* Handle the cases where either operand is a NaN. */
15378 if (real_isnan (c0) || real_isnan (c1))
15388 case UNORDERED_EXPR:
/* With -ftrapping-math, don't fold ordered comparisons on NaNs
   (they may raise an exception at run time) — presumably; the
   surrounding case labels are elided.  */
15402 if (flag_trapping_math)
15408 gcc_unreachable ();
15411 return constant_boolean_node (result, type);
15414 return constant_boolean_node (real_compare (code, c0, c1), type);
15417 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15419 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15420 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15421 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15424 /* Handle equality/inequality of complex constants. */
15425 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15427 tree rcond = fold_relational_const (code, type,
15428 TREE_REALPART (op0),
15429 TREE_REALPART (op1));
15430 tree icond = fold_relational_const (code, type,
15431 TREE_IMAGPART (op0),
15432 TREE_IMAGPART (op1));
15433 if (code == EQ_EXPR)
15434 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15435 else if (code == NE_EXPR)
15436 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15441 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15443 To compute GT, swap the arguments and do LT.
15444 To compute GE, do LT and invert the result.
15445 To compute LE, swap the arguments, do LT and invert the result.
15446 To compute NE, do EQ and invert the result.
15448 Therefore, the code below must handle only EQ and LT. */
15450 if (code == LE_EXPR || code == GT_EXPR)
15455 code = swap_tree_comparison (code);
15458 /* Note that it is safe to invert for real values here because we
15459 have already handled the one case that it matters. */
15462 if (code == NE_EXPR || code == GE_EXPR)
15465 code = invert_tree_comparison (code, false);
15468 /* Compute a result for LT or EQ if args permit;
15469 Otherwise return T. */
15470 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15472 if (code == EQ_EXPR)
15473 result = tree_int_cst_equal (op0, op1);
15474 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15475 result = INT_CST_LT_UNSIGNED (op0, op1);
15477 result = INT_CST_LT (op0, op1);
15484 return constant_boolean_node (result, type);
15487 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15488 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
/* NOTE(review): a few lines (the "return expr" bodies) are elided from
   this extract; code below is kept byte-identical.  */
15492 fold_build_cleanup_point_expr (tree type, tree expr)
15494 /* If the expression does not have side effects then we don't have to wrap
15495 it with a cleanup point expression. */
15496 if (!TREE_SIDE_EFFECTS (expr))
15499 /* If the expression is a return, check to see if the expression inside the
15500 return has no side effects or the right hand side of the modify expression
15501 inside the return. If either don't have side effects set we don't need to
15502 wrap the expression in a cleanup point expression. Note we don't check the
15503 left hand side of the modify because it should always be a return decl. */
15504 if (TREE_CODE (expr) == RETURN_EXPR)
15506 tree op = TREE_OPERAND (expr, 0);
15507 if (!op || !TREE_SIDE_EFFECTS (op))
15509 op = TREE_OPERAND (op, 1);
15510 if (!TREE_SIDE_EFFECTS (op))
/* Otherwise, wrap EXPR so cleanups run at the right point.  */
15514 return build1 (CLEANUP_POINT_EXPR, type, expr);
15517 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15518 of an indirection through OP0, or NULL_TREE if no simplification is
/* NOTE(review): some declarations, braces and returns are elided from
   this extract; code below is kept byte-identical.  Each pattern it
   recognizes is annotated in the original comments.  */
15522 fold_indirect_ref_1 (tree type, tree op0)
15528 subtype = TREE_TYPE (sub);
15529 if (!POINTER_TYPE_P (subtype))
15532 if (TREE_CODE (sub) == ADDR_EXPR)
15534 tree op = TREE_OPERAND (sub, 0);
15535 tree optype = TREE_TYPE (op);
15536 /* *&CONST_DECL -> to the value of the const decl. */
15537 if (TREE_CODE (op) == CONST_DECL)
15538 return DECL_INITIAL (op);
15539 /* *&p => p; make sure to handle *&"str"[cst] here. */
15540 if (type == optype)
15542 tree fop = fold_read_from_constant_string (op);
15548 /* *(foo *)&fooarray => fooarray[0] */
15549 else if (TREE_CODE (optype) == ARRAY_TYPE
15550 && type == TREE_TYPE (optype))
15552 tree type_domain = TYPE_DOMAIN (optype);
15553 tree min_val = size_zero_node;
15554 if (type_domain && TYPE_MIN_VALUE (type_domain))
15555 min_val = TYPE_MIN_VALUE (type_domain);
15556 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15558 /* *(foo *)&complexfoo => __real__ complexfoo */
15559 else if (TREE_CODE (optype) == COMPLEX_TYPE
15560 && type == TREE_TYPE (optype))
15561 return fold_build1 (REALPART_EXPR, type, op);
15562 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15563 else if (TREE_CODE (optype) == VECTOR_TYPE
15564 && type == TREE_TYPE (optype))
15566 tree part_width = TYPE_SIZE (type);
15567 tree index = bitsize_int (0);
15568 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15572 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15573 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15574 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15576 tree op00 = TREE_OPERAND (sub, 0);
15577 tree op01 = TREE_OPERAND (sub, 1);
15581 op00type = TREE_TYPE (op00);
15582 if (TREE_CODE (op00) == ADDR_EXPR
15583 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15584 && type == TREE_TYPE (TREE_TYPE (op00type)))
/* Convert the byte offset into a bit index for BIT_FIELD_REF.  */
15586 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15587 tree part_width = TYPE_SIZE (type);
15588 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15589 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15590 tree index = bitsize_int (indexi);
15592 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15593 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15594 part_width, index);
15600 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15601 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15602 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15604 tree op00 = TREE_OPERAND (sub, 0);
15605 tree op01 = TREE_OPERAND (sub, 1);
15609 op00type = TREE_TYPE (op00);
15610 if (TREE_CODE (op00) == ADDR_EXPR
15611 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15612 && type == TREE_TYPE (TREE_TYPE (op00type)))
/* Offset equal to the element size selects the imaginary part.  */
15614 tree size = TYPE_SIZE_UNIT (type);
15615 if (tree_int_cst_equal (size, op01))
15616 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15620 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15621 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15622 && type == TREE_TYPE (TREE_TYPE (subtype)))
15625 tree min_val = size_zero_node;
15626 sub = build_fold_indirect_ref (sub);
15627 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15628 if (type_domain && TYPE_MIN_VALUE (type_domain))
15629 min_val = TYPE_MIN_VALUE (type_domain);
15630 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15636 /* Builds an expression for an indirection through T, simplifying some
/* Try fold_indirect_ref_1 first; fall back to a plain INDIRECT_REF.
   NOTE(review): the line returning the simplified SUB is elided from
   this extract.  */
15640 build_fold_indirect_ref (tree t)
15642 tree type = TREE_TYPE (TREE_TYPE (t));
15643 tree sub = fold_indirect_ref_1 (type, t);
15648 return build1 (INDIRECT_REF, type, t);
15651 /* Given an INDIRECT_REF T, return either T or a simplified version. */
/* NOTE(review): the returns are elided from this extract; code below is
   kept byte-identical.  */
15654 fold_indirect_ref (tree t)
15656 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15664 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15665 whose result is ignored. The type of the returned tree need not be
15666 the same as the original expression. */
/* NOTE(review): the enclosing loop head, some case labels and default
   returns are elided from this extract; code below is kept
   byte-identical.  */
15669 fold_ignored_result (tree t)
15671 if (!TREE_SIDE_EFFECTS (t))
15672 return integer_zero_node;
15675 switch (TREE_CODE_CLASS (TREE_CODE (t)))
/* Unary node: the side effect, if any, is in the operand.  */
15678 t = TREE_OPERAND (t, 0);
15682 case tcc_comparison:
/* Binary node: keep whichever operand still has side effects.  */
15683 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15684 t = TREE_OPERAND (t, 0);
15685 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15686 t = TREE_OPERAND (t, 1);
15691 case tcc_expression:
15692 switch (TREE_CODE (t))
15694 case COMPOUND_EXPR:
15695 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15697 t = TREE_OPERAND (t, 0);
/* Presumably COND_EXPR (label elided): only strippable when neither
   arm has side effects.  */
15701 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15702 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15704 t = TREE_OPERAND (t, 0);
15717 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15718    This can only be applied to objects of a sizetype.  */
15721 round_up (tree value, int divisor)
15723   tree div = NULL_TREE;
15725   gcc_assert (divisor > 0);
15729   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15730      have to do anything.  Only do this when we are not given a const,
15731      because in that case, this check is more expensive than just
15733   if (TREE_CODE (value) != INTEGER_CST)
15735       div = build_int_cst (TREE_TYPE (value), divisor);
15737       if (multiple_of_p (TREE_TYPE (value), value, div))
15741   /* If divisor is a power of two, simplify this to bit manipulation.  */
15742   if (divisor == (divisor & -divisor))
/* A constant operand can be rounded at compile time.  */
15744       if (TREE_CODE (value) == INTEGER_CST)
15746 	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15747 	  unsigned HOST_WIDE_INT high;
/* Already aligned: nothing to do (elided early return).  */
15750 	  if ((low & (divisor - 1)) == 0)
15753 	  overflow_p = TREE_OVERFLOW (value);
15754 	  high = TREE_INT_CST_HIGH (value);
/* Clear the low bits; the bump to the next multiple and carry into
   HIGH happen on the elided lines before the rebuild below.  */
15755 	  low &= ~(divisor - 1);
15764 	  return force_fit_type_double (TREE_TYPE (value), low, high,
/* Non-constant power-of-two case:
   (value + divisor - 1) & -divisor.  */
15771 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
15772 	  value = size_binop (PLUS_EXPR, value, t);
15773 	  t = build_int_cst (TREE_TYPE (value), -divisor);
15774 	  value = size_binop (BIT_AND_EXPR, value, t);
/* General divisor: ceil-divide then multiply back.  */
15780       div = build_int_cst (TREE_TYPE (value), divisor);
15781       value = size_binop (CEIL_DIV_EXPR, value, div);
15782       value = size_binop (MULT_EXPR, value, div);
15788 /* Likewise, but round down.  */
15791 round_down (tree value, int divisor)
15793   tree div = NULL_TREE;
15795   gcc_assert (divisor > 0);
15799   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15800      have to do anything.  Only do this when we are not given a const,
15801      because in that case, this check is more expensive than just
15803   if (TREE_CODE (value) != INTEGER_CST)
15805       div = build_int_cst (TREE_TYPE (value), divisor);
15807       if (multiple_of_p (TREE_TYPE (value), value, div))
15811   /* If divisor is a power of two, simplify this to bit manipulation.  */
15812   if (divisor == (divisor & -divisor))
/* value & -divisor clears the low log2(divisor) bits, rounding
   down to the previous multiple.  */
15816       t = build_int_cst (TREE_TYPE (value), -divisor);
15817       value = size_binop (BIT_AND_EXPR, value, t);
/* General divisor: floor-divide then multiply back.  */
15822       div = build_int_cst (TREE_TYPE (value), divisor);
15823       value = size_binop (FLOOR_DIV_EXPR, value, div);
15824       value = size_binop (MULT_EXPR, value, div);
15830 /* Returns the pointer to the base of the object addressed by EXP and
15831    extracts the information about the offset of the access, storing it
15832    to PBITPOS and POFFSET.  */
15835 split_address_to_core_and_offset (tree exp,
15836 				  HOST_WIDE_INT *pbitpos, tree *poffset)
15839   enum machine_mode mode;
15840   int unsignedp, volatilep;
15841   HOST_WIDE_INT bitsize;
/* For &object, peel the ADDR_EXPR and let get_inner_reference
   decompose the reference into base + bit position + variable
   offset, then retake the base's address.  */
15843   if (TREE_CODE (exp) == ADDR_EXPR)
15845       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15846 				  poffset, &mode, &unsignedp, &volatilep,
15848       core = fold_addr_expr (core);
/* Otherwise (elided branch) EXP itself is the core with no variable
   offset.  */
15854       *poffset = NULL_TREE;
15860 /* Returns true if addresses of E1 and E2 differ by a constant, false
15861    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
15864 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15867   HOST_WIDE_INT bitpos1, bitpos2;
15868   tree toffset1, toffset2, tdiff, type;
/* Decompose both addresses into core + bit position + variable
   offset.  */
15870   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15871   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
/* A constant difference requires byte-aligned positions and the same
   underlying object.  */
15873   if (bitpos1 % BITS_PER_UNIT != 0
15874       || bitpos2 % BITS_PER_UNIT != 0
15875       || !operand_equal_p (core1, core2, 0))
/* Both have variable offsets: their difference must itself fold to a
   constant fitting in a HOST_WIDE_INT.  */
15878   if (toffset1 && toffset2)
15880       type = TREE_TYPE (toffset1);
15881       if (type != TREE_TYPE (toffset2))
15882 	toffset2 = fold_convert (type, toffset2);
15884       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15885       if (!cst_and_fits_in_hwi (tdiff))
15888       *diff = int_cst_value (tdiff);
15890   else if (toffset1 || toffset2)
15892       /* If only one of the offsets is non-constant, the difference cannot
/* Finally add the constant byte distance between the two bit
   positions.  */
15899   *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15903 /* Simplify the floating point expression EXP when the sign of the
15904    result is not significant.  Return NULL_TREE if no simplification
15908 fold_strip_sign_ops (tree exp)
15912   switch (TREE_CODE (exp))
/* (Elided case labels, e.g. for negation/abs:) strip sign ops from
   the operand and return it, or the original operand if nothing
   changed.  */
15916       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15917       return arg0 ? arg0 : TREE_OPERAND (exp, 0);
/* Multiplicative case (elided label): the transformation is unsafe
   when sign-dependent rounding is honored for this mode.  */
15921       if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15923       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15924       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
/* Rebuild only if at least one operand was simplified.  */
15925       if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15926 	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15927 			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
15928 			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
15931     case COMPOUND_EXPR:
/* Only the second operand's sign matters; the first is kept for its
   side effects.  */
15932       arg0 = TREE_OPERAND (exp, 0);
15933       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15935 	return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
/* Conditional (elided label): strip sign ops from both arms.  */
15939       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15940       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15942 	return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15943 			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
15944 			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
15949 	const enum built_in_function fcode = builtin_mathfn_code (exp);
15952 	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
15953 	    /* Strip copysign function call, return the 1st argument. */
15954 	    arg0 = CALL_EXPR_ARG (exp, 0);
15955 	    arg1 = CALL_EXPR_ARG (exp, 1);
/* Keep ARG1 for its side effects while discarding its value.  */
15956 	    return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15959 	    /* Strip sign ops from the argument of "odd" math functions.  */
15960 	    if (negate_mathfn_p (fcode))
15962 		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15964 		  return build_call_expr (get_callee_fndecl (exp), 1, arg0);