1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
51 #include "double-int.h"
58 #include "fold-const.h"
59 #include "stor-layout.h"
61 #include "tree-iterator.h"
65 #include "hard-reg-set.h"
67 #include "statistics.h"
69 #include "fixed-value.h"
70 #include "insn-config.h"
80 #include "diagnostic-core.h"
82 #include "langhooks.h"
85 #include "basic-block.h"
86 #include "tree-ssa-alias.h"
87 #include "internal-fn.h"
89 #include "gimple-expr.h"
94 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
97 #include "plugin-api.h"
100 #include "generic-match.h"
103 /* Nonzero if we are folding constants inside an initializer; zero
105 int folding_initializer = 0;
107 /* The following constants represent a bit based encoding of GCC's
108 comparison operators. This encoding simplifies transformations
109 on relational comparison operators, such as AND and OR. */
110 enum comparison_code {
129 static bool negate_mathfn_p (enum built_in_function);
130 static bool negate_expr_p (tree);
131 static tree negate_expr (tree);
132 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
133 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
134 static enum comparison_code comparison_to_compcode (enum tree_code);
135 static enum tree_code compcode_to_comparison (enum comparison_code);
136 static int operand_equal_for_comparison_p (tree, tree, tree);
137 static int twoval_comparison_p (tree, tree *, tree *, int *);
138 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
139 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
140 static tree make_bit_field_ref (location_t, tree, tree,
141 HOST_WIDE_INT, HOST_WIDE_INT, int);
142 static tree optimize_bit_field_compare (location_t, enum tree_code,
144 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
146 machine_mode *, int *, int *,
148 static int simple_operand_p (const_tree);
149 static bool simple_operand_p_2 (tree);
150 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
151 static tree range_predecessor (tree);
152 static tree range_successor (tree);
153 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
154 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
155 static tree unextend (tree, int, int, tree);
156 static tree optimize_minmax_comparison (location_t, enum tree_code,
158 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
159 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
160 static tree fold_binary_op_with_conditional_arg (location_t,
161 enum tree_code, tree,
164 static tree fold_mathfn_compare (location_t,
165 enum built_in_function, enum tree_code,
167 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
168 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
169 static bool reorder_operands_p (const_tree, const_tree);
170 static tree fold_negate_const (tree, tree);
171 static tree fold_not_const (const_tree, tree);
172 static tree fold_relational_const (enum tree_code, tree, tree, tree);
173 static tree fold_convert_const (enum tree_code, tree, tree);
174 static tree fold_view_convert_expr (tree, tree);
175 static bool vec_cst_ctor_to_array (tree, tree *);
178 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
179 Otherwise, return LOC. */
182 expr_location_or (tree t, location_t loc)
184 location_t tloc = EXPR_LOCATION (t);
185 return tloc == UNKNOWN_LOCATION ? loc : tloc;
188 /* Similar to protected_set_expr_location, but never modify x in place,
189 if location can and needs to be set, unshare it. */
192 protected_set_expr_location_unshare (tree x, location_t loc)
194 if (CAN_HAVE_LOCATION_P (x)
195 && EXPR_LOCATION (x) != loc
196 && !(TREE_CODE (x) == SAVE_EXPR
197 || TREE_CODE (x) == TARGET_EXPR
198 || TREE_CODE (x) == BIND_EXPR))
201 SET_EXPR_LOCATION (x, loc);
206 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
207 division and returns the quotient. Otherwise returns
211 div_if_zero_remainder (const_tree arg1, const_tree arg2)
215 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
217 return wide_int_to_tree (TREE_TYPE (arg1), quo);
222 /* This is nonzero if we should defer warnings about undefined
223 overflow. This facility exists because these warnings are a
224 special case. The code to estimate loop iterations does not want
225 to issue any warnings, since it works with expressions which do not
226 occur in user code. Various bits of cleanup code call fold(), but
227 only use the result if it has certain characteristics (e.g., is a
228 constant); that code only wants to issue a warning if the result is
231 static int fold_deferring_overflow_warnings;
233 /* If a warning about undefined overflow is deferred, this is the
234 warning. Note that this may cause us to turn two warnings into
235 one, but that is fine since it is sufficient to only give one
236 warning per expression. */
238 static const char* fold_deferred_overflow_warning;
240 /* If a warning about undefined overflow is deferred, this is the
241 level at which the warning should be emitted. */
243 static enum warn_strict_overflow_code fold_deferred_overflow_code;
245 /* Start deferring overflow warnings. We could use a stack here to
246 permit nested calls, but at present it is not necessary. */
249 fold_defer_overflow_warnings (void)
251 ++fold_deferring_overflow_warnings;
254 /* Stop deferring overflow warnings. If there is a pending warning,
255 and ISSUE is true, then issue the warning if appropriate. STMT is
256 the statement with which the warning should be associated (used for
257 location information); STMT may be NULL. CODE is the level of the
258 warning--a warn_strict_overflow_code value. This function will use
259 the smaller of CODE and the deferred code when deciding whether to
260 issue the warning. CODE may be zero to mean to always use the
264 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
269 gcc_assert (fold_deferring_overflow_warnings > 0);
270 --fold_deferring_overflow_warnings;
271 if (fold_deferring_overflow_warnings > 0)
273 if (fold_deferred_overflow_warning != NULL
275 && code < (int) fold_deferred_overflow_code)
276 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
280 warnmsg = fold_deferred_overflow_warning;
281 fold_deferred_overflow_warning = NULL;
283 if (!issue || warnmsg == NULL)
286 if (gimple_no_warning_p (stmt))
289 /* Use the smallest code level when deciding to issue the
291 if (code == 0 || code > (int) fold_deferred_overflow_code)
292 code = fold_deferred_overflow_code;
294 if (!issue_strict_overflow_warning (code))
298 locus = input_location;
300 locus = gimple_location (stmt);
301 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
304 /* Stop deferring overflow warnings, ignoring any deferred
308 fold_undefer_and_ignore_overflow_warnings (void)
310 fold_undefer_overflow_warnings (false, NULL, 0);
313 /* Whether we are deferring overflow warnings. */
316 fold_deferring_overflow_warnings_p (void)
318 return fold_deferring_overflow_warnings > 0;
321 /* This is called when we fold something based on the fact that signed
322 overflow is undefined. */
325 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
327 if (fold_deferring_overflow_warnings > 0)
329 if (fold_deferred_overflow_warning == NULL
330 || wc < fold_deferred_overflow_code)
332 fold_deferred_overflow_warning = gmsgid;
333 fold_deferred_overflow_code = wc;
336 else if (issue_strict_overflow_warning (wc))
337 warning (OPT_Wstrict_overflow, gmsgid);
340 /* Return true if the built-in mathematical function specified by CODE
341 is odd, i.e. -f(x) == f(-x). */
344 negate_mathfn_p (enum built_in_function code)
348 CASE_FLT_FN (BUILT_IN_ASIN):
349 CASE_FLT_FN (BUILT_IN_ASINH):
350 CASE_FLT_FN (BUILT_IN_ATAN):
351 CASE_FLT_FN (BUILT_IN_ATANH):
352 CASE_FLT_FN (BUILT_IN_CASIN):
353 CASE_FLT_FN (BUILT_IN_CASINH):
354 CASE_FLT_FN (BUILT_IN_CATAN):
355 CASE_FLT_FN (BUILT_IN_CATANH):
356 CASE_FLT_FN (BUILT_IN_CBRT):
357 CASE_FLT_FN (BUILT_IN_CPROJ):
358 CASE_FLT_FN (BUILT_IN_CSIN):
359 CASE_FLT_FN (BUILT_IN_CSINH):
360 CASE_FLT_FN (BUILT_IN_CTAN):
361 CASE_FLT_FN (BUILT_IN_CTANH):
362 CASE_FLT_FN (BUILT_IN_ERF):
363 CASE_FLT_FN (BUILT_IN_LLROUND):
364 CASE_FLT_FN (BUILT_IN_LROUND):
365 CASE_FLT_FN (BUILT_IN_ROUND):
366 CASE_FLT_FN (BUILT_IN_SIN):
367 CASE_FLT_FN (BUILT_IN_SINH):
368 CASE_FLT_FN (BUILT_IN_TAN):
369 CASE_FLT_FN (BUILT_IN_TANH):
370 CASE_FLT_FN (BUILT_IN_TRUNC):
373 CASE_FLT_FN (BUILT_IN_LLRINT):
374 CASE_FLT_FN (BUILT_IN_LRINT):
375 CASE_FLT_FN (BUILT_IN_NEARBYINT):
376 CASE_FLT_FN (BUILT_IN_RINT):
377 return !flag_rounding_math;
385 /* Check whether we may negate an integer constant T without causing
389 may_negate_without_overflow_p (const_tree t)
393 gcc_assert (TREE_CODE (t) == INTEGER_CST);
395 type = TREE_TYPE (t);
396 if (TYPE_UNSIGNED (type))
399 return !wi::only_sign_bit_p (t);
402 /* Determine whether an expression T can be cheaply negated using
403 the function negate_expr without introducing undefined overflow. */
406 negate_expr_p (tree t)
413 type = TREE_TYPE (t);
416 switch (TREE_CODE (t))
419 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
422 /* Check that -CST will not overflow type. */
423 return may_negate_without_overflow_p (t);
425 return (INTEGRAL_TYPE_P (type)
426 && TYPE_OVERFLOW_WRAPS (type));
432 return !TYPE_OVERFLOW_SANITIZED (type);
435 /* We want to canonicalize to positive real constants. Pretend
436 that only negative ones can be easily negated. */
437 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
440 return negate_expr_p (TREE_REALPART (t))
441 && negate_expr_p (TREE_IMAGPART (t));
445 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
448 int count = TYPE_VECTOR_SUBPARTS (type), i;
450 for (i = 0; i < count; i++)
451 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
458 return negate_expr_p (TREE_OPERAND (t, 0))
459 && negate_expr_p (TREE_OPERAND (t, 1));
462 return negate_expr_p (TREE_OPERAND (t, 0));
465 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
466 || HONOR_SIGNED_ZEROS (element_mode (type))
467 || (INTEGRAL_TYPE_P (type)
468 && ! TYPE_OVERFLOW_WRAPS (type)))
470 /* -(A + B) -> (-B) - A. */
471 if (negate_expr_p (TREE_OPERAND (t, 1))
472 && reorder_operands_p (TREE_OPERAND (t, 0),
473 TREE_OPERAND (t, 1)))
475 /* -(A + B) -> (-A) - B. */
476 return negate_expr_p (TREE_OPERAND (t, 0));
479 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
480 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
481 && !HONOR_SIGNED_ZEROS (element_mode (type))
482 && (! INTEGRAL_TYPE_P (type)
483 || TYPE_OVERFLOW_WRAPS (type))
484 && reorder_operands_p (TREE_OPERAND (t, 0),
485 TREE_OPERAND (t, 1));
488 if (TYPE_UNSIGNED (type))
490 /* INT_MIN/n * n doesn't overflow while negating one operand it does
491 if n is a power of two. */
492 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
493 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
494 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
495 && ! integer_pow2p (TREE_OPERAND (t, 0)))
496 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
497 && ! integer_pow2p (TREE_OPERAND (t, 1)))))
503 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
504 return negate_expr_p (TREE_OPERAND (t, 1))
505 || negate_expr_p (TREE_OPERAND (t, 0));
511 /* In general we can't negate A / B, because if A is INT_MIN and
512 B is 1, we may turn this into INT_MIN / -1 which is undefined
513 and actually traps on some architectures. But if overflow is
514 undefined, we can negate, because - (INT_MIN / 1) is an
516 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
518 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
520 /* If overflow is undefined then we have to be careful because
521 we ask whether it's ok to associate the negate with the
522 division which is not ok for example for
523 -((a - b) / c) where (-(a - b)) / c may invoke undefined
524 overflow because of negating INT_MIN. So do not use
525 negate_expr_p here but open-code the two important cases. */
526 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
527 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
528 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
531 else if (negate_expr_p (TREE_OPERAND (t, 0)))
533 return negate_expr_p (TREE_OPERAND (t, 1));
536 /* Negate -((double)float) as (double)(-float). */
537 if (TREE_CODE (type) == REAL_TYPE)
539 tree tem = strip_float_extensions (t);
541 return negate_expr_p (tem);
546 /* Negate -f(x) as f(-x). */
547 if (negate_mathfn_p (builtin_mathfn_code (t)))
548 return negate_expr_p (CALL_EXPR_ARG (t, 0));
552 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
553 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
555 tree op1 = TREE_OPERAND (t, 1);
556 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
567 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
568 simplification is possible.
569 If negate_expr_p would return true for T, NULL_TREE will never be
573 fold_negate_expr (location_t loc, tree t)
575 tree type = TREE_TYPE (t);
578 switch (TREE_CODE (t))
580 /* Convert - (~A) to A + 1. */
582 if (INTEGRAL_TYPE_P (type))
583 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
584 build_one_cst (type));
588 tem = fold_negate_const (t, type);
589 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
590 || (ANY_INTEGRAL_TYPE_P (type)
591 && !TYPE_OVERFLOW_TRAPS (type)
592 && TYPE_OVERFLOW_WRAPS (type))
593 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
598 tem = fold_negate_const (t, type);
602 tem = fold_negate_const (t, type);
607 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
608 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
610 return build_complex (type, rpart, ipart);
616 int count = TYPE_VECTOR_SUBPARTS (type), i;
617 tree *elts = XALLOCAVEC (tree, count);
619 for (i = 0; i < count; i++)
621 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
622 if (elts[i] == NULL_TREE)
626 return build_vector (type, elts);
630 if (negate_expr_p (t))
631 return fold_build2_loc (loc, COMPLEX_EXPR, type,
632 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
633 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
637 if (negate_expr_p (t))
638 return fold_build1_loc (loc, CONJ_EXPR, type,
639 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
643 if (!TYPE_OVERFLOW_SANITIZED (type))
644 return TREE_OPERAND (t, 0);
648 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
649 && !HONOR_SIGNED_ZEROS (element_mode (type)))
651 /* -(A + B) -> (-B) - A. */
652 if (negate_expr_p (TREE_OPERAND (t, 1))
653 && reorder_operands_p (TREE_OPERAND (t, 0),
654 TREE_OPERAND (t, 1)))
656 tem = negate_expr (TREE_OPERAND (t, 1));
657 return fold_build2_loc (loc, MINUS_EXPR, type,
658 tem, TREE_OPERAND (t, 0));
661 /* -(A + B) -> (-A) - B. */
662 if (negate_expr_p (TREE_OPERAND (t, 0)))
664 tem = negate_expr (TREE_OPERAND (t, 0));
665 return fold_build2_loc (loc, MINUS_EXPR, type,
666 tem, TREE_OPERAND (t, 1));
672 /* - (A - B) -> B - A */
673 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
674 && !HONOR_SIGNED_ZEROS (element_mode (type))
675 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
676 return fold_build2_loc (loc, MINUS_EXPR, type,
677 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
681 if (TYPE_UNSIGNED (type))
687 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
689 tem = TREE_OPERAND (t, 1);
690 if (negate_expr_p (tem))
691 return fold_build2_loc (loc, TREE_CODE (t), type,
692 TREE_OPERAND (t, 0), negate_expr (tem));
693 tem = TREE_OPERAND (t, 0);
694 if (negate_expr_p (tem))
695 return fold_build2_loc (loc, TREE_CODE (t), type,
696 negate_expr (tem), TREE_OPERAND (t, 1));
703 /* In general we can't negate A / B, because if A is INT_MIN and
704 B is 1, we may turn this into INT_MIN / -1 which is undefined
705 and actually traps on some architectures. But if overflow is
706 undefined, we can negate, because - (INT_MIN / 1) is an
708 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
710 const char * const warnmsg = G_("assuming signed overflow does not "
711 "occur when negating a division");
712 tem = TREE_OPERAND (t, 1);
713 if (negate_expr_p (tem))
715 if (INTEGRAL_TYPE_P (type)
716 && (TREE_CODE (tem) != INTEGER_CST
717 || integer_onep (tem)))
718 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
719 return fold_build2_loc (loc, TREE_CODE (t), type,
720 TREE_OPERAND (t, 0), negate_expr (tem));
722 /* If overflow is undefined then we have to be careful because
723 we ask whether it's ok to associate the negate with the
724 division which is not ok for example for
725 -((a - b) / c) where (-(a - b)) / c may invoke undefined
726 overflow because of negating INT_MIN. So do not use
727 negate_expr_p here but open-code the two important cases. */
728 tem = TREE_OPERAND (t, 0);
729 if ((INTEGRAL_TYPE_P (type)
730 && (TREE_CODE (tem) == NEGATE_EXPR
731 || (TREE_CODE (tem) == INTEGER_CST
732 && may_negate_without_overflow_p (tem))))
733 || !INTEGRAL_TYPE_P (type))
734 return fold_build2_loc (loc, TREE_CODE (t), type,
735 negate_expr (tem), TREE_OPERAND (t, 1));
740 /* Convert -((double)float) into (double)(-float). */
741 if (TREE_CODE (type) == REAL_TYPE)
743 tem = strip_float_extensions (t);
744 if (tem != t && negate_expr_p (tem))
745 return fold_convert_loc (loc, type, negate_expr (tem));
750 /* Negate -f(x) as f(-x). */
751 if (negate_mathfn_p (builtin_mathfn_code (t))
752 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
756 fndecl = get_callee_fndecl (t);
757 arg = negate_expr (CALL_EXPR_ARG (t, 0));
758 return build_call_expr_loc (loc, fndecl, 1, arg);
763 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
764 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
766 tree op1 = TREE_OPERAND (t, 1);
767 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
769 tree ntype = TYPE_UNSIGNED (type)
770 ? signed_type_for (type)
771 : unsigned_type_for (type);
772 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
773 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
774 return fold_convert_loc (loc, type, temp);
786 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
787 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
799 loc = EXPR_LOCATION (t);
800 type = TREE_TYPE (t);
803 tem = fold_negate_expr (loc, t);
805 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
806 return fold_convert_loc (loc, type, tem);
809 /* Split a tree IN into a constant, literal and variable parts that could be
810 combined with CODE to make IN. "constant" means an expression with
811 TREE_CONSTANT but that isn't an actual constant. CODE must be a
812 commutative arithmetic operation. Store the constant part into *CONP,
813 the literal in *LITP and return the variable part. If a part isn't
814 present, set it to null. If the tree does not decompose in this way,
815 return the entire tree as the variable part and the other parts as null.
817 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
818 case, we negate an operand that was subtracted. Except if it is a
819 literal for which we use *MINUS_LITP instead.
821 If NEGATE_P is true, we are negating all of IN, again except a literal
822 for which we use *MINUS_LITP instead.
824 If IN is itself a literal or constant, return it as appropriate.
826 Note that we do not guarantee that any of the three values will be the
827 same type as IN, but they will have the same signedness and mode. */
830 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
831 tree *minus_litp, int negate_p)
839 /* Strip any conversions that don't change the machine mode or signedness. */
840 STRIP_SIGN_NOPS (in);
842 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
843 || TREE_CODE (in) == FIXED_CST)
845 else if (TREE_CODE (in) == code
846 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
847 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
848 /* We can associate addition and subtraction together (even
849 though the C standard doesn't say so) for integers because
850 the value is not affected. For reals, the value might be
851 affected, so we can't. */
852 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
853 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
855 tree op0 = TREE_OPERAND (in, 0);
856 tree op1 = TREE_OPERAND (in, 1);
857 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
858 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
860 /* First see if either of the operands is a literal, then a constant. */
861 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
862 || TREE_CODE (op0) == FIXED_CST)
863 *litp = op0, op0 = 0;
864 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
865 || TREE_CODE (op1) == FIXED_CST)
866 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
868 if (op0 != 0 && TREE_CONSTANT (op0))
869 *conp = op0, op0 = 0;
870 else if (op1 != 0 && TREE_CONSTANT (op1))
871 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
873 /* If we haven't dealt with either operand, this is not a case we can
874 decompose. Otherwise, VAR is either of the ones remaining, if any. */
875 if (op0 != 0 && op1 != 0)
880 var = op1, neg_var_p = neg1_p;
882 /* Now do any needed negations. */
884 *minus_litp = *litp, *litp = 0;
886 *conp = negate_expr (*conp);
888 var = negate_expr (var);
890 else if (TREE_CODE (in) == BIT_NOT_EXPR
891 && code == PLUS_EXPR)
893 /* -X - 1 is folded to ~X, undo that here. */
894 *minus_litp = build_one_cst (TREE_TYPE (in));
895 var = negate_expr (TREE_OPERAND (in, 0));
897 else if (TREE_CONSTANT (in))
905 *minus_litp = *litp, *litp = 0;
906 else if (*minus_litp)
907 *litp = *minus_litp, *minus_litp = 0;
908 *conp = negate_expr (*conp);
909 var = negate_expr (var);
915 /* Re-associate trees split by the above function. T1 and T2 are
916 either expressions to associate or null. Return the new
917 expression, if any. LOC is the location of the new expression. If
918 we build an operation, do it in TYPE and with CODE. */
921 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
928 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
929 try to fold this since we will have infinite recursion. But do
930 deal with any NEGATE_EXPRs. */
931 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
932 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
934 if (code == PLUS_EXPR)
936 if (TREE_CODE (t1) == NEGATE_EXPR)
937 return build2_loc (loc, MINUS_EXPR, type,
938 fold_convert_loc (loc, type, t2),
939 fold_convert_loc (loc, type,
940 TREE_OPERAND (t1, 0)));
941 else if (TREE_CODE (t2) == NEGATE_EXPR)
942 return build2_loc (loc, MINUS_EXPR, type,
943 fold_convert_loc (loc, type, t1),
944 fold_convert_loc (loc, type,
945 TREE_OPERAND (t2, 0)));
946 else if (integer_zerop (t2))
947 return fold_convert_loc (loc, type, t1);
949 else if (code == MINUS_EXPR)
951 if (integer_zerop (t2))
952 return fold_convert_loc (loc, type, t1);
955 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
956 fold_convert_loc (loc, type, t2));
959 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
960 fold_convert_loc (loc, type, t2));
963 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
964 for use in int_const_binop, size_binop and size_diffop. */
967 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
969 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
971 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
986 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
987 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
988 && TYPE_MODE (type1) == TYPE_MODE (type2);
992 /* Combine two integer constants ARG1 and ARG2 under operation CODE
993 to produce a new constant. Return NULL_TREE if we don't know how
994 to evaluate CODE at compile-time. */
997 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
1002 tree type = TREE_TYPE (arg1);
1003 signop sign = TYPE_SIGN (type);
1004 bool overflow = false;
1006 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
1007 TYPE_SIGN (TREE_TYPE (parg2)));
1012 res = wi::bit_or (arg1, arg2);
1016 res = wi::bit_xor (arg1, arg2);
1020 res = wi::bit_and (arg1, arg2);
1025 if (wi::neg_p (arg2))
1028 if (code == RSHIFT_EXPR)
1034 if (code == RSHIFT_EXPR)
1035 /* It's unclear from the C standard whether shifts can overflow.
1036 The following code ignores overflow; perhaps a C standard
1037 interpretation ruling is needed. */
1038 res = wi::rshift (arg1, arg2, sign);
1040 res = wi::lshift (arg1, arg2);
1045 if (wi::neg_p (arg2))
1048 if (code == RROTATE_EXPR)
1049 code = LROTATE_EXPR;
1051 code = RROTATE_EXPR;
1054 if (code == RROTATE_EXPR)
1055 res = wi::rrotate (arg1, arg2);
1057 res = wi::lrotate (arg1, arg2);
1061 res = wi::add (arg1, arg2, sign, &overflow);
1065 res = wi::sub (arg1, arg2, sign, &overflow);
1069 res = wi::mul (arg1, arg2, sign, &overflow);
1072 case MULT_HIGHPART_EXPR:
1073 res = wi::mul_high (arg1, arg2, sign);
1076 case TRUNC_DIV_EXPR:
1077 case EXACT_DIV_EXPR:
1080 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1083 case FLOOR_DIV_EXPR:
1086 res = wi::div_floor (arg1, arg2, sign, &overflow);
1092 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1095 case ROUND_DIV_EXPR:
1098 res = wi::div_round (arg1, arg2, sign, &overflow);
1101 case TRUNC_MOD_EXPR:
1104 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1107 case FLOOR_MOD_EXPR:
1110 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1116 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1119 case ROUND_MOD_EXPR:
1122 res = wi::mod_round (arg1, arg2, sign, &overflow);
1126 res = wi::min (arg1, arg2, sign);
1130 res = wi::max (arg1, arg2, sign);
1137 t = force_fit_type (type, res, overflowable,
1138 (((sign == SIGNED || overflowable == -1)
1140 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1146 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1148 return int_const_binop_1 (code, arg1, arg2, 1);
1151 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1152 constant. We assume ARG1 and ARG2 have the same data type, or at least
1153 are the same kind of constant and the same machine mode. Return zero if
1154 combining the constants is not allowed in the current operating mode. */
1157 const_binop (enum tree_code code, tree arg1, tree arg2)
1159 /* Sanity check for the recursive cases. */
1166 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1168 if (code == POINTER_PLUS_EXPR)
1169 return int_const_binop (PLUS_EXPR,
1170 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1172 return int_const_binop (code, arg1, arg2);
1175 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1180 REAL_VALUE_TYPE value;
1181 REAL_VALUE_TYPE result;
1185 /* The following codes are handled by real_arithmetic. */
1200 d1 = TREE_REAL_CST (arg1);
1201 d2 = TREE_REAL_CST (arg2);
1203 type = TREE_TYPE (arg1);
1204 mode = TYPE_MODE (type);
1206 /* Don't perform operation if we honor signaling NaNs and
1207 either operand is a NaN. */
1208 if (HONOR_SNANS (mode)
1209 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1212 /* Don't perform operation if it would raise a division
1213 by zero exception. */
1214 if (code == RDIV_EXPR
1215 && REAL_VALUES_EQUAL (d2, dconst0)
1216 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1219 /* If either operand is a NaN, just return it. Otherwise, set up
1220 for floating-point trap; we return an overflow. */
1221 if (REAL_VALUE_ISNAN (d1))
1223 else if (REAL_VALUE_ISNAN (d2))
1226 inexact = real_arithmetic (&value, code, &d1, &d2);
1227 real_convert (&result, mode, &value);
1229 /* Don't constant fold this floating point operation if
1230 the result has overflowed and flag_trapping_math. */
1231 if (flag_trapping_math
1232 && MODE_HAS_INFINITIES (mode)
1233 && REAL_VALUE_ISINF (result)
1234 && !REAL_VALUE_ISINF (d1)
1235 && !REAL_VALUE_ISINF (d2))
1238 /* Don't constant fold this floating point operation if the
1239 result may dependent upon the run-time rounding mode and
1240 flag_rounding_math is set, or if GCC's software emulation
1241 is unable to accurately represent the result. */
1242 if ((flag_rounding_math
1243 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1244 && (inexact || !real_identical (&result, &value)))
1247 t = build_real (type, result);
1249 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1253 if (TREE_CODE (arg1) == FIXED_CST)
1255 FIXED_VALUE_TYPE f1;
1256 FIXED_VALUE_TYPE f2;
1257 FIXED_VALUE_TYPE result;
1262 /* The following codes are handled by fixed_arithmetic. */
1268 case TRUNC_DIV_EXPR:
1269 if (TREE_CODE (arg2) != FIXED_CST)
1271 f2 = TREE_FIXED_CST (arg2);
1277 if (TREE_CODE (arg2) != INTEGER_CST)
1280 f2.data.high = w2.elt (1);
1281 f2.data.low = w2.elt (0);
1290 f1 = TREE_FIXED_CST (arg1);
1291 type = TREE_TYPE (arg1);
1292 sat_p = TYPE_SATURATING (type);
1293 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1294 t = build_fixed (type, result);
1295 /* Propagate overflow flags. */
1296 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1297 TREE_OVERFLOW (t) = 1;
1301 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1303 tree type = TREE_TYPE (arg1);
1304 tree r1 = TREE_REALPART (arg1);
1305 tree i1 = TREE_IMAGPART (arg1);
1306 tree r2 = TREE_REALPART (arg2);
1307 tree i2 = TREE_IMAGPART (arg2);
1314 real = const_binop (code, r1, r2);
1315 imag = const_binop (code, i1, i2);
1319 if (COMPLEX_FLOAT_TYPE_P (type))
1320 return do_mpc_arg2 (arg1, arg2, type,
1321 /* do_nonfinite= */ folding_initializer,
1324 real = const_binop (MINUS_EXPR,
1325 const_binop (MULT_EXPR, r1, r2),
1326 const_binop (MULT_EXPR, i1, i2));
1327 imag = const_binop (PLUS_EXPR,
1328 const_binop (MULT_EXPR, r1, i2),
1329 const_binop (MULT_EXPR, i1, r2));
1333 if (COMPLEX_FLOAT_TYPE_P (type))
1334 return do_mpc_arg2 (arg1, arg2, type,
1335 /* do_nonfinite= */ folding_initializer,
1338 case TRUNC_DIV_EXPR:
1340 case FLOOR_DIV_EXPR:
1341 case ROUND_DIV_EXPR:
1342 if (flag_complex_method == 0)
1344 /* Keep this algorithm in sync with
1345 tree-complex.c:expand_complex_div_straight().
1347 Expand complex division to scalars, straightforward algorithm.
1348 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1352 = const_binop (PLUS_EXPR,
1353 const_binop (MULT_EXPR, r2, r2),
1354 const_binop (MULT_EXPR, i2, i2));
1356 = const_binop (PLUS_EXPR,
1357 const_binop (MULT_EXPR, r1, r2),
1358 const_binop (MULT_EXPR, i1, i2));
1360 = const_binop (MINUS_EXPR,
1361 const_binop (MULT_EXPR, i1, r2),
1362 const_binop (MULT_EXPR, r1, i2));
1364 real = const_binop (code, t1, magsquared);
1365 imag = const_binop (code, t2, magsquared);
1369 /* Keep this algorithm in sync with
1370 tree-complex.c:expand_complex_div_wide().
1372 Expand complex division to scalars, modified algorithm to minimize
1373 overflow with wide input ranges. */
1374 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1375 fold_abs_const (r2, TREE_TYPE (type)),
1376 fold_abs_const (i2, TREE_TYPE (type)));
1378 if (integer_nonzerop (compare))
1380 /* In the TRUE branch, we compute
1382 div = (br * ratio) + bi;
1383 tr = (ar * ratio) + ai;
1384 ti = (ai * ratio) - ar;
1387 tree ratio = const_binop (code, r2, i2);
1388 tree div = const_binop (PLUS_EXPR, i2,
1389 const_binop (MULT_EXPR, r2, ratio));
1390 real = const_binop (MULT_EXPR, r1, ratio);
1391 real = const_binop (PLUS_EXPR, real, i1);
1392 real = const_binop (code, real, div);
1394 imag = const_binop (MULT_EXPR, i1, ratio);
1395 imag = const_binop (MINUS_EXPR, imag, r1);
1396 imag = const_binop (code, imag, div);
1400 /* In the FALSE branch, we compute
1402 divisor = (d * ratio) + c;
1403 tr = (b * ratio) + a;
1404 ti = b - (a * ratio);
1407 tree ratio = const_binop (code, i2, r2);
1408 tree div = const_binop (PLUS_EXPR, r2,
1409 const_binop (MULT_EXPR, i2, ratio));
1411 real = const_binop (MULT_EXPR, i1, ratio);
1412 real = const_binop (PLUS_EXPR, real, r1);
1413 real = const_binop (code, real, div);
1415 imag = const_binop (MULT_EXPR, r1, ratio);
1416 imag = const_binop (MINUS_EXPR, i1, imag);
1417 imag = const_binop (code, imag, div);
1427 return build_complex (type, real, imag);
1430 if (TREE_CODE (arg1) == VECTOR_CST
1431 && TREE_CODE (arg2) == VECTOR_CST)
1433 tree type = TREE_TYPE (arg1);
1434 int count = TYPE_VECTOR_SUBPARTS (type), i;
1435 tree *elts = XALLOCAVEC (tree, count);
1437 for (i = 0; i < count; i++)
1439 tree elem1 = VECTOR_CST_ELT (arg1, i);
1440 tree elem2 = VECTOR_CST_ELT (arg2, i);
1442 elts[i] = const_binop (code, elem1, elem2);
1444 /* It is possible that const_binop cannot handle the given
1445 code and return NULL_TREE */
1446 if (elts[i] == NULL_TREE)
1450 return build_vector (type, elts);
1453 /* Shifts allow a scalar offset for a vector. */
1454 if (TREE_CODE (arg1) == VECTOR_CST
1455 && TREE_CODE (arg2) == INTEGER_CST)
1457 tree type = TREE_TYPE (arg1);
1458 int count = TYPE_VECTOR_SUBPARTS (type), i;
1459 tree *elts = XALLOCAVEC (tree, count);
1461 for (i = 0; i < count; i++)
1463 tree elem1 = VECTOR_CST_ELT (arg1, i);
1465 elts[i] = const_binop (code, elem1, arg2);
1467 /* It is possible that const_binop cannot handle the given
1468 code and return NULL_TREE. */
1469 if (elts[i] == NULL_TREE)
1473 return build_vector (type, elts);
1478 /* Overload that adds a TYPE parameter to be able to dispatch
1479 to fold_relational_const. */
1482 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1484 if (TREE_CODE_CLASS (code) == tcc_comparison)
1485 return fold_relational_const (code, type, arg1, arg2);
1487 /* ??? Until we make the const_binop worker take the type of the
1488 result as argument put those cases that need it here. */
1492 if ((TREE_CODE (arg1) == REAL_CST
1493 && TREE_CODE (arg2) == REAL_CST)
1494 || (TREE_CODE (arg1) == INTEGER_CST
1495 && TREE_CODE (arg2) == INTEGER_CST))
1496 return build_complex (type, arg1, arg2);
1499 case VEC_PACK_TRUNC_EXPR:
1500 case VEC_PACK_FIX_TRUNC_EXPR:
1502 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1505 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1506 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1507 if (TREE_CODE (arg1) != VECTOR_CST
1508 || TREE_CODE (arg2) != VECTOR_CST)
1511 elts = XALLOCAVEC (tree, nelts);
1512 if (!vec_cst_ctor_to_array (arg1, elts)
1513 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1516 for (i = 0; i < nelts; i++)
1518 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1519 ? NOP_EXPR : FIX_TRUNC_EXPR,
1520 TREE_TYPE (type), elts[i]);
1521 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1525 return build_vector (type, elts);
1528 case VEC_WIDEN_MULT_LO_EXPR:
1529 case VEC_WIDEN_MULT_HI_EXPR:
1530 case VEC_WIDEN_MULT_EVEN_EXPR:
1531 case VEC_WIDEN_MULT_ODD_EXPR:
1533 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1534 unsigned int out, ofs, scale;
1537 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1538 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1539 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1542 elts = XALLOCAVEC (tree, nelts * 4);
1543 if (!vec_cst_ctor_to_array (arg1, elts)
1544 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1547 if (code == VEC_WIDEN_MULT_LO_EXPR)
1548 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1549 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1550 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1551 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1553 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1556 for (out = 0; out < nelts; out++)
1558 unsigned int in1 = (out << scale) + ofs;
1559 unsigned int in2 = in1 + nelts * 2;
1562 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1563 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1565 if (t1 == NULL_TREE || t2 == NULL_TREE)
1567 elts[out] = const_binop (MULT_EXPR, t1, t2);
1568 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1572 return build_vector (type, elts);
1578 if (TREE_CODE_CLASS (code) != tcc_binary)
1581 /* Make sure type and arg0 have the same saturating flag. */
1582 gcc_checking_assert (TYPE_SATURATING (type)
1583 == TYPE_SATURATING (TREE_TYPE (arg1)));
1585 return const_binop (code, arg1, arg2);
1588 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1589 Return zero if computing the constants is not possible. */
1592 const_unop (enum tree_code code, tree type, tree arg0)
1598 case FIX_TRUNC_EXPR:
1599 case FIXED_CONVERT_EXPR:
1600 return fold_convert_const (code, type, arg0);
1602 case ADDR_SPACE_CONVERT_EXPR:
1603 if (integer_zerop (arg0))
1604 return fold_convert_const (code, type, arg0);
1607 case VIEW_CONVERT_EXPR:
1608 return fold_view_convert_expr (type, arg0);
1612 /* Can't call fold_negate_const directly here as that doesn't
1613 handle all cases and we might not be able to negate some
1615 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1616 if (tem && CONSTANT_CLASS_P (tem))
1622 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1623 return fold_abs_const (arg0, type);
1627 if (TREE_CODE (arg0) == COMPLEX_CST)
1629 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1631 return build_complex (type, TREE_REALPART (arg0), ipart);
1636 if (TREE_CODE (arg0) == INTEGER_CST)
1637 return fold_not_const (arg0, type);
1638 /* Perform BIT_NOT_EXPR on each element individually. */
1639 else if (TREE_CODE (arg0) == VECTOR_CST)
1643 unsigned count = VECTOR_CST_NELTS (arg0), i;
1645 elements = XALLOCAVEC (tree, count);
1646 for (i = 0; i < count; i++)
1648 elem = VECTOR_CST_ELT (arg0, i);
1649 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1650 if (elem == NULL_TREE)
1655 return build_vector (type, elements);
1659 case TRUTH_NOT_EXPR:
1660 if (TREE_CODE (arg0) == INTEGER_CST)
1661 return constant_boolean_node (integer_zerop (arg0), type);
1665 if (TREE_CODE (arg0) == COMPLEX_CST)
1666 return fold_convert (type, TREE_REALPART (arg0));
1670 if (TREE_CODE (arg0) == COMPLEX_CST)
1671 return fold_convert (type, TREE_IMAGPART (arg0));
1674 case VEC_UNPACK_LO_EXPR:
1675 case VEC_UNPACK_HI_EXPR:
1676 case VEC_UNPACK_FLOAT_LO_EXPR:
1677 case VEC_UNPACK_FLOAT_HI_EXPR:
1679 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1681 enum tree_code subcode;
1683 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1684 if (TREE_CODE (arg0) != VECTOR_CST)
1687 elts = XALLOCAVEC (tree, nelts * 2);
1688 if (!vec_cst_ctor_to_array (arg0, elts))
1691 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1692 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1695 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1698 subcode = FLOAT_EXPR;
1700 for (i = 0; i < nelts; i++)
1702 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1703 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1707 return build_vector (type, elts);
1710 case REDUC_MIN_EXPR:
1711 case REDUC_MAX_EXPR:
1712 case REDUC_PLUS_EXPR:
1714 unsigned int nelts, i;
1716 enum tree_code subcode;
1718 if (TREE_CODE (arg0) != VECTOR_CST)
1720 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1722 elts = XALLOCAVEC (tree, nelts);
1723 if (!vec_cst_ctor_to_array (arg0, elts))
1728 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1729 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1730 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1731 default: gcc_unreachable ();
1734 for (i = 1; i < nelts; i++)
1736 elts[0] = const_binop (subcode, elts[0], elts[i]);
1737 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1751 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1752 indicates which particular sizetype to create. */
1755 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1757 return build_int_cst (sizetype_tab[(int) kind], number);
1760 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1761 is a tree code. The type of the result is taken from the operands.
1762 Both must be equivalent integer types, ala int_binop_types_match_p.
1763 If the operands are constant, so is the result. */
1766 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1768 tree type = TREE_TYPE (arg0);
1770 if (arg0 == error_mark_node || arg1 == error_mark_node)
1771 return error_mark_node;
1773 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1776 /* Handle the special case of two integer constants faster. */
1777 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1779 /* And some specific cases even faster than that. */
1780 if (code == PLUS_EXPR)
1782 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1784 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1787 else if (code == MINUS_EXPR)
1789 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1792 else if (code == MULT_EXPR)
1794 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1798 /* Handle general case of two integer constants. For sizetype
1799 constant calculations we always want to know about overflow,
1800 even in the unsigned case. */
1801 return int_const_binop_1 (code, arg0, arg1, -1);
1804 return fold_build2_loc (loc, code, type, arg0, arg1);
1807 /* Given two values, either both of sizetype or both of bitsizetype,
1808 compute the difference between the two values. Return the value
1809 in signed type corresponding to the type of the operands. */
1812 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1814 tree type = TREE_TYPE (arg0);
1817 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1820 /* If the type is already signed, just do the simple thing. */
1821 if (!TYPE_UNSIGNED (type))
1822 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1824 if (type == sizetype)
1826 else if (type == bitsizetype)
1827 ctype = sbitsizetype;
1829 ctype = signed_type_for (type);
1831 /* If either operand is not a constant, do the conversions to the signed
1832 type and subtract. The hardware will do the right thing with any
1833 overflow in the subtraction. */
1834 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1835 return size_binop_loc (loc, MINUS_EXPR,
1836 fold_convert_loc (loc, ctype, arg0),
1837 fold_convert_loc (loc, ctype, arg1));
1839 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1840 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1841 overflow) and negate (which can't either). Special-case a result
1842 of zero while we're here. */
1843 if (tree_int_cst_equal (arg0, arg1))
1844 return build_int_cst (ctype, 0);
1845 else if (tree_int_cst_lt (arg1, arg0))
1846 return fold_convert_loc (loc, ctype,
1847 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1849 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1850 fold_convert_loc (loc, ctype,
1851 size_binop_loc (loc,
1856 /* A subroutine of fold_convert_const handling conversions of an
1857 INTEGER_CST to another integer type. */
1860 fold_convert_const_int_from_int (tree type, const_tree arg1)
1862 /* Given an integer constant, make new constant with new type,
1863 appropriately sign-extended or truncated. Use widest_int
1864 so that any extension is done according ARG1's type. */
1865 return force_fit_type (type, wi::to_widest (arg1),
1866 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1867 TREE_OVERFLOW (arg1));
1870 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1871 to an integer type. */
1874 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1876 bool overflow = false;
1879 /* The following code implements the floating point to integer
1880 conversion rules required by the Java Language Specification,
1881 that IEEE NaNs are mapped to zero and values that overflow
1882 the target precision saturate, i.e. values greater than
1883 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1884 are mapped to INT_MIN. These semantics are allowed by the
1885 C and C++ standards that simply state that the behavior of
1886 FP-to-integer conversion is unspecified upon overflow. */
1890 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1894 case FIX_TRUNC_EXPR:
1895 real_trunc (&r, VOIDmode, &x);
1902 /* If R is NaN, return zero and show we have an overflow. */
1903 if (REAL_VALUE_ISNAN (r))
1906 val = wi::zero (TYPE_PRECISION (type));
1909 /* See if R is less than the lower bound or greater than the
1914 tree lt = TYPE_MIN_VALUE (type);
1915 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1916 if (REAL_VALUES_LESS (r, l))
1925 tree ut = TYPE_MAX_VALUE (type);
1928 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1929 if (REAL_VALUES_LESS (u, r))
1938 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1940 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1944 /* A subroutine of fold_convert_const handling conversions of a
1945 FIXED_CST to an integer type. */
1948 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1951 double_int temp, temp_trunc;
1954 /* Right shift FIXED_CST to temp by fbit. */
1955 temp = TREE_FIXED_CST (arg1).data;
1956 mode = TREE_FIXED_CST (arg1).mode;
1957 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1959 temp = temp.rshift (GET_MODE_FBIT (mode),
1960 HOST_BITS_PER_DOUBLE_INT,
1961 SIGNED_FIXED_POINT_MODE_P (mode));
1963 /* Left shift temp to temp_trunc by fbit. */
1964 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1965 HOST_BITS_PER_DOUBLE_INT,
1966 SIGNED_FIXED_POINT_MODE_P (mode));
1970 temp = double_int_zero;
1971 temp_trunc = double_int_zero;
1974 /* If FIXED_CST is negative, we need to round the value toward 0.
1975 By checking if the fractional bits are not zero to add 1 to temp. */
1976 if (SIGNED_FIXED_POINT_MODE_P (mode)
1977 && temp_trunc.is_negative ()
1978 && TREE_FIXED_CST (arg1).data != temp_trunc)
1979 temp += double_int_one;
1981 /* Given a fixed-point constant, make new constant with new type,
1982 appropriately sign-extended or truncated. */
1983 t = force_fit_type (type, temp, -1,
1984 (temp.is_negative ()
1985 && (TYPE_UNSIGNED (type)
1986 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1987 | TREE_OVERFLOW (arg1));
1992 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1993 to another floating point type. */
1996 fold_convert_const_real_from_real (tree type, const_tree arg1)
1998 REAL_VALUE_TYPE value;
2001 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2002 t = build_real (type, value);
2004 /* If converting an infinity or NAN to a representation that doesn't
2005 have one, set the overflow bit so that we can produce some kind of
2006 error message at the appropriate point if necessary. It's not the
2007 most user-friendly message, but it's better than nothing. */
2008 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2009 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2010 TREE_OVERFLOW (t) = 1;
2011 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2012 && !MODE_HAS_NANS (TYPE_MODE (type)))
2013 TREE_OVERFLOW (t) = 1;
2014 /* Regular overflow, conversion produced an infinity in a mode that
2015 can't represent them. */
2016 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2017 && REAL_VALUE_ISINF (value)
2018 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2019 TREE_OVERFLOW (t) = 1;
2021 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2025 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2026 to a floating point type. */
2029 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2031 REAL_VALUE_TYPE value;
2034 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2035 t = build_real (type, value);
2037 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2041 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2042 to another fixed-point type. */
2045 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2047 FIXED_VALUE_TYPE value;
2051 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2052 TYPE_SATURATING (type));
2053 t = build_fixed (type, value);
2055 /* Propagate overflow flags. */
2056 if (overflow_p | TREE_OVERFLOW (arg1))
2057 TREE_OVERFLOW (t) = 1;
2061 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2062 to a fixed-point type. */
2065 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2067 FIXED_VALUE_TYPE value;
2072 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2074 di.low = TREE_INT_CST_ELT (arg1, 0);
2075 if (TREE_INT_CST_NUNITS (arg1) == 1)
2076 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2078 di.high = TREE_INT_CST_ELT (arg1, 1);
2080 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2081 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2082 TYPE_SATURATING (type));
2083 t = build_fixed (type, value);
2085 /* Propagate overflow flags. */
2086 if (overflow_p | TREE_OVERFLOW (arg1))
2087 TREE_OVERFLOW (t) = 1;
2091 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2092 to a fixed-point type. */
2095 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2097 FIXED_VALUE_TYPE value;
2101 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2102 &TREE_REAL_CST (arg1),
2103 TYPE_SATURATING (type));
2104 t = build_fixed (type, value);
2106 /* Propagate overflow flags. */
2107 if (overflow_p | TREE_OVERFLOW (arg1))
2108 TREE_OVERFLOW (t) = 1;
2112 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2113 type TYPE. If no simplification can be done return NULL_TREE. */
2116 fold_convert_const (enum tree_code code, tree type, tree arg1)
2118 if (TREE_TYPE (arg1) == type)
2121 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2122 || TREE_CODE (type) == OFFSET_TYPE)
2124 if (TREE_CODE (arg1) == INTEGER_CST)
2125 return fold_convert_const_int_from_int (type, arg1);
2126 else if (TREE_CODE (arg1) == REAL_CST)
2127 return fold_convert_const_int_from_real (code, type, arg1);
2128 else if (TREE_CODE (arg1) == FIXED_CST)
2129 return fold_convert_const_int_from_fixed (type, arg1);
2131 else if (TREE_CODE (type) == REAL_TYPE)
2133 if (TREE_CODE (arg1) == INTEGER_CST)
2134 return build_real_from_int_cst (type, arg1);
2135 else if (TREE_CODE (arg1) == REAL_CST)
2136 return fold_convert_const_real_from_real (type, arg1);
2137 else if (TREE_CODE (arg1) == FIXED_CST)
2138 return fold_convert_const_real_from_fixed (type, arg1);
2140 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2142 if (TREE_CODE (arg1) == FIXED_CST)
2143 return fold_convert_const_fixed_from_fixed (type, arg1);
2144 else if (TREE_CODE (arg1) == INTEGER_CST)
2145 return fold_convert_const_fixed_from_int (type, arg1);
2146 else if (TREE_CODE (arg1) == REAL_CST)
2147 return fold_convert_const_fixed_from_real (type, arg1);
2152 /* Construct a vector of zero elements of vector type TYPE. */
2155 build_zero_vector (tree type)
2159 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2160 return build_vector_from_val (type, t);
2163 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2166 fold_convertible_p (const_tree type, const_tree arg)
2168 tree orig = TREE_TYPE (arg);
2173 if (TREE_CODE (arg) == ERROR_MARK
2174 || TREE_CODE (type) == ERROR_MARK
2175 || TREE_CODE (orig) == ERROR_MARK)
2178 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2181 switch (TREE_CODE (type))
2183 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2184 case POINTER_TYPE: case REFERENCE_TYPE:
2186 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2187 || TREE_CODE (orig) == OFFSET_TYPE)
2189 return (TREE_CODE (orig) == VECTOR_TYPE
2190 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2193 case FIXED_POINT_TYPE:
2197 return TREE_CODE (type) == TREE_CODE (orig);
2204 /* Convert expression ARG to type TYPE. Used by the middle-end for
2205 simple conversions in preference to calling the front-end's convert. */
2208 fold_convert_loc (location_t loc, tree type, tree arg)
2210 tree orig = TREE_TYPE (arg);
2216 if (TREE_CODE (arg) == ERROR_MARK
2217 || TREE_CODE (type) == ERROR_MARK
2218 || TREE_CODE (orig) == ERROR_MARK)
2219 return error_mark_node;
2221 switch (TREE_CODE (type))
2224 case REFERENCE_TYPE:
2225 /* Handle conversions between pointers to different address spaces. */
2226 if (POINTER_TYPE_P (orig)
2227 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2228 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2229 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2232 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2234 if (TREE_CODE (arg) == INTEGER_CST)
2236 tem = fold_convert_const (NOP_EXPR, type, arg);
2237 if (tem != NULL_TREE)
2240 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2241 || TREE_CODE (orig) == OFFSET_TYPE)
2242 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2243 if (TREE_CODE (orig) == COMPLEX_TYPE)
2244 return fold_convert_loc (loc, type,
2245 fold_build1_loc (loc, REALPART_EXPR,
2246 TREE_TYPE (orig), arg));
2247 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2248 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2249 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2252 if (TREE_CODE (arg) == INTEGER_CST)
2254 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2255 if (tem != NULL_TREE)
2258 else if (TREE_CODE (arg) == REAL_CST)
2260 tem = fold_convert_const (NOP_EXPR, type, arg);
2261 if (tem != NULL_TREE)
2264 else if (TREE_CODE (arg) == FIXED_CST)
2266 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2267 if (tem != NULL_TREE)
2271 switch (TREE_CODE (orig))
2274 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2275 case POINTER_TYPE: case REFERENCE_TYPE:
2276 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2279 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2281 case FIXED_POINT_TYPE:
2282 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2285 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2286 return fold_convert_loc (loc, type, tem);
2292 case FIXED_POINT_TYPE:
2293 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2294 || TREE_CODE (arg) == REAL_CST)
2296 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2297 if (tem != NULL_TREE)
2298 goto fold_convert_exit;
2301 switch (TREE_CODE (orig))
2303 case FIXED_POINT_TYPE:
2308 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2311 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2312 return fold_convert_loc (loc, type, tem);
2319 switch (TREE_CODE (orig))
2322 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2323 case POINTER_TYPE: case REFERENCE_TYPE:
2325 case FIXED_POINT_TYPE:
2326 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2327 fold_convert_loc (loc, TREE_TYPE (type), arg),
2328 fold_convert_loc (loc, TREE_TYPE (type),
2329 integer_zero_node));
2334 if (TREE_CODE (arg) == COMPLEX_EXPR)
2336 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2337 TREE_OPERAND (arg, 0));
2338 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2339 TREE_OPERAND (arg, 1));
2340 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2343 arg = save_expr (arg);
2344 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2345 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2346 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2347 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2348 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2356 if (integer_zerop (arg))
2357 return build_zero_vector (type);
2358 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2359 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2360 || TREE_CODE (orig) == VECTOR_TYPE);
2361 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2364 tem = fold_ignored_result (arg);
2365 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2368 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2369 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2373 protected_set_expr_location_unshare (tem, loc);
2377 /* Return false if expr can be assumed not to be an lvalue, true
2381 maybe_lvalue_p (const_tree x)
2383 /* We only need to wrap lvalue tree codes. */
2384 switch (TREE_CODE (x))
2397 case ARRAY_RANGE_REF:
2403 case PREINCREMENT_EXPR:
2404 case PREDECREMENT_EXPR:
2406 case TRY_CATCH_EXPR:
2407 case WITH_CLEANUP_EXPR:
2416 /* Assume the worst for front-end tree codes. */
2417 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2425 /* Return an expr equal to X but certainly not valid as an lvalue. */
2428 non_lvalue_loc (location_t loc, tree x)
2430 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2435 if (! maybe_lvalue_p (x))
2437 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2440 /* When pedantic, return an expr equal to X but certainly not valid as a
2441 pedantic lvalue. Otherwise, return X. */
2444 pedantic_non_lvalue_loc (location_t loc, tree x)
2446 return protected_set_expr_location_unshare (x, loc);
2449 /* Given a tree comparison code, return the code that is the logical inverse.
2450 It is generally not safe to do this for floating-point comparisons, except
2451 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2452 ERROR_MARK in this case. */
2455 invert_tree_comparison (enum tree_code code, bool honor_nans)
2457 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2458 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2468 return honor_nans ? UNLE_EXPR : LE_EXPR;
2470 return honor_nans ? UNLT_EXPR : LT_EXPR;
2472 return honor_nans ? UNGE_EXPR : GE_EXPR;
2474 return honor_nans ? UNGT_EXPR : GT_EXPR;
2488 return UNORDERED_EXPR;
2489 case UNORDERED_EXPR:
2490 return ORDERED_EXPR;
2496 /* Similar, but return the comparison that results if the operands are
2497 swapped. This is safe for floating-point. */
2500 swap_tree_comparison (enum tree_code code)
2507 case UNORDERED_EXPR:
2533 /* Convert a comparison tree code from an enum tree_code representation
2534 into a compcode bit-based encoding. This function is the inverse of
2535 compcode_to_comparison. */
2537 static enum comparison_code
2538 comparison_to_compcode (enum tree_code code)
2555 return COMPCODE_ORD;
2556 case UNORDERED_EXPR:
2557 return COMPCODE_UNORD;
2559 return COMPCODE_UNLT;
2561 return COMPCODE_UNEQ;
2563 return COMPCODE_UNLE;
2565 return COMPCODE_UNGT;
2567 return COMPCODE_LTGT;
2569 return COMPCODE_UNGE;
2575 /* Convert a compcode bit-based encoding of a comparison operator back
2576 to GCC's enum tree_code representation. This function is the
2577 inverse of comparison_to_compcode. */
2579 static enum tree_code
2580 compcode_to_comparison (enum comparison_code code)
2597 return ORDERED_EXPR;
2598 case COMPCODE_UNORD:
2599 return UNORDERED_EXPR;
2617 /* Return a tree for the comparison which is the combination of
2618 doing the AND or OR (depending on CODE) of the two operations LCODE
2619 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2620 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2621 if this makes the transformation invalid. */
2624 combine_comparisons (location_t loc,
2625 enum tree_code code, enum tree_code lcode,
2626 enum tree_code rcode, tree truth_type,
2627 tree ll_arg, tree lr_arg)
2629 bool honor_nans = HONOR_NANS (ll_arg);
2630 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2631 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2636 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2637 compcode = lcompcode & rcompcode;
2640 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2641 compcode = lcompcode | rcompcode;
2650 /* Eliminate unordered comparisons, as well as LTGT and ORD
2651 which are not used unless the mode has NaNs. */
2652 compcode &= ~COMPCODE_UNORD;
2653 if (compcode == COMPCODE_LTGT)
2654 compcode = COMPCODE_NE;
2655 else if (compcode == COMPCODE_ORD)
2656 compcode = COMPCODE_TRUE;
2658 else if (flag_trapping_math)
2660 /* Check that the original operation and the optimized ones will trap
2661 under the same condition. */
2662 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2663 && (lcompcode != COMPCODE_EQ)
2664 && (lcompcode != COMPCODE_ORD);
2665 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2666 && (rcompcode != COMPCODE_EQ)
2667 && (rcompcode != COMPCODE_ORD);
2668 bool trap = (compcode & COMPCODE_UNORD) == 0
2669 && (compcode != COMPCODE_EQ)
2670 && (compcode != COMPCODE_ORD);
2672 /* In a short-circuited boolean expression the LHS might be
2673 such that the RHS, if evaluated, will never trap. For
2674 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2675 if neither x nor y is NaN. (This is a mixed blessing: for
2676 example, the expression above will never trap, hence
2677 optimizing it to x < y would be invalid). */
2678 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2679 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2682 /* If the comparison was short-circuited, and only the RHS
2683 trapped, we may now generate a spurious trap. */
2685 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2688 /* If we changed the conditions that cause a trap, we lose. */
2689 if ((ltrap || rtrap) != trap)
2693 if (compcode == COMPCODE_TRUE)
2694 return constant_boolean_node (true, truth_type);
2695 else if (compcode == COMPCODE_FALSE)
2696 return constant_boolean_node (false, truth_type);
2699 enum tree_code tcode;
2701 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2702 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2706 /* Return nonzero if two operands (typically of the same tree node)
2707 are necessarily equal. If either argument has side-effects this
2708 function returns zero. FLAGS modifies behavior as follows:
2710 If OEP_ONLY_CONST is set, only return nonzero for constants.
2711 This function tests whether the operands are indistinguishable;
2712 it does not test whether they are equal using C's == operation.
2713 The distinction is important for IEEE floating point, because
2714 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2715 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2717 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2718 even though it may hold multiple values during a function.
2719 This is because a GCC tree node guarantees that nothing else is
2720 executed between the evaluation of its "operands" (which may often
2721 be evaluated in arbitrary order). Hence if the operands themselves
2722 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2723 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2724 unset means assuming isochronic (or instantaneous) tree equivalence.
2725 Unless comparing arbitrary expression trees, such as from different
2726 statements, this flag can usually be left unset.
2728 If OEP_PURE_SAME is set, then pure functions with identical arguments
2729 are considered the same. It is used when the caller has other ways
2730 to ensure that global memory is unchanged in between. */
2733 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2735 /* If either is ERROR_MARK, they aren't equal. */
2736 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2737 || TREE_TYPE (arg0) == error_mark_node
2738 || TREE_TYPE (arg1) == error_mark_node)
2741 /* Similar, if either does not have a type (like a released SSA name),
2742 they aren't equal. */
2743 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2746 /* Check equality of integer constants before bailing out due to
2747 precision differences. */
2748 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2749 return tree_int_cst_equal (arg0, arg1);
2751 /* If both types don't have the same signedness, then we can't consider
2752 them equal. We must check this before the STRIP_NOPS calls
2753 because they may change the signedness of the arguments. As pointers
2754 strictly don't have a signedness, require either two pointers or
2755 two non-pointers as well. */
2756 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2757 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2760 /* We cannot consider pointers to different address space equal. */
2761 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2762 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2763 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2766 /* If both types don't have the same precision, then it is not safe
2768 if (element_precision (TREE_TYPE (arg0))
2769 != element_precision (TREE_TYPE (arg1)))
2775 /* In case both args are comparisons but with different comparison
2776 code, try to swap the comparison operands of one arg to produce
2777 a match and compare that variant. */
2778 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2779 && COMPARISON_CLASS_P (arg0)
2780 && COMPARISON_CLASS_P (arg1))
2782 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2784 if (TREE_CODE (arg0) == swap_code)
2785 return operand_equal_p (TREE_OPERAND (arg0, 0),
2786 TREE_OPERAND (arg1, 1), flags)
2787 && operand_equal_p (TREE_OPERAND (arg0, 1),
2788 TREE_OPERAND (arg1, 0), flags);
2791 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2792 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2793 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2796 /* This is needed for conversions and for COMPONENT_REF.
2797 Might as well play it safe and always test this. */
2798 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2799 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2800 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2803 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2804 We don't care about side effects in that case because the SAVE_EXPR
2805 takes care of that for us. In all other cases, two expressions are
2806 equal if they have no side effects. If we have two identical
2807 expressions with side effects that should be treated the same due
2808 to the only side effects being identical SAVE_EXPR's, that will
2809 be detected in the recursive calls below.
2810 If we are taking an invariant address of two identical objects
2811 they are necessarily equal as well. */
2812 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2813 && (TREE_CODE (arg0) == SAVE_EXPR
2814 || (flags & OEP_CONSTANT_ADDRESS_OF)
2815 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2818 /* Next handle constant cases, those for which we can return 1 even
2819 if ONLY_CONST is set. */
2820 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2821 switch (TREE_CODE (arg0))
2824 return tree_int_cst_equal (arg0, arg1);
2827 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2828 TREE_FIXED_CST (arg1));
2831 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2832 TREE_REAL_CST (arg1)))
2836 if (!HONOR_SIGNED_ZEROS (arg0))
2838 /* If we do not distinguish between signed and unsigned zero,
2839 consider them equal. */
2840 if (real_zerop (arg0) && real_zerop (arg1))
2849 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2852 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2854 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2855 VECTOR_CST_ELT (arg1, i), flags))
2862 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2864 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2868 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2869 && ! memcmp (TREE_STRING_POINTER (arg0),
2870 TREE_STRING_POINTER (arg1),
2871 TREE_STRING_LENGTH (arg0)));
2874 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2875 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2876 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2881 if (flags & OEP_ONLY_CONST)
2884 /* Define macros to test an operand from arg0 and arg1 for equality and a
2885 variant that allows null and views null as being different from any
2886 non-null value. In the latter case, if either is null, the both
2887 must be; otherwise, do the normal comparison. */
2888 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2889 TREE_OPERAND (arg1, N), flags)
2891 #define OP_SAME_WITH_NULL(N) \
2892 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2893 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2895 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2898 /* Two conversions are equal only if signedness and modes match. */
2899 switch (TREE_CODE (arg0))
2902 case FIX_TRUNC_EXPR:
2903 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2904 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2914 case tcc_comparison:
2916 if (OP_SAME (0) && OP_SAME (1))
2919 /* For commutative ops, allow the other order. */
2920 return (commutative_tree_code (TREE_CODE (arg0))
2921 && operand_equal_p (TREE_OPERAND (arg0, 0),
2922 TREE_OPERAND (arg1, 1), flags)
2923 && operand_equal_p (TREE_OPERAND (arg0, 1),
2924 TREE_OPERAND (arg1, 0), flags));
2927 /* If either of the pointer (or reference) expressions we are
2928 dereferencing contain a side effect, these cannot be equal,
2929 but their addresses can be. */
2930 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2931 && (TREE_SIDE_EFFECTS (arg0)
2932 || TREE_SIDE_EFFECTS (arg1)))
2935 switch (TREE_CODE (arg0))
2938 if (!(flags & OEP_ADDRESS_OF)
2939 && (TYPE_ALIGN (TREE_TYPE (arg0))
2940 != TYPE_ALIGN (TREE_TYPE (arg1))))
2942 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2949 case TARGET_MEM_REF:
2951 /* Require equal access sizes, and similar pointer types.
2952 We can have incomplete types for array references of
2953 variable-sized arrays from the Fortran frontend
2954 though. Also verify the types are compatible. */
2955 if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2956 || (TYPE_SIZE (TREE_TYPE (arg0))
2957 && TYPE_SIZE (TREE_TYPE (arg1))
2958 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2959 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2960 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2961 && ((flags & OEP_ADDRESS_OF)
2962 || (alias_ptr_types_compatible_p
2963 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2964 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2965 && (MR_DEPENDENCE_CLIQUE (arg0)
2966 == MR_DEPENDENCE_CLIQUE (arg1))
2967 && (MR_DEPENDENCE_BASE (arg0)
2968 == MR_DEPENDENCE_BASE (arg1))
2969 && (TYPE_ALIGN (TREE_TYPE (arg0))
2970 == TYPE_ALIGN (TREE_TYPE (arg1)))))))
2972 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2973 return (OP_SAME (0) && OP_SAME (1)
2974 /* TARGET_MEM_REF require equal extra operands. */
2975 && (TREE_CODE (arg0) != TARGET_MEM_REF
2976 || (OP_SAME_WITH_NULL (2)
2977 && OP_SAME_WITH_NULL (3)
2978 && OP_SAME_WITH_NULL (4))));
2981 case ARRAY_RANGE_REF:
2982 /* Operands 2 and 3 may be null.
2983 Compare the array index by value if it is constant first as we
2984 may have different types but same value here. */
2987 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2988 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2989 TREE_OPERAND (arg1, 1))
2991 && OP_SAME_WITH_NULL (2)
2992 && OP_SAME_WITH_NULL (3));
2995 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2996 may be NULL when we're called to compare MEM_EXPRs. */
2997 if (!OP_SAME_WITH_NULL (0)
3000 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3001 return OP_SAME_WITH_NULL (2);
3006 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3007 return OP_SAME (1) && OP_SAME (2);
3013 case tcc_expression:
3014 switch (TREE_CODE (arg0))
3017 return operand_equal_p (TREE_OPERAND (arg0, 0),
3018 TREE_OPERAND (arg1, 0),
3019 flags | OEP_ADDRESS_OF);
3021 case TRUTH_NOT_EXPR:
3024 case TRUTH_ANDIF_EXPR:
3025 case TRUTH_ORIF_EXPR:
3026 return OP_SAME (0) && OP_SAME (1);
3029 case WIDEN_MULT_PLUS_EXPR:
3030 case WIDEN_MULT_MINUS_EXPR:
3033 /* The multiplcation operands are commutative. */
3036 case TRUTH_AND_EXPR:
3038 case TRUTH_XOR_EXPR:
3039 if (OP_SAME (0) && OP_SAME (1))
3042 /* Otherwise take into account this is a commutative operation. */
3043 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3044 TREE_OPERAND (arg1, 1), flags)
3045 && operand_equal_p (TREE_OPERAND (arg0, 1),
3046 TREE_OPERAND (arg1, 0), flags));
3051 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3058 switch (TREE_CODE (arg0))
3061 /* If the CALL_EXPRs call different functions, then they
3062 clearly can not be equal. */
3063 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3068 unsigned int cef = call_expr_flags (arg0);
3069 if (flags & OEP_PURE_SAME)
3070 cef &= ECF_CONST | ECF_PURE;
3077 /* Now see if all the arguments are the same. */
3079 const_call_expr_arg_iterator iter0, iter1;
3081 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3082 a1 = first_const_call_expr_arg (arg1, &iter1);
3084 a0 = next_const_call_expr_arg (&iter0),
3085 a1 = next_const_call_expr_arg (&iter1))
3086 if (! operand_equal_p (a0, a1, flags))
3089 /* If we get here and both argument lists are exhausted
3090 then the CALL_EXPRs are equal. */
3091 return ! (a0 || a1);
3097 case tcc_declaration:
3098 /* Consider __builtin_sqrt equal to sqrt. */
3099 return (TREE_CODE (arg0) == FUNCTION_DECL
3100 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3101 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3102 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3109 #undef OP_SAME_WITH_NULL
3112 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3113 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3115 When in doubt, return 0. */
3118 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3120 int unsignedp1, unsignedpo;
3121 tree primarg0, primarg1, primother;
3122 unsigned int correct_width;
3124 if (operand_equal_p (arg0, arg1, 0))
3127 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3128 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3131 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3132 and see if the inner values are the same. This removes any
3133 signedness comparison, which doesn't matter here. */
3134 primarg0 = arg0, primarg1 = arg1;
3135 STRIP_NOPS (primarg0);
3136 STRIP_NOPS (primarg1);
3137 if (operand_equal_p (primarg0, primarg1, 0))
3140 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3141 actual comparison operand, ARG0.
3143 First throw away any conversions to wider types
3144 already present in the operands. */
3146 primarg1 = get_narrower (arg1, &unsignedp1);
3147 primother = get_narrower (other, &unsignedpo);
3149 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3150 if (unsignedp1 == unsignedpo
3151 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3152 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3154 tree type = TREE_TYPE (arg0);
3156 /* Make sure shorter operand is extended the right way
3157 to match the longer operand. */
3158 primarg1 = fold_convert (signed_or_unsigned_type_for
3159 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3161 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3168 /* See if ARG is an expression that is either a comparison or is performing
3169 arithmetic on comparisons. The comparisons must only be comparing
3170 two different values, which will be stored in *CVAL1 and *CVAL2; if
3171 they are nonzero it means that some operands have already been found.
3172 No variables may be used anywhere else in the expression except in the
3173 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3174 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3176 If this is true, return 1. Otherwise, return zero. */
3179 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3181 enum tree_code code = TREE_CODE (arg);
3182 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3184 /* We can handle some of the tcc_expression cases here. */
3185 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3187 else if (tclass == tcc_expression
3188 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3189 || code == COMPOUND_EXPR))
3190 tclass = tcc_binary;
3192 else if (tclass == tcc_expression && code == SAVE_EXPR
3193 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3195 /* If we've already found a CVAL1 or CVAL2, this expression is
3196 two complex to handle. */
3197 if (*cval1 || *cval2)
3207 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3210 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3211 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3212 cval1, cval2, save_p));
3217 case tcc_expression:
3218 if (code == COND_EXPR)
3219 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3220 cval1, cval2, save_p)
3221 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3222 cval1, cval2, save_p)
3223 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3224 cval1, cval2, save_p));
3227 case tcc_comparison:
3228 /* First see if we can handle the first operand, then the second. For
3229 the second operand, we know *CVAL1 can't be zero. It must be that
3230 one side of the comparison is each of the values; test for the
3231 case where this isn't true by failing if the two operands
3234 if (operand_equal_p (TREE_OPERAND (arg, 0),
3235 TREE_OPERAND (arg, 1), 0))
3239 *cval1 = TREE_OPERAND (arg, 0);
3240 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3242 else if (*cval2 == 0)
3243 *cval2 = TREE_OPERAND (arg, 0);
3244 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3249 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3251 else if (*cval2 == 0)
3252 *cval2 = TREE_OPERAND (arg, 1);
3253 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3265 /* ARG is a tree that is known to contain just arithmetic operations and
3266 comparisons. Evaluate the operations in the tree substituting NEW0 for
3267 any occurrence of OLD0 as an operand of a comparison and likewise for
3271 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3272 tree old1, tree new1)
3274 tree type = TREE_TYPE (arg);
3275 enum tree_code code = TREE_CODE (arg);
3276 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3278 /* We can handle some of the tcc_expression cases here. */
3279 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3281 else if (tclass == tcc_expression
3282 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3283 tclass = tcc_binary;
3288 return fold_build1_loc (loc, code, type,
3289 eval_subst (loc, TREE_OPERAND (arg, 0),
3290 old0, new0, old1, new1));
3293 return fold_build2_loc (loc, code, type,
3294 eval_subst (loc, TREE_OPERAND (arg, 0),
3295 old0, new0, old1, new1),
3296 eval_subst (loc, TREE_OPERAND (arg, 1),
3297 old0, new0, old1, new1));
3299 case tcc_expression:
3303 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3307 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3311 return fold_build3_loc (loc, code, type,
3312 eval_subst (loc, TREE_OPERAND (arg, 0),
3313 old0, new0, old1, new1),
3314 eval_subst (loc, TREE_OPERAND (arg, 1),
3315 old0, new0, old1, new1),
3316 eval_subst (loc, TREE_OPERAND (arg, 2),
3317 old0, new0, old1, new1));
3321 /* Fall through - ??? */
3323 case tcc_comparison:
3325 tree arg0 = TREE_OPERAND (arg, 0);
3326 tree arg1 = TREE_OPERAND (arg, 1);
3328 /* We need to check both for exact equality and tree equality. The
3329 former will be true if the operand has a side-effect. In that
3330 case, we know the operand occurred exactly once. */
3332 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3334 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3337 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3339 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3342 return fold_build2_loc (loc, code, type, arg0, arg1);
3350 /* Return a tree for the case when the result of an expression is RESULT
3351 converted to TYPE and OMITTED was previously an operand of the expression
3352 but is now not needed (e.g., we folded OMITTED * 0).
3354 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3355 the conversion of RESULT to TYPE. */
3358 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3360 tree t = fold_convert_loc (loc, type, result);
3362 /* If the resulting operand is an empty statement, just return the omitted
3363 statement casted to void. */
3364 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3365 return build1_loc (loc, NOP_EXPR, void_type_node,
3366 fold_ignored_result (omitted));
3368 if (TREE_SIDE_EFFECTS (omitted))
3369 return build2_loc (loc, COMPOUND_EXPR, type,
3370 fold_ignored_result (omitted), t);
3372 return non_lvalue_loc (loc, t);
3375 /* Return a tree for the case when the result of an expression is RESULT
3376 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3377 of the expression but are now not needed.
3379 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3380 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3381 evaluated before OMITTED2. Otherwise, if neither has side effects,
3382 just do the conversion of RESULT to TYPE. */
3385 omit_two_operands_loc (location_t loc, tree type, tree result,
3386 tree omitted1, tree omitted2)
3388 tree t = fold_convert_loc (loc, type, result);
3390 if (TREE_SIDE_EFFECTS (omitted2))
3391 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3392 if (TREE_SIDE_EFFECTS (omitted1))
3393 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3395 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3399 /* Return a simplified tree node for the truth-negation of ARG. This
3400 never alters ARG itself. We assume that ARG is an operation that
3401 returns a truth value (0 or 1).
3403 FIXME: one would think we would fold the result, but it causes
3404 problems with the dominator optimizer. */
3407 fold_truth_not_expr (location_t loc, tree arg)
3409 tree type = TREE_TYPE (arg);
3410 enum tree_code code = TREE_CODE (arg);
3411 location_t loc1, loc2;
3413 /* If this is a comparison, we can simply invert it, except for
3414 floating-point non-equality comparisons, in which case we just
3415 enclose a TRUTH_NOT_EXPR around what we have. */
3417 if (TREE_CODE_CLASS (code) == tcc_comparison)
3419 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3420 if (FLOAT_TYPE_P (op_type)
3421 && flag_trapping_math
3422 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3423 && code != NE_EXPR && code != EQ_EXPR)
3426 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3427 if (code == ERROR_MARK)
3430 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3431 TREE_OPERAND (arg, 1));
3437 return constant_boolean_node (integer_zerop (arg), type);
3439 case TRUTH_AND_EXPR:
3440 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3441 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3442 return build2_loc (loc, TRUTH_OR_EXPR, type,
3443 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3444 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3447 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3448 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3449 return build2_loc (loc, TRUTH_AND_EXPR, type,
3450 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3451 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3453 case TRUTH_XOR_EXPR:
3454 /* Here we can invert either operand. We invert the first operand
3455 unless the second operand is a TRUTH_NOT_EXPR in which case our
3456 result is the XOR of the first operand with the inside of the
3457 negation of the second operand. */
3459 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3460 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3461 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3463 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3464 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3465 TREE_OPERAND (arg, 1));
3467 case TRUTH_ANDIF_EXPR:
3468 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3469 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3470 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3471 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3472 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3474 case TRUTH_ORIF_EXPR:
3475 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3476 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3477 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3478 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3479 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3481 case TRUTH_NOT_EXPR:
3482 return TREE_OPERAND (arg, 0);
3486 tree arg1 = TREE_OPERAND (arg, 1);
3487 tree arg2 = TREE_OPERAND (arg, 2);
3489 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3490 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3492 /* A COND_EXPR may have a throw as one operand, which
3493 then has void type. Just leave void operands
3495 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3496 VOID_TYPE_P (TREE_TYPE (arg1))
3497 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3498 VOID_TYPE_P (TREE_TYPE (arg2))
3499 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3503 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3504 return build2_loc (loc, COMPOUND_EXPR, type,
3505 TREE_OPERAND (arg, 0),
3506 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3508 case NON_LVALUE_EXPR:
3509 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3510 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3513 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3514 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3516 /* ... fall through ... */
3519 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3520 return build1_loc (loc, TREE_CODE (arg), type,
3521 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3524 if (!integer_onep (TREE_OPERAND (arg, 1)))
3526 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3529 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3531 case CLEANUP_POINT_EXPR:
3532 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3533 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3534 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3541 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3542 assume that ARG is an operation that returns a truth value (0 or 1
3543 for scalars, 0 or -1 for vectors). Return the folded expression if
3544 folding is successful. Otherwise, return NULL_TREE. */
3547 fold_invert_truthvalue (location_t loc, tree arg)
3549 tree type = TREE_TYPE (arg);
3550 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3556 /* Return a simplified tree node for the truth-negation of ARG. This
3557 never alters ARG itself. We assume that ARG is an operation that
3558 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3561 invert_truthvalue_loc (location_t loc, tree arg)
3563 if (TREE_CODE (arg) == ERROR_MARK)
3566 tree type = TREE_TYPE (arg);
3567 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3573 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3574 operands are another bit-wise operation with a common input. If so,
3575 distribute the bit operations to save an operation and possibly two if
3576 constants are involved. For example, convert
3577 (A | B) & (A | C) into A | (B & C)
3578 Further simplification will occur if B and C are constants.
3580 If this optimization cannot be done, 0 will be returned. */
3583 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3584 tree arg0, tree arg1)
3589 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3590 || TREE_CODE (arg0) == code
3591 || (TREE_CODE (arg0) != BIT_AND_EXPR
3592 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3595 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3597 common = TREE_OPERAND (arg0, 0);
3598 left = TREE_OPERAND (arg0, 1);
3599 right = TREE_OPERAND (arg1, 1);
3601 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3603 common = TREE_OPERAND (arg0, 0);
3604 left = TREE_OPERAND (arg0, 1);
3605 right = TREE_OPERAND (arg1, 0);
3607 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3609 common = TREE_OPERAND (arg0, 1);
3610 left = TREE_OPERAND (arg0, 0);
3611 right = TREE_OPERAND (arg1, 1);
3613 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3615 common = TREE_OPERAND (arg0, 1);
3616 left = TREE_OPERAND (arg0, 0);
3617 right = TREE_OPERAND (arg1, 0);
3622 common = fold_convert_loc (loc, type, common);
3623 left = fold_convert_loc (loc, type, left);
3624 right = fold_convert_loc (loc, type, right);
3625 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3626 fold_build2_loc (loc, code, type, left, right));
3629 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3630 with code CODE. This optimization is unsafe. */
3632 distribute_real_division (location_t loc, enum tree_code code, tree type,
3633 tree arg0, tree arg1)
3635 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3636 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3638 /* (A / C) +- (B / C) -> (A +- B) / C. */
3640 && operand_equal_p (TREE_OPERAND (arg0, 1),
3641 TREE_OPERAND (arg1, 1), 0))
3642 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3643 fold_build2_loc (loc, code, type,
3644 TREE_OPERAND (arg0, 0),
3645 TREE_OPERAND (arg1, 0)),
3646 TREE_OPERAND (arg0, 1));
3648 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3649 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3650 TREE_OPERAND (arg1, 0), 0)
3651 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3652 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3654 REAL_VALUE_TYPE r0, r1;
3655 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3656 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3658 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3660 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3661 real_arithmetic (&r0, code, &r0, &r1);
3662 return fold_build2_loc (loc, MULT_EXPR, type,
3663 TREE_OPERAND (arg0, 0),
3664 build_real (type, r0));
3670 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3671 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3674 make_bit_field_ref (location_t loc, tree inner, tree type,
3675 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3677 tree result, bftype;
3681 tree size = TYPE_SIZE (TREE_TYPE (inner));
3682 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3683 || POINTER_TYPE_P (TREE_TYPE (inner)))
3684 && tree_fits_shwi_p (size)
3685 && tree_to_shwi (size) == bitsize)
3686 return fold_convert_loc (loc, type, inner);
3690 if (TYPE_PRECISION (bftype) != bitsize
3691 || TYPE_UNSIGNED (bftype) == !unsignedp)
3692 bftype = build_nonstandard_integer_type (bitsize, 0);
3694 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3695 size_int (bitsize), bitsize_int (bitpos));
3698 result = fold_convert_loc (loc, type, result);
3703 /* Optimize a bit-field compare.
3705 There are two cases: First is a compare against a constant and the
3706 second is a comparison of two items where the fields are at the same
3707 bit position relative to the start of a chunk (byte, halfword, word)
3708 large enough to contain it. In these cases we can avoid the shift
3709 implicit in bitfield extractions.
3711 For constants, we emit a compare of the shifted constant with the
3712 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3713 compared. For two fields at the same position, we do the ANDs with the
3714 similar mask and compare the result of the ANDs.
3716 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3717 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3718 are the left and right operands of the comparison, respectively.
3720 If the optimization described above can be done, we return the resulting
3721 tree. Otherwise we return zero. */
3724 optimize_bit_field_compare (location_t loc, enum tree_code code,
3725 tree compare_type, tree lhs, tree rhs)
3727 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3728 tree type = TREE_TYPE (lhs);
3730 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3731 machine_mode lmode, rmode, nmode;
3732 int lunsignedp, runsignedp;
3733 int lvolatilep = 0, rvolatilep = 0;
3734 tree linner, rinner = NULL_TREE;
3738 /* Get all the information about the extractions being done. If the bit size
3739 if the same as the size of the underlying object, we aren't doing an
3740 extraction at all and so can do nothing. We also don't want to
3741 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3742 then will no longer be able to replace it. */
3743 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3744 &lunsignedp, &lvolatilep, false);
3745 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3746 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3751 /* If this is not a constant, we can only do something if bit positions,
3752 sizes, and signedness are the same. */
3753 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3754 &runsignedp, &rvolatilep, false);
3756 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3757 || lunsignedp != runsignedp || offset != 0
3758 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3762 /* See if we can find a mode to refer to this field. We should be able to,
3763 but fail if we can't. */
3764 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3765 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3766 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3767 TYPE_ALIGN (TREE_TYPE (rinner))),
3769 if (nmode == VOIDmode)
3772 /* Set signed and unsigned types of the precision of this mode for the
3774 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3776 /* Compute the bit position and size for the new reference and our offset
3777 within it. If the new reference is the same size as the original, we
3778 won't optimize anything, so return zero. */
3779 nbitsize = GET_MODE_BITSIZE (nmode);
3780 nbitpos = lbitpos & ~ (nbitsize - 1);
3782 if (nbitsize == lbitsize)
3785 if (BYTES_BIG_ENDIAN)
3786 lbitpos = nbitsize - lbitsize - lbitpos;
3788 /* Make the mask to be used against the extracted field. */
3789 mask = build_int_cst_type (unsigned_type, -1);
3790 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3791 mask = const_binop (RSHIFT_EXPR, mask,
3792 size_int (nbitsize - lbitsize - lbitpos));
3795 /* If not comparing with constant, just rework the comparison
3797 return fold_build2_loc (loc, code, compare_type,
3798 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3799 make_bit_field_ref (loc, linner,
3804 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3805 make_bit_field_ref (loc, rinner,
3811 /* Otherwise, we are handling the constant case. See if the constant is too
3812 big for the field. Warn and return a tree of for 0 (false) if so. We do
3813 this not only for its own sake, but to avoid having to test for this
3814 error case below. If we didn't, we might generate wrong code.
3816 For unsigned fields, the constant shifted right by the field length should
3817 be all zero. For signed fields, the high-order bits should agree with
3822 if (wi::lrshift (rhs, lbitsize) != 0)
3824 warning (0, "comparison is always %d due to width of bit-field",
3826 return constant_boolean_node (code == NE_EXPR, compare_type);
3831 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3832 if (tem != 0 && tem != -1)
3834 warning (0, "comparison is always %d due to width of bit-field",
3836 return constant_boolean_node (code == NE_EXPR, compare_type);
3840 /* Single-bit compares should always be against zero. */
3841 if (lbitsize == 1 && ! integer_zerop (rhs))
3843 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3844 rhs = build_int_cst (type, 0);
3847 /* Make a new bitfield reference, shift the constant over the
3848 appropriate number of bits and mask it with the computed mask
3849 (in case this was a signed field). If we changed it, make a new one. */
3850 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3852 rhs = const_binop (BIT_AND_EXPR,
3853 const_binop (LSHIFT_EXPR,
3854 fold_convert_loc (loc, unsigned_type, rhs),
3855 size_int (lbitpos)),
3858 lhs = build2_loc (loc, code, compare_type,
3859 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3863 /* Subroutine for fold_truth_andor_1: decode a field reference.
3865 If EXP is a comparison reference, we return the innermost reference.
3867 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3868 set to the starting bit number.
3870 If the innermost field can be completely contained in a mode-sized
3871 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3873 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3874 otherwise it is not changed.
3876 *PUNSIGNEDP is set to the signedness of the field.
3878 *PMASK is set to the mask used. This is either contained in a
3879 BIT_AND_EXPR or derived from the width of the field.
3881 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3883 Return 0 if this is not a component reference or is one that we can't
3884 do anything with. */
3887 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3888 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3889 int *punsignedp, int *pvolatilep,
3890 tree *pmask, tree *pand_mask)
3892 tree outer_type = 0;
3894 tree mask, inner, offset;
3896 unsigned int precision;
3898 /* All the optimizations using this function assume integer fields.
3899 There are problems with FP fields since the type_for_size call
3900 below can fail for, e.g., XFmode. */
3901 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3904 /* We are interested in the bare arrangement of bits, so strip everything
3905 that doesn't affect the machine mode. However, record the type of the
3906 outermost expression if it may matter below. */
3907 if (CONVERT_EXPR_P (exp)
3908 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3909 outer_type = TREE_TYPE (exp);
3912 if (TREE_CODE (exp) == BIT_AND_EXPR)
3914 and_mask = TREE_OPERAND (exp, 1);
3915 exp = TREE_OPERAND (exp, 0);
3916 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3917 if (TREE_CODE (and_mask) != INTEGER_CST)
3921 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3922 punsignedp, pvolatilep, false);
3923 if ((inner == exp && and_mask == 0)
3924 || *pbitsize < 0 || offset != 0
3925 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3928 /* If the number of bits in the reference is the same as the bitsize of
3929 the outer type, then the outer type gives the signedness. Otherwise
3930 (in case of a small bitfield) the signedness is unchanged. */
3931 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3932 *punsignedp = TYPE_UNSIGNED (outer_type);
3934 /* Compute the mask to access the bitfield. */
3935 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3936 precision = TYPE_PRECISION (unsigned_type);
3938 mask = build_int_cst_type (unsigned_type, -1);
3940 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3941 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3943 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3945 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3946 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3949 *pand_mask = and_mask;
3953 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3954 bit positions and MASK is SIGNED. */
3957 all_ones_mask_p (const_tree mask, unsigned int size)
3959 tree type = TREE_TYPE (mask);
3960 unsigned int precision = TYPE_PRECISION (type);
3962 /* If this function returns true when the type of the mask is
3963 UNSIGNED, then there will be errors. In particular see
3964 gcc.c-torture/execute/990326-1.c. There does not appear to be
3965 any documentation paper trail as to why this is so. But the pre
3966 wide-int worked with that restriction and it has been preserved
3968 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3971 return wi::mask (size, false, precision) == mask;
3974 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3975 represents the sign bit of EXP's type. If EXP represents a sign
3976 or zero extension, also test VAL against the unextended type.
3977 The return value is the (sub)expression whose sign bit is VAL,
3978 or NULL_TREE otherwise. */
3981 sign_bit_p (tree exp, const_tree val)
3986 /* Tree EXP must have an integral type. */
3987 t = TREE_TYPE (exp);
3988 if (! INTEGRAL_TYPE_P (t))
3991 /* Tree VAL must be an integer constant. */
3992 if (TREE_CODE (val) != INTEGER_CST
3993 || TREE_OVERFLOW (val))
3996 width = TYPE_PRECISION (t);
3997 if (wi::only_sign_bit_p (val, width))
4000 /* Handle extension from a narrower type. */
4001 if (TREE_CODE (exp) == NOP_EXPR
4002 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4003 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4008 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4009 to be evaluated unconditionally. */
4012 simple_operand_p (const_tree exp)
4014 /* Strip any conversions that don't change the machine mode. */
4017 return (CONSTANT_CLASS_P (exp)
4018 || TREE_CODE (exp) == SSA_NAME
4020 && ! TREE_ADDRESSABLE (exp)
4021 && ! TREE_THIS_VOLATILE (exp)
4022 && ! DECL_NONLOCAL (exp)
4023 /* Don't regard global variables as simple. They may be
4024 allocated in ways unknown to the compiler (shared memory,
4025 #pragma weak, etc). */
4026 && ! TREE_PUBLIC (exp)
4027 && ! DECL_EXTERNAL (exp)
4028 /* Weakrefs are not safe to be read, since they can be NULL.
4029 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4030 have DECL_WEAK flag set. */
4031 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4032 /* Loading a static variable is unduly expensive, but global
4033 registers aren't expensive. */
4034 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4037 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4038 to be evaluated unconditionally.
4039 I addition to simple_operand_p, we assume that comparisons, conversions,
4040 and logic-not operations are simple, if their operands are simple, too. */
4043 simple_operand_p_2 (tree exp)
4045 enum tree_code code;
4047 if (TREE_SIDE_EFFECTS (exp)
4048 || tree_could_trap_p (exp))
4051 while (CONVERT_EXPR_P (exp))
4052 exp = TREE_OPERAND (exp, 0);
4054 code = TREE_CODE (exp);
4056 if (TREE_CODE_CLASS (code) == tcc_comparison)
4057 return (simple_operand_p (TREE_OPERAND (exp, 0))
4058 && simple_operand_p (TREE_OPERAND (exp, 1)));
4060 if (code == TRUTH_NOT_EXPR)
4061 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4063 return simple_operand_p (exp);
4067 /* The following functions are subroutines to fold_range_test and allow it to
4068 try to change a logical combination of comparisons into a range test.
4071 X == 2 || X == 3 || X == 4 || X == 5
4075 (unsigned) (X - 2) <= 3
4077 We describe each set of comparisons as being either inside or outside
4078 a range, using a variable named like IN_P, and then describe the
4079 range with a lower and upper bound. If one of the bounds is omitted,
4080 it represents either the highest or lowest value of the type.
4082 In the comments below, we represent a range by two numbers in brackets
4083 preceded by a "+" to designate being inside that range, or a "-" to
4084 designate being outside that range, so the condition can be inverted by
4085 flipping the prefix. An omitted bound is represented by a "-". For
4086 example, "- [-, 10]" means being outside the range starting at the lowest
4087 possible value and ending at 10, in other words, being greater than 10.
4088 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4091 We set up things so that the missing bounds are handled in a consistent
4092 manner so neither a missing bound nor "true" and "false" need to be
4093 handled using a special case. */
4095 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4096 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4097 and UPPER1_P are nonzero if the respective argument is an upper bound
4098 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4099 must be specified for a comparison. ARG1 will be converted to ARG0's
4100 type if both are specified. */
4103 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4104 tree arg1, int upper1_p)
4110 /* If neither arg represents infinity, do the normal operation.
4111 Else, if not a comparison, return infinity. Else handle the special
4112 comparison rules. Note that most of the cases below won't occur, but
4113 are handled for consistency. */
4115 if (arg0 != 0 && arg1 != 0)
4117 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4118 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4120 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4123 if (TREE_CODE_CLASS (code) != tcc_comparison)
4126 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4127 for neither. In real maths, we cannot assume open ended ranges are
4128 the same. But, this is computer arithmetic, where numbers are finite.
4129 We can therefore make the transformation of any unbounded range with
4130 the value Z, Z being greater than any representable number. This permits
4131 us to treat unbounded ranges as equal. */
4132 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4133 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4137 result = sgn0 == sgn1;
4140 result = sgn0 != sgn1;
4143 result = sgn0 < sgn1;
4146 result = sgn0 <= sgn1;
4149 result = sgn0 > sgn1;
4152 result = sgn0 >= sgn1;
4158 return constant_boolean_node (result, type);
4161 /* Helper routine for make_range. Perform one step for it, return
4162 new expression if the loop should continue or NULL_TREE if it should
4166 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4167 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4168 bool *strict_overflow_p)
4170 tree arg0_type = TREE_TYPE (arg0);
4171 tree n_low, n_high, low = *p_low, high = *p_high;
4172 int in_p = *p_in_p, n_in_p;
4176 case TRUTH_NOT_EXPR:
4177 /* We can only do something if the range is testing for zero. */
4178 if (low == NULL_TREE || high == NULL_TREE
4179 || ! integer_zerop (low) || ! integer_zerop (high))
4184 case EQ_EXPR: case NE_EXPR:
4185 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4186 /* We can only do something if the range is testing for zero
4187 and if the second operand is an integer constant. Note that
4188 saying something is "in" the range we make is done by
4189 complementing IN_P since it will set in the initial case of
4190 being not equal to zero; "out" is leaving it alone. */
4191 if (low == NULL_TREE || high == NULL_TREE
4192 || ! integer_zerop (low) || ! integer_zerop (high)
4193 || TREE_CODE (arg1) != INTEGER_CST)
4198 case NE_EXPR: /* - [c, c] */
4201 case EQ_EXPR: /* + [c, c] */
4202 in_p = ! in_p, low = high = arg1;
4204 case GT_EXPR: /* - [-, c] */
4205 low = 0, high = arg1;
4207 case GE_EXPR: /* + [c, -] */
4208 in_p = ! in_p, low = arg1, high = 0;
4210 case LT_EXPR: /* - [c, -] */
4211 low = arg1, high = 0;
4213 case LE_EXPR: /* + [-, c] */
4214 in_p = ! in_p, low = 0, high = arg1;
4220 /* If this is an unsigned comparison, we also know that EXP is
4221 greater than or equal to zero. We base the range tests we make
4222 on that fact, so we record it here so we can parse existing
4223 range tests. We test arg0_type since often the return type
4224 of, e.g. EQ_EXPR, is boolean. */
4225 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4227 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4229 build_int_cst (arg0_type, 0),
4233 in_p = n_in_p, low = n_low, high = n_high;
4235 /* If the high bound is missing, but we have a nonzero low
4236 bound, reverse the range so it goes from zero to the low bound
4238 if (high == 0 && low && ! integer_zerop (low))
4241 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4242 build_int_cst (TREE_TYPE (low), 1), 0);
4243 low = build_int_cst (arg0_type, 0);
4253 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4254 low and high are non-NULL, then normalize will DTRT. */
4255 if (!TYPE_UNSIGNED (arg0_type)
4256 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4258 if (low == NULL_TREE)
4259 low = TYPE_MIN_VALUE (arg0_type);
4260 if (high == NULL_TREE)
4261 high = TYPE_MAX_VALUE (arg0_type);
4264 /* (-x) IN [a,b] -> x in [-b, -a] */
4265 n_low = range_binop (MINUS_EXPR, exp_type,
4266 build_int_cst (exp_type, 0),
4268 n_high = range_binop (MINUS_EXPR, exp_type,
4269 build_int_cst (exp_type, 0),
4271 if (n_high != 0 && TREE_OVERFLOW (n_high))
4277 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4278 build_int_cst (exp_type, 1));
4282 if (TREE_CODE (arg1) != INTEGER_CST)
4285 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4286 move a constant to the other side. */
4287 if (!TYPE_UNSIGNED (arg0_type)
4288 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4291 /* If EXP is signed, any overflow in the computation is undefined,
4292 so we don't worry about it so long as our computations on
4293 the bounds don't overflow. For unsigned, overflow is defined
4294 and this is exactly the right thing. */
4295 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4296 arg0_type, low, 0, arg1, 0);
4297 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4298 arg0_type, high, 1, arg1, 0);
4299 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4300 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4303 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4304 *strict_overflow_p = true;
4307 /* Check for an unsigned range which has wrapped around the maximum
4308 value thus making n_high < n_low, and normalize it. */
4309 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4311 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4312 build_int_cst (TREE_TYPE (n_high), 1), 0);
4313 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4314 build_int_cst (TREE_TYPE (n_low), 1), 0);
4316 /* If the range is of the form +/- [ x+1, x ], we won't
4317 be able to normalize it. But then, it represents the
4318 whole range or the empty set, so make it
4320 if (tree_int_cst_equal (n_low, low)
4321 && tree_int_cst_equal (n_high, high))
4327 low = n_low, high = n_high;
4335 case NON_LVALUE_EXPR:
4336 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4339 if (! INTEGRAL_TYPE_P (arg0_type)
4340 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4341 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4344 n_low = low, n_high = high;
4347 n_low = fold_convert_loc (loc, arg0_type, n_low);
4350 n_high = fold_convert_loc (loc, arg0_type, n_high);
4352 /* If we're converting arg0 from an unsigned type, to exp,
4353 a signed type, we will be doing the comparison as unsigned.
4354 The tests above have already verified that LOW and HIGH
4357 So we have to ensure that we will handle large unsigned
4358 values the same way that the current signed bounds treat
4361 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4365 /* For fixed-point modes, we need to pass the saturating flag
4366 as the 2nd parameter. */
4367 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4369 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4370 TYPE_SATURATING (arg0_type));
4373 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4375 /* A range without an upper bound is, naturally, unbounded.
4376 Since convert would have cropped a very large value, use
4377 the max value for the destination type. */
4379 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4380 : TYPE_MAX_VALUE (arg0_type);
4382 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4383 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4384 fold_convert_loc (loc, arg0_type,
4386 build_int_cst (arg0_type, 1));
4388 /* If the low bound is specified, "and" the range with the
4389 range for which the original unsigned value will be
4393 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4394 1, fold_convert_loc (loc, arg0_type,
4399 in_p = (n_in_p == in_p);
4403 /* Otherwise, "or" the range with the range of the input
4404 that will be interpreted as negative. */
4405 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4406 1, fold_convert_loc (loc, arg0_type,
4411 in_p = (in_p != n_in_p);
4425 /* Given EXP, a logical expression, set the range it is testing into
4426 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4427 actually being tested. *PLOW and *PHIGH will be made of the same
4428 type as the returned expression. If EXP is not a comparison, we
4429 will most likely not be returning a useful value and range. Set
4430 *STRICT_OVERFLOW_P to true if the return value is only valid
4431 because signed overflow is undefined; otherwise, do not change
4432 *STRICT_OVERFLOW_P. */
4435 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4436 bool *strict_overflow_p)
4438 enum tree_code code;
4439 tree arg0, arg1 = NULL_TREE;
4440 tree exp_type, nexp;
4443 location_t loc = EXPR_LOCATION (exp);
4445 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4446 and see if we can refine the range. Some of the cases below may not
4447 happen, but it doesn't seem worth worrying about this. We "continue"
4448 the outer loop when we've changed something; otherwise we "break"
4449 the switch, which will "break" the while. */
4452 low = high = build_int_cst (TREE_TYPE (exp), 0);
4456 code = TREE_CODE (exp);
4457 exp_type = TREE_TYPE (exp);
4460 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4462 if (TREE_OPERAND_LENGTH (exp) > 0)
4463 arg0 = TREE_OPERAND (exp, 0);
4464 if (TREE_CODE_CLASS (code) == tcc_binary
4465 || TREE_CODE_CLASS (code) == tcc_comparison
4466 || (TREE_CODE_CLASS (code) == tcc_expression
4467 && TREE_OPERAND_LENGTH (exp) > 1))
4468 arg1 = TREE_OPERAND (exp, 1);
4470 if (arg0 == NULL_TREE)
4473 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4474 &high, &in_p, strict_overflow_p);
4475 if (nexp == NULL_TREE)
4480 /* If EXP is a constant, we can evaluate whether this is true or false. */
4481 if (TREE_CODE (exp) == INTEGER_CST)
4483 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4485 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4491 *pin_p = in_p, *plow = low, *phigh = high;
4495 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4496 type, TYPE, return an expression to test if EXP is in (or out of, depending
4497 on IN_P) the range. Return 0 if the test couldn't be created. */
4500 build_range_check (location_t loc, tree type, tree exp, int in_p,
4501 tree low, tree high)
4503 tree etype = TREE_TYPE (exp), value;
4505 #ifdef HAVE_canonicalize_funcptr_for_compare
4506 /* Disable this optimization for function pointer expressions
4507 on targets that require function pointer canonicalization. */
4508 if (HAVE_canonicalize_funcptr_for_compare
4509 && TREE_CODE (etype) == POINTER_TYPE
4510 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4516 value = build_range_check (loc, type, exp, 1, low, high);
4518 return invert_truthvalue_loc (loc, value);
4523 if (low == 0 && high == 0)
4524 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4527 return fold_build2_loc (loc, LE_EXPR, type, exp,
4528 fold_convert_loc (loc, etype, high));
4531 return fold_build2_loc (loc, GE_EXPR, type, exp,
4532 fold_convert_loc (loc, etype, low));
4534 if (operand_equal_p (low, high, 0))
4535 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4536 fold_convert_loc (loc, etype, low));
4538 if (integer_zerop (low))
4540 if (! TYPE_UNSIGNED (etype))
4542 etype = unsigned_type_for (etype);
4543 high = fold_convert_loc (loc, etype, high);
4544 exp = fold_convert_loc (loc, etype, exp);
4546 return build_range_check (loc, type, exp, 1, 0, high);
4549 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4550 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4552 int prec = TYPE_PRECISION (etype);
4554 if (wi::mask (prec - 1, false, prec) == high)
4556 if (TYPE_UNSIGNED (etype))
4558 tree signed_etype = signed_type_for (etype);
4559 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4561 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4563 etype = signed_etype;
4564 exp = fold_convert_loc (loc, etype, exp);
4566 return fold_build2_loc (loc, GT_EXPR, type, exp,
4567 build_int_cst (etype, 0));
4571 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4572 This requires wrap-around arithmetics for the type of the expression.
4573 First make sure that arithmetics in this type is valid, then make sure
4574 that it wraps around. */
4575 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4576 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4577 TYPE_UNSIGNED (etype));
4579 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4581 tree utype, minv, maxv;
4583 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4584 for the type in question, as we rely on this here. */
4585 utype = unsigned_type_for (etype);
4586 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4587 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4588 build_int_cst (TREE_TYPE (maxv), 1), 1);
4589 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4591 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4598 high = fold_convert_loc (loc, etype, high);
4599 low = fold_convert_loc (loc, etype, low);
4600 exp = fold_convert_loc (loc, etype, exp);
4602 value = const_binop (MINUS_EXPR, high, low);
4605 if (POINTER_TYPE_P (etype))
4607 if (value != 0 && !TREE_OVERFLOW (value))
4609 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4610 return build_range_check (loc, type,
4611 fold_build_pointer_plus_loc (loc, exp, low),
4612 1, build_int_cst (etype, 0), value);
4617 if (value != 0 && !TREE_OVERFLOW (value))
4618 return build_range_check (loc, type,
4619 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4620 1, build_int_cst (etype, 0), value);
4625 /* Return the predecessor of VAL in its type, handling the infinite case. */
4628 range_predecessor (tree val)
4630 tree type = TREE_TYPE (val);
4632 if (INTEGRAL_TYPE_P (type)
4633 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4636 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4637 build_int_cst (TREE_TYPE (val), 1), 0);
4640 /* Return the successor of VAL in its type, handling the infinite case. */
4643 range_successor (tree val)
4645 tree type = TREE_TYPE (val);
4647 if (INTEGRAL_TYPE_P (type)
4648 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4651 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4652 build_int_cst (TREE_TYPE (val), 1), 0);
4655 /* Given two ranges, see if we can merge them into one. Return 1 if we
4656 can, 0 if we can't. Set the output range into the specified parameters. */
4659 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4660 tree high0, int in1_p, tree low1, tree high1)
4668 int lowequal = ((low0 == 0 && low1 == 0)
4669 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4670 low0, 0, low1, 0)));
4671 int highequal = ((high0 == 0 && high1 == 0)
4672 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4673 high0, 1, high1, 1)));
4675 /* Make range 0 be the range that starts first, or ends last if they
4676 start at the same value. Swap them if it isn't. */
4677 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4680 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4681 high1, 1, high0, 1))))
4683 temp = in0_p, in0_p = in1_p, in1_p = temp;
4684 tem = low0, low0 = low1, low1 = tem;
4685 tem = high0, high0 = high1, high1 = tem;
4688 /* Now flag two cases, whether the ranges are disjoint or whether the
4689 second range is totally subsumed in the first. Note that the tests
4690 below are simplified by the ones above. */
4691 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4692 high0, 1, low1, 0));
4693 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4694 high1, 1, high0, 1));
4696 /* We now have four cases, depending on whether we are including or
4697 excluding the two ranges. */
4700 /* If they don't overlap, the result is false. If the second range
4701 is a subset it is the result. Otherwise, the range is from the start
4702 of the second to the end of the first. */
4704 in_p = 0, low = high = 0;
4706 in_p = 1, low = low1, high = high1;
4708 in_p = 1, low = low1, high = high0;
4711 else if (in0_p && ! in1_p)
4713 /* If they don't overlap, the result is the first range. If they are
4714 equal, the result is false. If the second range is a subset of the
4715 first, and the ranges begin at the same place, we go from just after
4716 the end of the second range to the end of the first. If the second
4717 range is not a subset of the first, or if it is a subset and both
4718 ranges end at the same place, the range starts at the start of the
4719 first range and ends just before the second range.
4720 Otherwise, we can't describe this as a single range. */
4722 in_p = 1, low = low0, high = high0;
4723 else if (lowequal && highequal)
4724 in_p = 0, low = high = 0;
4725 else if (subset && lowequal)
4727 low = range_successor (high1);
4732 /* We are in the weird situation where high0 > high1 but
4733 high1 has no successor. Punt. */
4737 else if (! subset || highequal)
4740 high = range_predecessor (low1);
4744 /* low0 < low1 but low1 has no predecessor. Punt. */
4752 else if (! in0_p && in1_p)
4754 /* If they don't overlap, the result is the second range. If the second
4755 is a subset of the first, the result is false. Otherwise,
4756 the range starts just after the first range and ends at the
4757 end of the second. */
4759 in_p = 1, low = low1, high = high1;
4760 else if (subset || highequal)
4761 in_p = 0, low = high = 0;
4764 low = range_successor (high0);
4769 /* high1 > high0 but high0 has no successor. Punt. */
4777 /* The case where we are excluding both ranges. Here the complex case
4778 is if they don't overlap. In that case, the only time we have a
4779 range is if they are adjacent. If the second is a subset of the
4780 first, the result is the first. Otherwise, the range to exclude
4781 starts at the beginning of the first range and ends at the end of the
4785 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4786 range_successor (high0),
4788 in_p = 0, low = low0, high = high1;
4791 /* Canonicalize - [min, x] into - [-, x]. */
4792 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4793 switch (TREE_CODE (TREE_TYPE (low0)))
4796 if (TYPE_PRECISION (TREE_TYPE (low0))
4797 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4801 if (tree_int_cst_equal (low0,
4802 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4806 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4807 && integer_zerop (low0))
4814 /* Canonicalize - [x, max] into - [x, -]. */
4815 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4816 switch (TREE_CODE (TREE_TYPE (high1)))
4819 if (TYPE_PRECISION (TREE_TYPE (high1))
4820 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4824 if (tree_int_cst_equal (high1,
4825 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4829 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4830 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4832 build_int_cst (TREE_TYPE (high1), 1),
4840 /* The ranges might be also adjacent between the maximum and
4841 minimum values of the given type. For
4842 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4843 return + [x + 1, y - 1]. */
4844 if (low0 == 0 && high1 == 0)
4846 low = range_successor (high0);
4847 high = range_predecessor (low1);
4848 if (low == 0 || high == 0)
4858 in_p = 0, low = low0, high = high0;
4860 in_p = 0, low = low0, high = high1;
4863 *pin_p = in_p, *plow = low, *phigh = high;
4868 /* Subroutine of fold, looking inside expressions of the form
4869 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4870 of the COND_EXPR. This function is being used also to optimize
4871 A op B ? C : A, by reversing the comparison first.
4873 Return a folded expression whose code is not a COND_EXPR
4874 anymore, or NULL_TREE if no folding opportunity is found. */
4877 fold_cond_expr_with_comparison (location_t loc, tree type,
4878 tree arg0, tree arg1, tree arg2)
4880 enum tree_code comp_code = TREE_CODE (arg0);
4881 tree arg00 = TREE_OPERAND (arg0, 0);
4882 tree arg01 = TREE_OPERAND (arg0, 1);
4883 tree arg1_type = TREE_TYPE (arg1);
4889 /* If we have A op 0 ? A : -A, consider applying the following
4892 A == 0? A : -A same as -A
4893 A != 0? A : -A same as A
4894 A >= 0? A : -A same as abs (A)
4895 A > 0? A : -A same as abs (A)
4896 A <= 0? A : -A same as -abs (A)
4897 A < 0? A : -A same as -abs (A)
4899 None of these transformations work for modes with signed
4900 zeros. If A is +/-0, the first two transformations will
4901 change the sign of the result (from +0 to -0, or vice
4902 versa). The last four will fix the sign of the result,
4903 even though the original expressions could be positive or
4904 negative, depending on the sign of A.
4906 Note that all these transformations are correct if A is
4907 NaN, since the two alternatives (A and -A) are also NaNs. */
4908 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4909 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4910 ? real_zerop (arg01)
4911 : integer_zerop (arg01))
4912 && ((TREE_CODE (arg2) == NEGATE_EXPR
4913 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4914 /* In the case that A is of the form X-Y, '-A' (arg2) may
4915 have already been folded to Y-X, check for that. */
4916 || (TREE_CODE (arg1) == MINUS_EXPR
4917 && TREE_CODE (arg2) == MINUS_EXPR
4918 && operand_equal_p (TREE_OPERAND (arg1, 0),
4919 TREE_OPERAND (arg2, 1), 0)
4920 && operand_equal_p (TREE_OPERAND (arg1, 1),
4921 TREE_OPERAND (arg2, 0), 0))))
4926 tem = fold_convert_loc (loc, arg1_type, arg1);
4927 return pedantic_non_lvalue_loc (loc,
4928 fold_convert_loc (loc, type,
4929 negate_expr (tem)));
4932 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4935 if (flag_trapping_math)
4940 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4942 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4943 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4946 if (flag_trapping_math)
4950 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4952 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4953 return negate_expr (fold_convert_loc (loc, type, tem));
4955 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4959 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4960 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4961 both transformations are correct when A is NaN: A != 0
4962 is then true, and A == 0 is false. */
4964 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4965 && integer_zerop (arg01) && integer_zerop (arg2))
4967 if (comp_code == NE_EXPR)
4968 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4969 else if (comp_code == EQ_EXPR)
4970 return build_zero_cst (type);
4973 /* Try some transformations of A op B ? A : B.
4975 A == B? A : B same as B
4976 A != B? A : B same as A
4977 A >= B? A : B same as max (A, B)
4978 A > B? A : B same as max (B, A)
4979 A <= B? A : B same as min (A, B)
4980 A < B? A : B same as min (B, A)
4982 As above, these transformations don't work in the presence
4983 of signed zeros. For example, if A and B are zeros of
4984 opposite sign, the first two transformations will change
4985 the sign of the result. In the last four, the original
4986 expressions give different results for (A=+0, B=-0) and
4987 (A=-0, B=+0), but the transformed expressions do not.
4989 The first two transformations are correct if either A or B
4990 is a NaN. In the first transformation, the condition will
4991 be false, and B will indeed be chosen. In the case of the
4992 second transformation, the condition A != B will be true,
4993 and A will be chosen.
4995 The conversions to max() and min() are not correct if B is
4996 a number and A is not. The conditions in the original
4997 expressions will be false, so all four give B. The min()
4998 and max() versions would give a NaN instead. */
4999 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5000 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5001 /* Avoid these transformations if the COND_EXPR may be used
5002 as an lvalue in the C++ front-end. PR c++/19199. */
5004 || VECTOR_TYPE_P (type)
5005 || (! lang_GNU_CXX ()
5006 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5007 || ! maybe_lvalue_p (arg1)
5008 || ! maybe_lvalue_p (arg2)))
5010 tree comp_op0 = arg00;
5011 tree comp_op1 = arg01;
5012 tree comp_type = TREE_TYPE (comp_op0);
5014 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5015 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5025 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5027 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5032 /* In C++ a ?: expression can be an lvalue, so put the
5033 operand which will be used if they are equal first
5034 so that we can convert this back to the
5035 corresponding COND_EXPR. */
5036 if (!HONOR_NANS (arg1))
5038 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5039 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5040 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5041 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5042 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5043 comp_op1, comp_op0);
5044 return pedantic_non_lvalue_loc (loc,
5045 fold_convert_loc (loc, type, tem));
5052 if (!HONOR_NANS (arg1))
5054 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5055 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5056 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5057 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5058 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5059 comp_op1, comp_op0);
5060 return pedantic_non_lvalue_loc (loc,
5061 fold_convert_loc (loc, type, tem));
5065 if (!HONOR_NANS (arg1))
5066 return pedantic_non_lvalue_loc (loc,
5067 fold_convert_loc (loc, type, arg2));
5070 if (!HONOR_NANS (arg1))
5071 return pedantic_non_lvalue_loc (loc,
5072 fold_convert_loc (loc, type, arg1));
5075 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5080 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5081 we might still be able to simplify this. For example,
5082 if C1 is one less or one more than C2, this might have started
5083 out as a MIN or MAX and been transformed by this function.
5084 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5086 if (INTEGRAL_TYPE_P (type)
5087 && TREE_CODE (arg01) == INTEGER_CST
5088 && TREE_CODE (arg2) == INTEGER_CST)
5092 if (TREE_CODE (arg1) == INTEGER_CST)
5094 /* We can replace A with C1 in this case. */
5095 arg1 = fold_convert_loc (loc, type, arg01);
5096 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5099 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5100 MIN_EXPR, to preserve the signedness of the comparison. */
5101 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5103 && operand_equal_p (arg01,
5104 const_binop (PLUS_EXPR, arg2,
5105 build_int_cst (type, 1)),
5108 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5109 fold_convert_loc (loc, TREE_TYPE (arg00),
5111 return pedantic_non_lvalue_loc (loc,
5112 fold_convert_loc (loc, type, tem));
5117 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5119 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5121 && operand_equal_p (arg01,
5122 const_binop (MINUS_EXPR, arg2,
5123 build_int_cst (type, 1)),
5126 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5127 fold_convert_loc (loc, TREE_TYPE (arg00),
5129 return pedantic_non_lvalue_loc (loc,
5130 fold_convert_loc (loc, type, tem));
5135 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5136 MAX_EXPR, to preserve the signedness of the comparison. */
5137 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5139 && operand_equal_p (arg01,
5140 const_binop (MINUS_EXPR, arg2,
5141 build_int_cst (type, 1)),
5144 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5145 fold_convert_loc (loc, TREE_TYPE (arg00),
5147 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5152 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5153 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5155 && operand_equal_p (arg01,
5156 const_binop (PLUS_EXPR, arg2,
5157 build_int_cst (type, 1)),
5160 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5161 fold_convert_loc (loc, TREE_TYPE (arg00),
5163 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5177 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5178 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5179 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5183 /* EXP is some logical combination of boolean tests. See if we can
5184 merge it into some range test. Return the new tree if so. */
5187 fold_range_test (location_t loc, enum tree_code code, tree type,
5190 int or_op = (code == TRUTH_ORIF_EXPR
5191 || code == TRUTH_OR_EXPR);
5192 int in0_p, in1_p, in_p;
5193 tree low0, low1, low, high0, high1, high;
5194 bool strict_overflow_p = false;
5196 const char * const warnmsg = G_("assuming signed overflow does not occur "
5197 "when simplifying range test");
5199 if (!INTEGRAL_TYPE_P (type))
5202 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5203 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5205 /* If this is an OR operation, invert both sides; we will invert
5206 again at the end. */
5208 in0_p = ! in0_p, in1_p = ! in1_p;
5210 /* If both expressions are the same, if we can merge the ranges, and we
5211 can build the range test, return it or it inverted. If one of the
5212 ranges is always true or always false, consider it to be the same
5213 expression as the other. */
5214 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5215 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5217 && 0 != (tem = (build_range_check (loc, type,
5219 : rhs != 0 ? rhs : integer_zero_node,
5222 if (strict_overflow_p)
5223 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5224 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5227 /* On machines where the branch cost is expensive, if this is a
5228 short-circuited branch and the underlying object on both sides
5229 is the same, make a non-short-circuit operation. */
5230 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5231 && lhs != 0 && rhs != 0
5232 && (code == TRUTH_ANDIF_EXPR
5233 || code == TRUTH_ORIF_EXPR)
5234 && operand_equal_p (lhs, rhs, 0))
5236 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5237 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5238 which cases we can't do this. */
5239 if (simple_operand_p (lhs))
5240 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5241 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5244 else if (!lang_hooks.decls.global_bindings_p ()
5245 && !CONTAINS_PLACEHOLDER_P (lhs))
5247 tree common = save_expr (lhs);
5249 if (0 != (lhs = build_range_check (loc, type, common,
5250 or_op ? ! in0_p : in0_p,
5252 && (0 != (rhs = build_range_check (loc, type, common,
5253 or_op ? ! in1_p : in1_p,
5256 if (strict_overflow_p)
5257 fold_overflow_warning (warnmsg,
5258 WARN_STRICT_OVERFLOW_COMPARISON);
5259 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5260 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5269 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5270 bit value. Arrange things so the extra bits will be set to zero if and
5271 only if C is signed-extended to its full width. If MASK is nonzero,
5272 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5275 unextend (tree c, int p, int unsignedp, tree mask)
5277 tree type = TREE_TYPE (c);
5278 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5281 if (p == modesize || unsignedp)
5284 /* We work by getting just the sign bit into the low-order bit, then
5285 into the high-order bit, then sign-extend. We then XOR that value
5287 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5289 /* We must use a signed type in order to get an arithmetic right shift.
5290 However, we must also avoid introducing accidental overflows, so that
5291 a subsequent call to integer_zerop will work. Hence we must
5292 do the type conversion here. At this point, the constant is either
5293 zero or one, and the conversion to a signed type can never overflow.
5294 We could get an overflow if this conversion is done anywhere else. */
5295 if (TYPE_UNSIGNED (type))
5296 temp = fold_convert (signed_type_for (type), temp);
5298 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5299 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5301 temp = const_binop (BIT_AND_EXPR, temp,
5302 fold_convert (TREE_TYPE (c), mask));
5303 /* If necessary, convert the type back to match the type of C. */
5304 if (TYPE_UNSIGNED (type))
5305 temp = fold_convert (type, temp);
5307 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5310 /* For an expression that has the form
5314 we can drop one of the inner expressions and simplify to
5318 LOC is the location of the resulting expression. OP is the inner
5319 logical operation; the left-hand side in the examples above, while CMPOP
5320 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5321 removing a condition that guards another, as in
5322 (A != NULL && A->...) || A == NULL
5323 which we must not transform. If RHS_ONLY is true, only eliminate the
5324 right-most operand of the inner logical operation. */
5327 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5330 tree type = TREE_TYPE (cmpop);
5331 enum tree_code code = TREE_CODE (cmpop);
5332 enum tree_code truthop_code = TREE_CODE (op);
5333 tree lhs = TREE_OPERAND (op, 0);
5334 tree rhs = TREE_OPERAND (op, 1);
5335 tree orig_lhs = lhs, orig_rhs = rhs;
5336 enum tree_code rhs_code = TREE_CODE (rhs);
5337 enum tree_code lhs_code = TREE_CODE (lhs);
5338 enum tree_code inv_code;
5340 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5343 if (TREE_CODE_CLASS (code) != tcc_comparison)
5346 if (rhs_code == truthop_code)
5348 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5349 if (newrhs != NULL_TREE)
5352 rhs_code = TREE_CODE (rhs);
5355 if (lhs_code == truthop_code && !rhs_only)
5357 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5358 if (newlhs != NULL_TREE)
5361 lhs_code = TREE_CODE (lhs);
5365 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5366 if (inv_code == rhs_code
5367 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5368 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5370 if (!rhs_only && inv_code == lhs_code
5371 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5372 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5374 if (rhs != orig_rhs || lhs != orig_lhs)
5375 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5380 /* Find ways of folding logical expressions of LHS and RHS:
5381 Try to merge two comparisons to the same innermost item.
5382 Look for range tests like "ch >= '0' && ch <= '9'".
5383 Look for combinations of simple terms on machines with expensive branches
5384 and evaluate the RHS unconditionally.
5386 For example, if we have p->a == 2 && p->b == 4 and we can make an
5387 object large enough to span both A and B, we can do this with a comparison
5388 against the object ANDed with the a mask.
5390 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5391 operations to do this with one comparison.
5393 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5394 function and the one above.
5396 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5397 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5399 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5402 We return the simplified tree or 0 if no optimization is possible. */
5405 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5408 /* If this is the "or" of two comparisons, we can do something if
5409 the comparisons are NE_EXPR. If this is the "and", we can do something
5410 if the comparisons are EQ_EXPR. I.e.,
5411 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5413 WANTED_CODE is this operation code. For single bit fields, we can
5414 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5415 comparison for one-bit fields. */
5417 enum tree_code wanted_code;
5418 enum tree_code lcode, rcode;
5419 tree ll_arg, lr_arg, rl_arg, rr_arg;
5420 tree ll_inner, lr_inner, rl_inner, rr_inner;
5421 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5422 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5423 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5424 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5425 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5426 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5427 machine_mode lnmode, rnmode;
5428 tree ll_mask, lr_mask, rl_mask, rr_mask;
5429 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5430 tree l_const, r_const;
5431 tree lntype, rntype, result;
5432 HOST_WIDE_INT first_bit, end_bit;
5435 /* Start by getting the comparison codes. Fail if anything is volatile.
5436 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5437 it were surrounded with a NE_EXPR. */
5439 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5442 lcode = TREE_CODE (lhs);
5443 rcode = TREE_CODE (rhs);
5445 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5447 lhs = build2 (NE_EXPR, truth_type, lhs,
5448 build_int_cst (TREE_TYPE (lhs), 0));
5452 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5454 rhs = build2 (NE_EXPR, truth_type, rhs,
5455 build_int_cst (TREE_TYPE (rhs), 0));
5459 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5460 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5463 ll_arg = TREE_OPERAND (lhs, 0);
5464 lr_arg = TREE_OPERAND (lhs, 1);
5465 rl_arg = TREE_OPERAND (rhs, 0);
5466 rr_arg = TREE_OPERAND (rhs, 1);
5468 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5469 if (simple_operand_p (ll_arg)
5470 && simple_operand_p (lr_arg))
5472 if (operand_equal_p (ll_arg, rl_arg, 0)
5473 && operand_equal_p (lr_arg, rr_arg, 0))
5475 result = combine_comparisons (loc, code, lcode, rcode,
5476 truth_type, ll_arg, lr_arg);
5480 else if (operand_equal_p (ll_arg, rr_arg, 0)
5481 && operand_equal_p (lr_arg, rl_arg, 0))
5483 result = combine_comparisons (loc, code, lcode,
5484 swap_tree_comparison (rcode),
5485 truth_type, ll_arg, lr_arg);
5491 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5492 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5494 /* If the RHS can be evaluated unconditionally and its operands are
5495 simple, it wins to evaluate the RHS unconditionally on machines
5496 with expensive branches. In this case, this isn't a comparison
5497 that can be merged. */
5499 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5501 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5502 && simple_operand_p (rl_arg)
5503 && simple_operand_p (rr_arg))
5505 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5506 if (code == TRUTH_OR_EXPR
5507 && lcode == NE_EXPR && integer_zerop (lr_arg)
5508 && rcode == NE_EXPR && integer_zerop (rr_arg)
5509 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5510 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5511 return build2_loc (loc, NE_EXPR, truth_type,
5512 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5514 build_int_cst (TREE_TYPE (ll_arg), 0));
5516 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5517 if (code == TRUTH_AND_EXPR
5518 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5519 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5520 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5521 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5522 return build2_loc (loc, EQ_EXPR, truth_type,
5523 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5525 build_int_cst (TREE_TYPE (ll_arg), 0));
5528 /* See if the comparisons can be merged. Then get all the parameters for
5531 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5532 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5536 ll_inner = decode_field_reference (loc, ll_arg,
5537 &ll_bitsize, &ll_bitpos, &ll_mode,
5538 &ll_unsignedp, &volatilep, &ll_mask,
5540 lr_inner = decode_field_reference (loc, lr_arg,
5541 &lr_bitsize, &lr_bitpos, &lr_mode,
5542 &lr_unsignedp, &volatilep, &lr_mask,
5544 rl_inner = decode_field_reference (loc, rl_arg,
5545 &rl_bitsize, &rl_bitpos, &rl_mode,
5546 &rl_unsignedp, &volatilep, &rl_mask,
5548 rr_inner = decode_field_reference (loc, rr_arg,
5549 &rr_bitsize, &rr_bitpos, &rr_mode,
5550 &rr_unsignedp, &volatilep, &rr_mask,
5553 /* It must be true that the inner operation on the lhs of each
5554 comparison must be the same if we are to be able to do anything.
5555 Then see if we have constants. If not, the same must be true for
5557 if (volatilep || ll_inner == 0 || rl_inner == 0
5558 || ! operand_equal_p (ll_inner, rl_inner, 0))
5561 if (TREE_CODE (lr_arg) == INTEGER_CST
5562 && TREE_CODE (rr_arg) == INTEGER_CST)
5563 l_const = lr_arg, r_const = rr_arg;
5564 else if (lr_inner == 0 || rr_inner == 0
5565 || ! operand_equal_p (lr_inner, rr_inner, 0))
5568 l_const = r_const = 0;
5570 /* If either comparison code is not correct for our logical operation,
5571 fail. However, we can convert a one-bit comparison against zero into
5572 the opposite comparison against that bit being set in the field. */
5574 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5575 if (lcode != wanted_code)
5577 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5579 /* Make the left operand unsigned, since we are only interested
5580 in the value of one bit. Otherwise we are doing the wrong
5589 /* This is analogous to the code for l_const above. */
5590 if (rcode != wanted_code)
5592 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5601 /* See if we can find a mode that contains both fields being compared on
5602 the left. If we can't, fail. Otherwise, update all constants and masks
5603 to be relative to a field of that size. */
5604 first_bit = MIN (ll_bitpos, rl_bitpos);
5605 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5606 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5607 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5609 if (lnmode == VOIDmode)
5612 lnbitsize = GET_MODE_BITSIZE (lnmode);
5613 lnbitpos = first_bit & ~ (lnbitsize - 1);
5614 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5615 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5617 if (BYTES_BIG_ENDIAN)
5619 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5620 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5623 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5624 size_int (xll_bitpos));
5625 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5626 size_int (xrl_bitpos));
5630 l_const = fold_convert_loc (loc, lntype, l_const);
5631 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5632 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5633 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5634 fold_build1_loc (loc, BIT_NOT_EXPR,
5637 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5639 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5644 r_const = fold_convert_loc (loc, lntype, r_const);
5645 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5646 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5647 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5648 fold_build1_loc (loc, BIT_NOT_EXPR,
5651 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5653 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5657 /* If the right sides are not constant, do the same for it. Also,
5658 disallow this optimization if a size or signedness mismatch occurs
5659 between the left and right sides. */
5662 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5663 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5664 /* Make sure the two fields on the right
5665 correspond to the left without being swapped. */
5666 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5669 first_bit = MIN (lr_bitpos, rr_bitpos);
5670 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5671 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5672 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5674 if (rnmode == VOIDmode)
5677 rnbitsize = GET_MODE_BITSIZE (rnmode);
5678 rnbitpos = first_bit & ~ (rnbitsize - 1);
5679 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5680 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5682 if (BYTES_BIG_ENDIAN)
5684 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5685 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5688 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5690 size_int (xlr_bitpos));
5691 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5693 size_int (xrr_bitpos));
5695 /* Make a mask that corresponds to both fields being compared.
5696 Do this for both items being compared. If the operands are the
5697 same size and the bits being compared are in the same position
5698 then we can do this by masking both and comparing the masked
5700 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5701 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5702 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5704 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5705 ll_unsignedp || rl_unsignedp);
5706 if (! all_ones_mask_p (ll_mask, lnbitsize))
5707 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5709 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5710 lr_unsignedp || rr_unsignedp);
5711 if (! all_ones_mask_p (lr_mask, rnbitsize))
5712 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5714 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5717 /* There is still another way we can do something: If both pairs of
5718 fields being compared are adjacent, we may be able to make a wider
5719 field containing them both.
5721 Note that we still must mask the lhs/rhs expressions. Furthermore,
5722 the mask must be shifted to account for the shift done by
5723 make_bit_field_ref. */
5724 if ((ll_bitsize + ll_bitpos == rl_bitpos
5725 && lr_bitsize + lr_bitpos == rr_bitpos)
5726 || (ll_bitpos == rl_bitpos + rl_bitsize
5727 && lr_bitpos == rr_bitpos + rr_bitsize))
5731 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5732 ll_bitsize + rl_bitsize,
5733 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5734 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5735 lr_bitsize + rr_bitsize,
5736 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5738 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5739 size_int (MIN (xll_bitpos, xrl_bitpos)));
5740 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5741 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5743 /* Convert to the smaller type before masking out unwanted bits. */
5745 if (lntype != rntype)
5747 if (lnbitsize > rnbitsize)
5749 lhs = fold_convert_loc (loc, rntype, lhs);
5750 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5753 else if (lnbitsize < rnbitsize)
5755 rhs = fold_convert_loc (loc, lntype, rhs);
5756 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5761 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5762 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5764 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5765 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5767 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5773 /* Handle the case of comparisons with constants. If there is something in
5774 common between the masks, those bits of the constants must be the same.
5775 If not, the condition is always false. Test for this to avoid generating
5776 incorrect code below. */
5777 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5778 if (! integer_zerop (result)
5779 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5780 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5782 if (wanted_code == NE_EXPR)
5784 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5785 return constant_boolean_node (true, truth_type);
5789 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5790 return constant_boolean_node (false, truth_type);
5794 /* Construct the expression we will return. First get the component
5795 reference we will make. Unless the mask is all ones the width of
5796 that field, perform the mask operation. Then compare with the
5798 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5799 ll_unsignedp || rl_unsignedp);
5801 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5802 if (! all_ones_mask_p (ll_mask, lnbitsize))
5803 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5805 return build2_loc (loc, wanted_code, truth_type, result,
5806 const_binop (BIT_IOR_EXPR, l_const, r_const));
5809 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5813 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5817 enum tree_code op_code;
5820 int consts_equal, consts_lt;
5823 STRIP_SIGN_NOPS (arg0);
5825 op_code = TREE_CODE (arg0);
5826 minmax_const = TREE_OPERAND (arg0, 1);
5827 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5828 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5829 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5830 inner = TREE_OPERAND (arg0, 0);
5832 /* If something does not permit us to optimize, return the original tree. */
5833 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5834 || TREE_CODE (comp_const) != INTEGER_CST
5835 || TREE_OVERFLOW (comp_const)
5836 || TREE_CODE (minmax_const) != INTEGER_CST
5837 || TREE_OVERFLOW (minmax_const))
5840 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5841 and GT_EXPR, doing the rest with recursive calls using logical
5845 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5848 = optimize_minmax_comparison (loc,
5849 invert_tree_comparison (code, false),
5852 return invert_truthvalue_loc (loc, tem);
5858 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5859 optimize_minmax_comparison
5860 (loc, EQ_EXPR, type, arg0, comp_const),
5861 optimize_minmax_comparison
5862 (loc, GT_EXPR, type, arg0, comp_const));
5865 if (op_code == MAX_EXPR && consts_equal)
5866 /* MAX (X, 0) == 0 -> X <= 0 */
5867 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5869 else if (op_code == MAX_EXPR && consts_lt)
5870 /* MAX (X, 0) == 5 -> X == 5 */
5871 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5873 else if (op_code == MAX_EXPR)
5874 /* MAX (X, 0) == -1 -> false */
5875 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5877 else if (consts_equal)
5878 /* MIN (X, 0) == 0 -> X >= 0 */
5879 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5882 /* MIN (X, 0) == 5 -> false */
5883 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5886 /* MIN (X, 0) == -1 -> X == -1 */
5887 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5890 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5891 /* MAX (X, 0) > 0 -> X > 0
5892 MAX (X, 0) > 5 -> X > 5 */
5893 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5895 else if (op_code == MAX_EXPR)
5896 /* MAX (X, 0) > -1 -> true */
5897 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5899 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5900 /* MIN (X, 0) > 0 -> false
5901 MIN (X, 0) > 5 -> false */
5902 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5905 /* MIN (X, 0) > -1 -> X > -1 */
5906 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5913 /* T is an integer expression that is being multiplied, divided, or taken a
5914 modulus (CODE says which and what kind of divide or modulus) by a
5915 constant C. See if we can eliminate that operation by folding it with
5916 other operations already in T. WIDE_TYPE, if non-null, is a type that
5917 should be used for the computation if wider than our type.
5919 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5920 (X * 2) + (Y * 4). We must, however, be assured that either the original
5921 expression would not overflow or that overflow is undefined for the type
5922 in the language in question.
5924 If we return a non-null expression, it is an equivalent form of the
5925 original computation, but need not be in the original type.
5927 We set *STRICT_OVERFLOW_P to true if the return values depends on
5928 signed overflow being undefined. Otherwise we do not change
5929 *STRICT_OVERFLOW_P. */
5932 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5933 bool *strict_overflow_p)
5935 /* To avoid exponential search depth, refuse to allow recursion past
5936 three levels. Beyond that (1) it's highly unlikely that we'll find
5937 something interesting and (2) we've probably processed it before
5938 when we built the inner expression. */
5947 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5954 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5955 bool *strict_overflow_p)
5957 tree type = TREE_TYPE (t);
5958 enum tree_code tcode = TREE_CODE (t);
5959 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5960 > GET_MODE_SIZE (TYPE_MODE (type)))
5961 ? wide_type : type);
5963 int same_p = tcode == code;
5964 tree op0 = NULL_TREE, op1 = NULL_TREE;
5965 bool sub_strict_overflow_p;
5967 /* Don't deal with constants of zero here; they confuse the code below. */
5968 if (integer_zerop (c))
5971 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5972 op0 = TREE_OPERAND (t, 0);
5974 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5975 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5977 /* Note that we need not handle conditional operations here since fold
5978 already handles those cases. So just do arithmetic here. */
5982 /* For a constant, we can always simplify if we are a multiply
5983 or (for divide and modulus) if it is a multiple of our constant. */
5984 if (code == MULT_EXPR
5985 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5986 return const_binop (code, fold_convert (ctype, t),
5987 fold_convert (ctype, c));
5990 CASE_CONVERT: case NON_LVALUE_EXPR:
5991 /* If op0 is an expression ... */
5992 if ((COMPARISON_CLASS_P (op0)
5993 || UNARY_CLASS_P (op0)
5994 || BINARY_CLASS_P (op0)
5995 || VL_EXP_CLASS_P (op0)
5996 || EXPRESSION_CLASS_P (op0))
5997 /* ... and has wrapping overflow, and its type is smaller
5998 than ctype, then we cannot pass through as widening. */
5999 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6000 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6001 && (TYPE_PRECISION (ctype)
6002 > TYPE_PRECISION (TREE_TYPE (op0))))
6003 /* ... or this is a truncation (t is narrower than op0),
6004 then we cannot pass through this narrowing. */
6005 || (TYPE_PRECISION (type)
6006 < TYPE_PRECISION (TREE_TYPE (op0)))
6007 /* ... or signedness changes for division or modulus,
6008 then we cannot pass through this conversion. */
6009 || (code != MULT_EXPR
6010 && (TYPE_UNSIGNED (ctype)
6011 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6012 /* ... or has undefined overflow while the converted to
6013 type has not, we cannot do the operation in the inner type
6014 as that would introduce undefined overflow. */
6015 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6016 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6017 && !TYPE_OVERFLOW_UNDEFINED (type))))
6020 /* Pass the constant down and see if we can make a simplification. If
6021 we can, replace this expression with the inner simplification for
6022 possible later conversion to our or some other type. */
6023 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6024 && TREE_CODE (t2) == INTEGER_CST
6025 && !TREE_OVERFLOW (t2)
6026 && (0 != (t1 = extract_muldiv (op0, t2, code,
6028 ? ctype : NULL_TREE,
6029 strict_overflow_p))))
6034 /* If widening the type changes it from signed to unsigned, then we
6035 must avoid building ABS_EXPR itself as unsigned. */
6036 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6038 tree cstype = (*signed_type_for) (ctype);
6039 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6042 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6043 return fold_convert (ctype, t1);
6047 /* If the constant is negative, we cannot simplify this. */
6048 if (tree_int_cst_sgn (c) == -1)
6052 /* For division and modulus, type can't be unsigned, as e.g.
6053 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6054 For signed types, even with wrapping overflow, this is fine. */
6055 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6057 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6059 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6062 case MIN_EXPR: case MAX_EXPR:
6063 /* If widening the type changes the signedness, then we can't perform
6064 this optimization as that changes the result. */
6065 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6068 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6069 sub_strict_overflow_p = false;
6070 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6071 &sub_strict_overflow_p)) != 0
6072 && (t2 = extract_muldiv (op1, c, code, wide_type,
6073 &sub_strict_overflow_p)) != 0)
6075 if (tree_int_cst_sgn (c) < 0)
6076 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6077 if (sub_strict_overflow_p)
6078 *strict_overflow_p = true;
6079 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6080 fold_convert (ctype, t2));
6084 case LSHIFT_EXPR: case RSHIFT_EXPR:
6085 /* If the second operand is constant, this is a multiplication
6086 or floor division, by a power of two, so we can treat it that
6087 way unless the multiplier or divisor overflows. Signed
6088 left-shift overflow is implementation-defined rather than
6089 undefined in C90, so do not convert signed left shift into
6091 if (TREE_CODE (op1) == INTEGER_CST
6092 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6093 /* const_binop may not detect overflow correctly,
6094 so check for it explicitly here. */
6095 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6096 && 0 != (t1 = fold_convert (ctype,
6097 const_binop (LSHIFT_EXPR,
6100 && !TREE_OVERFLOW (t1))
6101 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6102 ? MULT_EXPR : FLOOR_DIV_EXPR,
6104 fold_convert (ctype, op0),
6106 c, code, wide_type, strict_overflow_p);
6109 case PLUS_EXPR: case MINUS_EXPR:
6110 /* See if we can eliminate the operation on both sides. If we can, we
6111 can return a new PLUS or MINUS. If we can't, the only remaining
6112 cases where we can do anything are if the second operand is a
6114 sub_strict_overflow_p = false;
6115 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6116 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6117 if (t1 != 0 && t2 != 0
6118 && (code == MULT_EXPR
6119 /* If not multiplication, we can only do this if both operands
6120 are divisible by c. */
6121 || (multiple_of_p (ctype, op0, c)
6122 && multiple_of_p (ctype, op1, c))))
6124 if (sub_strict_overflow_p)
6125 *strict_overflow_p = true;
6126 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6127 fold_convert (ctype, t2));
6130 /* If this was a subtraction, negate OP1 and set it to be an addition.
6131 This simplifies the logic below. */
6132 if (tcode == MINUS_EXPR)
6134 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6135 /* If OP1 was not easily negatable, the constant may be OP0. */
6136 if (TREE_CODE (op0) == INTEGER_CST)
6147 if (TREE_CODE (op1) != INTEGER_CST)
6150 /* If either OP1 or C are negative, this optimization is not safe for
6151 some of the division and remainder types while for others we need
6152 to change the code. */
6153 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6155 if (code == CEIL_DIV_EXPR)
6156 code = FLOOR_DIV_EXPR;
6157 else if (code == FLOOR_DIV_EXPR)
6158 code = CEIL_DIV_EXPR;
6159 else if (code != MULT_EXPR
6160 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6164 /* If it's a multiply or a division/modulus operation of a multiple
6165 of our constant, do the operation and verify it doesn't overflow. */
6166 if (code == MULT_EXPR
6167 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6169 op1 = const_binop (code, fold_convert (ctype, op1),
6170 fold_convert (ctype, c));
6171 /* We allow the constant to overflow with wrapping semantics. */
6173 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6179 /* If we have an unsigned type, we cannot widen the operation since it
6180 will change the result if the original computation overflowed. */
6181 if (TYPE_UNSIGNED (ctype) && ctype != type)
6184 /* If we were able to eliminate our operation from the first side,
6185 apply our operation to the second side and reform the PLUS. */
6186 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6187 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6189 /* The last case is if we are a multiply. In that case, we can
6190 apply the distributive law to commute the multiply and addition
6191 if the multiplication of the constants doesn't overflow
6192 and overflow is defined. With undefined overflow
6193 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6194 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6195 return fold_build2 (tcode, ctype,
6196 fold_build2 (code, ctype,
6197 fold_convert (ctype, op0),
6198 fold_convert (ctype, c)),
6204 /* We have a special case here if we are doing something like
6205 (C * 8) % 4 since we know that's zero. */
6206 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6207 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6208 /* If the multiplication can overflow we cannot optimize this. */
6209 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6210 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6211 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6213 *strict_overflow_p = true;
6214 return omit_one_operand (type, integer_zero_node, op0);
6217 /* ... fall through ... */
6219 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6220 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6221 /* If we can extract our operation from the LHS, do so and return a
6222 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6223 do something only if the second operand is a constant. */
6225 && (t1 = extract_muldiv (op0, c, code, wide_type,
6226 strict_overflow_p)) != 0)
6227 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6228 fold_convert (ctype, op1));
6229 else if (tcode == MULT_EXPR && code == MULT_EXPR
6230 && (t1 = extract_muldiv (op1, c, code, wide_type,
6231 strict_overflow_p)) != 0)
6232 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6233 fold_convert (ctype, t1));
6234 else if (TREE_CODE (op1) != INTEGER_CST)
6237 /* If these are the same operation types, we can associate them
6238 assuming no overflow. */
6241 bool overflow_p = false;
6242 bool overflow_mul_p;
6243 signop sign = TYPE_SIGN (ctype);
6244 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6245 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6247 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6251 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6252 TYPE_SIGN (TREE_TYPE (op1)));
6253 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6254 wide_int_to_tree (ctype, mul));
6258 /* If these operations "cancel" each other, we have the main
6259 optimizations of this pass, which occur when either constant is a
6260 multiple of the other, in which case we replace this with either an
6261 operation or CODE or TCODE.
6263 If we have an unsigned type, we cannot do this since it will change
6264 the result if the original computation overflowed. */
6265 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6266 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6267 || (tcode == MULT_EXPR
6268 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6269 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6270 && code != MULT_EXPR)))
6272 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6274 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6275 *strict_overflow_p = true;
6276 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6277 fold_convert (ctype,
6278 const_binop (TRUNC_DIV_EXPR,
6281 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6283 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6284 *strict_overflow_p = true;
6285 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6286 fold_convert (ctype,
6287 const_binop (TRUNC_DIV_EXPR,
6300 /* Return a node which has the indicated constant VALUE (either 0 or
6301 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6302 and is of the indicated TYPE. */
6305 constant_boolean_node (bool value, tree type)
6307 if (type == integer_type_node)
6308 return value ? integer_one_node : integer_zero_node;
6309 else if (type == boolean_type_node)
6310 return value ? boolean_true_node : boolean_false_node;
6311 else if (TREE_CODE (type) == VECTOR_TYPE)
6312 return build_vector_from_val (type,
6313 build_int_cst (TREE_TYPE (type),
6316 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6320 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6321 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6322 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6323 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6324 COND is the first argument to CODE; otherwise (as in the example
6325 given here), it is the second argument. TYPE is the type of the
6326 original expression. Return NULL_TREE if no simplification is
6330 fold_binary_op_with_conditional_arg (location_t loc,
6331 enum tree_code code,
6332 tree type, tree op0, tree op1,
6333 tree cond, tree arg, int cond_first_p)
6335 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6336 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6337 tree test, true_value, false_value;
6338 tree lhs = NULL_TREE;
6339 tree rhs = NULL_TREE;
6340 enum tree_code cond_code = COND_EXPR;
6342 if (TREE_CODE (cond) == COND_EXPR
6343 || TREE_CODE (cond) == VEC_COND_EXPR)
6345 test = TREE_OPERAND (cond, 0);
6346 true_value = TREE_OPERAND (cond, 1);
6347 false_value = TREE_OPERAND (cond, 2);
6348 /* If this operand throws an expression, then it does not make
6349 sense to try to perform a logical or arithmetic operation
6351 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6353 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6358 tree testtype = TREE_TYPE (cond);
6360 true_value = constant_boolean_node (true, testtype);
6361 false_value = constant_boolean_node (false, testtype);
6364 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6365 cond_code = VEC_COND_EXPR;
6367 /* This transformation is only worthwhile if we don't have to wrap ARG
6368 in a SAVE_EXPR and the operation can be simplified without recursing
6369 on at least one of the branches once its pushed inside the COND_EXPR. */
6370 if (!TREE_CONSTANT (arg)
6371 && (TREE_SIDE_EFFECTS (arg)
6372 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6373 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6376 arg = fold_convert_loc (loc, arg_type, arg);
6379 true_value = fold_convert_loc (loc, cond_type, true_value);
6381 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6383 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6387 false_value = fold_convert_loc (loc, cond_type, false_value);
6389 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6391 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6394 /* Check that we have simplified at least one of the branches. */
6395 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6398 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6402 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6404 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6405 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6406 ADDEND is the same as X.
6408 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6409 and finite. The problematic cases are when X is zero, and its mode
6410 has signed zeros. In the case of rounding towards -infinity,
6411 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6412 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6415 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6417 if (!real_zerop (addend))
6420 /* Don't allow the fold with -fsignaling-nans. */
6421 if (HONOR_SNANS (element_mode (type)))
6424 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6425 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6428 /* In a vector or complex, we would need to check the sign of all zeros. */
6429 if (TREE_CODE (addend) != REAL_CST)
6432 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6433 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6436 /* The mode has signed zeros, and we have to honor their sign.
6437 In this situation, there is only one case we can return true for.
6438 X - 0 is the same as X unless rounding towards -infinity is
6440 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6443 /* Subroutine of fold() that checks comparisons of built-in math
6444 functions against real constants.
6446 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6447 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6448 is the type of the result and ARG0 and ARG1 are the operands of the
6449 comparison. ARG1 must be a TREE_REAL_CST.
6451 The function returns the constant folded tree if a simplification
6452 can be made, and NULL_TREE otherwise. */
6455 fold_mathfn_compare (location_t loc,
6456 enum built_in_function fcode, enum tree_code code,
6457 tree type, tree arg0, tree arg1)
6461 if (BUILTIN_SQRT_P (fcode))
6463 tree arg = CALL_EXPR_ARG (arg0, 0);
6464 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6466 c = TREE_REAL_CST (arg1);
6467 if (REAL_VALUE_NEGATIVE (c))
6469 /* sqrt(x) < y is always false, if y is negative. */
6470 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6471 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6473 /* sqrt(x) > y is always true, if y is negative and we
6474 don't care about NaNs, i.e. negative values of x. */
6475 if (code == NE_EXPR || !HONOR_NANS (mode))
6476 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6478 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6479 return fold_build2_loc (loc, GE_EXPR, type, arg,
6480 build_real (TREE_TYPE (arg), dconst0));
6482 else if (code == GT_EXPR || code == GE_EXPR)
6486 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6487 real_convert (&c2, mode, &c2);
6489 if (REAL_VALUE_ISINF (c2))
6491 /* sqrt(x) > y is x == +Inf, when y is very large. */
6492 if (HONOR_INFINITIES (mode))
6493 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6494 build_real (TREE_TYPE (arg), c2));
6496 /* sqrt(x) > y is always false, when y is very large
6497 and we don't care about infinities. */
6498 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6501 /* sqrt(x) > c is the same as x > c*c. */
6502 return fold_build2_loc (loc, code, type, arg,
6503 build_real (TREE_TYPE (arg), c2));
6505 else if (code == LT_EXPR || code == LE_EXPR)
6509 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6510 real_convert (&c2, mode, &c2);
6512 if (REAL_VALUE_ISINF (c2))
6514 /* sqrt(x) < y is always true, when y is a very large
6515 value and we don't care about NaNs or Infinities. */
6516 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6517 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6519 /* sqrt(x) < y is x != +Inf when y is very large and we
6520 don't care about NaNs. */
6521 if (! HONOR_NANS (mode))
6522 return fold_build2_loc (loc, NE_EXPR, type, arg,
6523 build_real (TREE_TYPE (arg), c2));
6525 /* sqrt(x) < y is x >= 0 when y is very large and we
6526 don't care about Infinities. */
6527 if (! HONOR_INFINITIES (mode))
6528 return fold_build2_loc (loc, GE_EXPR, type, arg,
6529 build_real (TREE_TYPE (arg), dconst0));
6531 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6532 arg = save_expr (arg);
6533 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6534 fold_build2_loc (loc, GE_EXPR, type, arg,
6535 build_real (TREE_TYPE (arg),
6537 fold_build2_loc (loc, NE_EXPR, type, arg,
6538 build_real (TREE_TYPE (arg),
6542 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6543 if (! HONOR_NANS (mode))
6544 return fold_build2_loc (loc, code, type, arg,
6545 build_real (TREE_TYPE (arg), c2));
6547 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6548 arg = save_expr (arg);
6549 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6550 fold_build2_loc (loc, GE_EXPR, type, arg,
6551 build_real (TREE_TYPE (arg),
6553 fold_build2_loc (loc, code, type, arg,
6554 build_real (TREE_TYPE (arg),
6562 /* Subroutine of fold() that optimizes comparisons against Infinities,
6563 either +Inf or -Inf.
6565 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6566 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6567 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6569 The function returns the constant folded tree if a simplification
6570 can be made, and NULL_TREE otherwise. */
6573 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6574 tree arg0, tree arg1)
6577 REAL_VALUE_TYPE max;
6581 mode = TYPE_MODE (TREE_TYPE (arg0));
6583 /* For negative infinity swap the sense of the comparison. */
6584 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6586 code = swap_tree_comparison (code);
6591 /* x > +Inf is always false, if with ignore sNANs. */
6592 if (HONOR_SNANS (mode))
6594 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6597 /* x <= +Inf is always true, if we don't case about NaNs. */
6598 if (! HONOR_NANS (mode))
6599 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6601 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6602 arg0 = save_expr (arg0);
6603 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6607 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6608 real_maxval (&max, neg, mode);
6609 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6610 arg0, build_real (TREE_TYPE (arg0), max));
6613 /* x < +Inf is always equal to x <= DBL_MAX. */
6614 real_maxval (&max, neg, mode);
6615 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6616 arg0, build_real (TREE_TYPE (arg0), max));
6619 /* x != +Inf is always equal to !(x > DBL_MAX). */
6620 real_maxval (&max, neg, mode);
6621 if (! HONOR_NANS (mode))
6622 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6623 arg0, build_real (TREE_TYPE (arg0), max));
6625 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6626 arg0, build_real (TREE_TYPE (arg0), max));
6627 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6636 /* Subroutine of fold() that optimizes comparisons of a division by
6637 a nonzero integer constant against an integer constant, i.e.
6640 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6641 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6642 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6644 The function returns the constant folded tree if a simplification
6645 can be made, and NULL_TREE otherwise. */
6648 fold_div_compare (location_t loc,
6649 enum tree_code code, tree type, tree arg0, tree arg1)
6651 tree prod, tmp, hi, lo;
6652 tree arg00 = TREE_OPERAND (arg0, 0);
6653 tree arg01 = TREE_OPERAND (arg0, 1);
6654 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6655 bool neg_overflow = false;
6658 /* We have to do this the hard way to detect unsigned overflow.
6659 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6660 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6661 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6662 neg_overflow = false;
6664 if (sign == UNSIGNED)
6666 tmp = int_const_binop (MINUS_EXPR, arg01,
6667 build_int_cst (TREE_TYPE (arg01), 1));
6670 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6671 val = wi::add (prod, tmp, sign, &overflow);
6672 hi = force_fit_type (TREE_TYPE (arg00), val,
6673 -1, overflow | TREE_OVERFLOW (prod));
6675 else if (tree_int_cst_sgn (arg01) >= 0)
6677 tmp = int_const_binop (MINUS_EXPR, arg01,
6678 build_int_cst (TREE_TYPE (arg01), 1));
6679 switch (tree_int_cst_sgn (arg1))
6682 neg_overflow = true;
6683 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6688 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6693 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6703 /* A negative divisor reverses the relational operators. */
6704 code = swap_tree_comparison (code);
6706 tmp = int_const_binop (PLUS_EXPR, arg01,
6707 build_int_cst (TREE_TYPE (arg01), 1));
6708 switch (tree_int_cst_sgn (arg1))
6711 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6716 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6721 neg_overflow = true;
6722 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6734 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6735 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6736 if (TREE_OVERFLOW (hi))
6737 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6738 if (TREE_OVERFLOW (lo))
6739 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6740 return build_range_check (loc, type, arg00, 1, lo, hi);
6743 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6744 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6745 if (TREE_OVERFLOW (hi))
6746 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6747 if (TREE_OVERFLOW (lo))
6748 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6749 return build_range_check (loc, type, arg00, 0, lo, hi);
6752 if (TREE_OVERFLOW (lo))
6754 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6755 return omit_one_operand_loc (loc, type, tmp, arg00);
6757 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6760 if (TREE_OVERFLOW (hi))
6762 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6763 return omit_one_operand_loc (loc, type, tmp, arg00);
6765 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6768 if (TREE_OVERFLOW (hi))
6770 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6771 return omit_one_operand_loc (loc, type, tmp, arg00);
6773 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6776 if (TREE_OVERFLOW (lo))
6778 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6779 return omit_one_operand_loc (loc, type, tmp, arg00);
6781 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6791 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6792 equality/inequality test, then return a simplified form of the test
6793 using a sign testing. Otherwise return NULL. TYPE is the desired
6797 fold_single_bit_test_into_sign_test (location_t loc,
6798 enum tree_code code, tree arg0, tree arg1,
6801 /* If this is testing a single bit, we can optimize the test. */
6802 if ((code == NE_EXPR || code == EQ_EXPR)
6803 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6804 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6806 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6807 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6808 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6810 if (arg00 != NULL_TREE
6811 /* This is only a win if casting to a signed type is cheap,
6812 i.e. when arg00's type is not a partial mode. */
6813 && TYPE_PRECISION (TREE_TYPE (arg00))
6814 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6816 tree stype = signed_type_for (TREE_TYPE (arg00));
6817 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6819 fold_convert_loc (loc, stype, arg00),
6820 build_int_cst (stype, 0));
6827 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6828 equality/inequality test, then return a simplified form of
6829 the test using shifts and logical operations. Otherwise return
6830 NULL. TYPE is the desired result type. */
6833 fold_single_bit_test (location_t loc, enum tree_code code,
6834 tree arg0, tree arg1, tree result_type)
6836 /* If this is testing a single bit, we can optimize the test. */
6837 if ((code == NE_EXPR || code == EQ_EXPR)
6838 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6839 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6841 tree inner = TREE_OPERAND (arg0, 0);
6842 tree type = TREE_TYPE (arg0);
6843 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6844 machine_mode operand_mode = TYPE_MODE (type);
6846 tree signed_type, unsigned_type, intermediate_type;
6849 /* First, see if we can fold the single bit test into a sign-bit
6851 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6856 /* Otherwise we have (A & C) != 0 where C is a single bit,
6857 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6858 Similarly for (A & C) == 0. */
6860 /* If INNER is a right shift of a constant and it plus BITNUM does
6861 not overflow, adjust BITNUM and INNER. */
6862 if (TREE_CODE (inner) == RSHIFT_EXPR
6863 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6864 && bitnum < TYPE_PRECISION (type)
6865 && wi::ltu_p (TREE_OPERAND (inner, 1),
6866 TYPE_PRECISION (type) - bitnum))
6868 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6869 inner = TREE_OPERAND (inner, 0);
6872 /* If we are going to be able to omit the AND below, we must do our
6873 operations as unsigned. If we must use the AND, we have a choice.
6874 Normally unsigned is faster, but for some machines signed is. */
6875 #ifdef LOAD_EXTEND_OP
6876 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6877 && !flag_syntax_only) ? 0 : 1;
6882 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6883 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6884 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6885 inner = fold_convert_loc (loc, intermediate_type, inner);
6888 inner = build2 (RSHIFT_EXPR, intermediate_type,
6889 inner, size_int (bitnum));
6891 one = build_int_cst (intermediate_type, 1);
6893 if (code == EQ_EXPR)
6894 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6896 /* Put the AND last so it can combine with more things. */
6897 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6899 /* Make sure to return the proper type. */
6900 inner = fold_convert_loc (loc, result_type, inner);
6907 /* Check whether we are allowed to reorder operands arg0 and arg1,
6908 such that the evaluation of arg1 occurs before arg0. */
6911 reorder_operands_p (const_tree arg0, const_tree arg1)
6913 if (! flag_evaluation_order)
6915 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6917 return ! TREE_SIDE_EFFECTS (arg0)
6918 && ! TREE_SIDE_EFFECTS (arg1);
6921 /* Test whether it is preferable two swap two operands, ARG0 and
6922 ARG1, for example because ARG0 is an integer constant and ARG1
6923 isn't. If REORDER is true, only recommend swapping if we can
6924 evaluate the operands in reverse order. */
6927 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6929 if (CONSTANT_CLASS_P (arg1))
6931 if (CONSTANT_CLASS_P (arg0))
6937 if (TREE_CONSTANT (arg1))
6939 if (TREE_CONSTANT (arg0))
6942 if (reorder && flag_evaluation_order
6943 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6946 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6947 for commutative and comparison operators. Ensuring a canonical
6948 form allows the optimizers to find additional redundancies without
6949 having to explicitly check for both orderings. */
6950 if (TREE_CODE (arg0) == SSA_NAME
6951 && TREE_CODE (arg1) == SSA_NAME
6952 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6955 /* Put SSA_NAMEs last. */
6956 if (TREE_CODE (arg1) == SSA_NAME)
6958 if (TREE_CODE (arg0) == SSA_NAME)
6961 /* Put variables last. */
6970 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6971 ARG0 is extended to a wider type. */
6974 fold_widened_comparison (location_t loc, enum tree_code code,
6975 tree type, tree arg0, tree arg1)
6977 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6979 tree shorter_type, outer_type;
6983 if (arg0_unw == arg0)
6985 shorter_type = TREE_TYPE (arg0_unw);
6987 #ifdef HAVE_canonicalize_funcptr_for_compare
6988 /* Disable this optimization if we're casting a function pointer
6989 type on targets that require function pointer canonicalization. */
6990 if (HAVE_canonicalize_funcptr_for_compare
6991 && TREE_CODE (shorter_type) == POINTER_TYPE
6992 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6996 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6999 arg1_unw = get_unwidened (arg1, NULL_TREE);
7001 /* If possible, express the comparison in the shorter mode. */
7002 if ((code == EQ_EXPR || code == NE_EXPR
7003 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7004 && (TREE_TYPE (arg1_unw) == shorter_type
7005 || ((TYPE_PRECISION (shorter_type)
7006 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7007 && (TYPE_UNSIGNED (shorter_type)
7008 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7009 || (TREE_CODE (arg1_unw) == INTEGER_CST
7010 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7011 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7012 && int_fits_type_p (arg1_unw, shorter_type))))
7013 return fold_build2_loc (loc, code, type, arg0_unw,
7014 fold_convert_loc (loc, shorter_type, arg1_unw));
7016 if (TREE_CODE (arg1_unw) != INTEGER_CST
7017 || TREE_CODE (shorter_type) != INTEGER_TYPE
7018 || !int_fits_type_p (arg1_unw, shorter_type))
7021 /* If we are comparing with the integer that does not fit into the range
7022 of the shorter type, the result is known. */
7023 outer_type = TREE_TYPE (arg1_unw);
7024 min = lower_bound_in_type (outer_type, shorter_type);
7025 max = upper_bound_in_type (outer_type, shorter_type);
7027 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7029 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7036 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7041 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7047 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7049 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7054 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7056 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7065 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7066 ARG0 just the signedness is changed. */
7069 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7070 tree arg0, tree arg1)
7073 tree inner_type, outer_type;
7075 if (!CONVERT_EXPR_P (arg0))
7078 outer_type = TREE_TYPE (arg0);
7079 arg0_inner = TREE_OPERAND (arg0, 0);
7080 inner_type = TREE_TYPE (arg0_inner);
7082 #ifdef HAVE_canonicalize_funcptr_for_compare
7083 /* Disable this optimization if we're casting a function pointer
7084 type on targets that require function pointer canonicalization. */
7085 if (HAVE_canonicalize_funcptr_for_compare
7086 && TREE_CODE (inner_type) == POINTER_TYPE
7087 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7091 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7094 if (TREE_CODE (arg1) != INTEGER_CST
7095 && !(CONVERT_EXPR_P (arg1)
7096 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7099 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7104 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7107 if (TREE_CODE (arg1) == INTEGER_CST)
7108 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7109 TREE_OVERFLOW (arg1));
7111 arg1 = fold_convert_loc (loc, inner_type, arg1);
7113 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7117 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7118 means A >= Y && A != MAX, but in this case we know that
7119 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7122 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7124 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7126 if (TREE_CODE (bound) == LT_EXPR)
7127 a = TREE_OPERAND (bound, 0);
7128 else if (TREE_CODE (bound) == GT_EXPR)
7129 a = TREE_OPERAND (bound, 1);
7133 typea = TREE_TYPE (a);
7134 if (!INTEGRAL_TYPE_P (typea)
7135 && !POINTER_TYPE_P (typea))
7138 if (TREE_CODE (ineq) == LT_EXPR)
7140 a1 = TREE_OPERAND (ineq, 1);
7141 y = TREE_OPERAND (ineq, 0);
7143 else if (TREE_CODE (ineq) == GT_EXPR)
7145 a1 = TREE_OPERAND (ineq, 0);
7146 y = TREE_OPERAND (ineq, 1);
7151 if (TREE_TYPE (a1) != typea)
7154 if (POINTER_TYPE_P (typea))
7156 /* Convert the pointer types into integer before taking the difference. */
7157 tree ta = fold_convert_loc (loc, ssizetype, a);
7158 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7159 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7162 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7164 if (!diff || !integer_onep (diff))
7167 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7170 /* Fold a sum or difference of at least one multiplication.
7171 Returns the folded tree or NULL if no simplification could be made. */
7174 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7175 tree arg0, tree arg1)
7177 tree arg00, arg01, arg10, arg11;
7178 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7180 /* (A * C) +- (B * C) -> (A+-B) * C.
7181 (A * C) +- A -> A * (C+-1).
7182 We are most concerned about the case where C is a constant,
7183 but other combinations show up during loop reduction. Since
7184 it is not difficult, try all four possibilities. */
7186 if (TREE_CODE (arg0) == MULT_EXPR)
7188 arg00 = TREE_OPERAND (arg0, 0);
7189 arg01 = TREE_OPERAND (arg0, 1);
7191 else if (TREE_CODE (arg0) == INTEGER_CST)
7193 arg00 = build_one_cst (type);
7198 /* We cannot generate constant 1 for fract. */
7199 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7202 arg01 = build_one_cst (type);
7204 if (TREE_CODE (arg1) == MULT_EXPR)
7206 arg10 = TREE_OPERAND (arg1, 0);
7207 arg11 = TREE_OPERAND (arg1, 1);
7209 else if (TREE_CODE (arg1) == INTEGER_CST)
7211 arg10 = build_one_cst (type);
7212 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7213 the purpose of this canonicalization. */
7214 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7215 && negate_expr_p (arg1)
7216 && code == PLUS_EXPR)
7218 arg11 = negate_expr (arg1);
7226 /* We cannot generate constant 1 for fract. */
7227 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7230 arg11 = build_one_cst (type);
7234 if (operand_equal_p (arg01, arg11, 0))
7235 same = arg01, alt0 = arg00, alt1 = arg10;
7236 else if (operand_equal_p (arg00, arg10, 0))
7237 same = arg00, alt0 = arg01, alt1 = arg11;
7238 else if (operand_equal_p (arg00, arg11, 0))
7239 same = arg00, alt0 = arg01, alt1 = arg10;
7240 else if (operand_equal_p (arg01, arg10, 0))
7241 same = arg01, alt0 = arg00, alt1 = arg11;
7243 /* No identical multiplicands; see if we can find a common
7244 power-of-two factor in non-power-of-two multiplies. This
7245 can help in multi-dimensional array access. */
7246 else if (tree_fits_shwi_p (arg01)
7247 && tree_fits_shwi_p (arg11))
7249 HOST_WIDE_INT int01, int11, tmp;
7252 int01 = tree_to_shwi (arg01);
7253 int11 = tree_to_shwi (arg11);
7255 /* Move min of absolute values to int11. */
7256 if (absu_hwi (int01) < absu_hwi (int11))
7258 tmp = int01, int01 = int11, int11 = tmp;
7259 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7266 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7267 /* The remainder should not be a constant, otherwise we
7268 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7269 increased the number of multiplications necessary. */
7270 && TREE_CODE (arg10) != INTEGER_CST)
7272 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7273 build_int_cst (TREE_TYPE (arg00),
7278 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7283 return fold_build2_loc (loc, MULT_EXPR, type,
7284 fold_build2_loc (loc, code, type,
7285 fold_convert_loc (loc, type, alt0),
7286 fold_convert_loc (loc, type, alt1)),
7287 fold_convert_loc (loc, type, same));
7292 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7293 specified by EXPR into the buffer PTR of length LEN bytes.
7294 Return the number of bytes placed in the buffer, or zero
7298 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7300 tree type = TREE_TYPE (expr);
7301 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7302 int byte, offset, word, words;
7303 unsigned char value;
7305 if ((off == -1 && total_bytes > len)
7306 || off >= total_bytes)
7310 words = total_bytes / UNITS_PER_WORD;
7312 for (byte = 0; byte < total_bytes; byte++)
7314 int bitpos = byte * BITS_PER_UNIT;
7315 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7317 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7319 if (total_bytes > UNITS_PER_WORD)
7321 word = byte / UNITS_PER_WORD;
7322 if (WORDS_BIG_ENDIAN)
7323 word = (words - 1) - word;
7324 offset = word * UNITS_PER_WORD;
7325 if (BYTES_BIG_ENDIAN)
7326 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7328 offset += byte % UNITS_PER_WORD;
7331 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7333 && offset - off < len)
7334 ptr[offset - off] = value;
7336 return MIN (len, total_bytes - off);
7340 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7341 specified by EXPR into the buffer PTR of length LEN bytes.
7342 Return the number of bytes placed in the buffer, or zero
7346 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7348 tree type = TREE_TYPE (expr);
7349 machine_mode mode = TYPE_MODE (type);
7350 int total_bytes = GET_MODE_SIZE (mode);
7351 FIXED_VALUE_TYPE value;
7352 tree i_value, i_type;
7354 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7357 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7359 if (NULL_TREE == i_type
7360 || TYPE_PRECISION (i_type) != total_bytes)
7363 value = TREE_FIXED_CST (expr);
7364 i_value = double_int_to_tree (i_type, value.data);
7366 return native_encode_int (i_value, ptr, len, off);
7370 /* Subroutine of native_encode_expr. Encode the REAL_CST
7371 specified by EXPR into the buffer PTR of length LEN bytes.
7372 Return the number of bytes placed in the buffer, or zero
7376 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7378 tree type = TREE_TYPE (expr);
7379 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7380 int byte, offset, word, words, bitpos;
7381 unsigned char value;
7383 /* There are always 32 bits in each long, no matter the size of
7384 the hosts long. We handle floating point representations with
7388 if ((off == -1 && total_bytes > len)
7389 || off >= total_bytes)
7393 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7395 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7397 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7398 bitpos += BITS_PER_UNIT)
7400 byte = (bitpos / BITS_PER_UNIT) & 3;
7401 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7403 if (UNITS_PER_WORD < 4)
7405 word = byte / UNITS_PER_WORD;
7406 if (WORDS_BIG_ENDIAN)
7407 word = (words - 1) - word;
7408 offset = word * UNITS_PER_WORD;
7409 if (BYTES_BIG_ENDIAN)
7410 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7412 offset += byte % UNITS_PER_WORD;
7415 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7416 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7418 && offset - off < len)
7419 ptr[offset - off] = value;
7421 return MIN (len, total_bytes - off);
7424 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7425 specified by EXPR into the buffer PTR of length LEN bytes.
7426 Return the number of bytes placed in the buffer, or zero
7430 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7435 part = TREE_REALPART (expr);
7436 rsize = native_encode_expr (part, ptr, len, off);
7440 part = TREE_IMAGPART (expr);
7442 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7443 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7447 return rsize + isize;
7451 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7452 specified by EXPR into the buffer PTR of length LEN bytes.
7453 Return the number of bytes placed in the buffer, or zero
7457 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7464 count = VECTOR_CST_NELTS (expr);
7465 itype = TREE_TYPE (TREE_TYPE (expr));
7466 size = GET_MODE_SIZE (TYPE_MODE (itype));
7467 for (i = 0; i < count; i++)
7474 elem = VECTOR_CST_ELT (expr, i);
7475 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7476 if ((off == -1 && res != size)
7489 /* Subroutine of native_encode_expr. Encode the STRING_CST
7490 specified by EXPR into the buffer PTR of length LEN bytes.
7491 Return the number of bytes placed in the buffer, or zero
7495 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7497 tree type = TREE_TYPE (expr);
7498 HOST_WIDE_INT total_bytes;
7500 if (TREE_CODE (type) != ARRAY_TYPE
7501 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7502 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7503 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7505 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7506 if ((off == -1 && total_bytes > len)
7507 || off >= total_bytes)
7511 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7514 if (off < TREE_STRING_LENGTH (expr))
7516 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7517 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7519 memset (ptr + written, 0,
7520 MIN (total_bytes - written, len - written));
7523 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7524 return MIN (total_bytes - off, len);
7528 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7529 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7530 buffer PTR of length LEN bytes. If OFF is not -1 then start
7531 the encoding at byte offset OFF and encode at most LEN bytes.
7532 Return the number of bytes placed in the buffer, or zero upon failure. */
7535 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7537 /* We don't support starting at negative offset and -1 is special. */
7541 switch (TREE_CODE (expr))
7544 return native_encode_int (expr, ptr, len, off);
7547 return native_encode_real (expr, ptr, len, off);
7550 return native_encode_fixed (expr, ptr, len, off);
7553 return native_encode_complex (expr, ptr, len, off);
7556 return native_encode_vector (expr, ptr, len, off);
7559 return native_encode_string (expr, ptr, len, off);
7567 /* Subroutine of native_interpret_expr. Interpret the contents of
7568 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7569 If the buffer cannot be interpreted, return NULL_TREE. */
7572 native_interpret_int (tree type, const unsigned char *ptr, int len)
7574 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7576 if (total_bytes > len
7577 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7580 wide_int result = wi::from_buffer (ptr, total_bytes);
7582 return wide_int_to_tree (type, result);
7586 /* Subroutine of native_interpret_expr. Interpret the contents of
7587 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7588 If the buffer cannot be interpreted, return NULL_TREE. */
7591 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7593 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7595 FIXED_VALUE_TYPE fixed_value;
7597 if (total_bytes > len
7598 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7601 result = double_int::from_buffer (ptr, total_bytes);
7602 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7604 return build_fixed (type, fixed_value);
7608 /* Subroutine of native_interpret_expr. Interpret the contents of
7609 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7610 If the buffer cannot be interpreted, return NULL_TREE. */
7613 native_interpret_real (tree type, const unsigned char *ptr, int len)
7615 machine_mode mode = TYPE_MODE (type);
7616 int total_bytes = GET_MODE_SIZE (mode);
7617 int byte, offset, word, words, bitpos;
7618 unsigned char value;
7619 /* There are always 32 bits in each long, no matter the size of
7620 the hosts long. We handle floating point representations with
7625 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7626 if (total_bytes > len || total_bytes > 24)
7628 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7630 memset (tmp, 0, sizeof (tmp));
7631 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7632 bitpos += BITS_PER_UNIT)
7634 byte = (bitpos / BITS_PER_UNIT) & 3;
7635 if (UNITS_PER_WORD < 4)
7637 word = byte / UNITS_PER_WORD;
7638 if (WORDS_BIG_ENDIAN)
7639 word = (words - 1) - word;
7640 offset = word * UNITS_PER_WORD;
7641 if (BYTES_BIG_ENDIAN)
7642 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7644 offset += byte % UNITS_PER_WORD;
7647 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7648 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7650 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7653 real_from_target (&r, tmp, mode);
7654 return build_real (type, r);
7658 /* Subroutine of native_interpret_expr. Interpret the contents of
7659 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7660 If the buffer cannot be interpreted, return NULL_TREE. */
7663 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7665 tree etype, rpart, ipart;
7668 etype = TREE_TYPE (type);
7669 size = GET_MODE_SIZE (TYPE_MODE (etype));
7672 rpart = native_interpret_expr (etype, ptr, size);
7675 ipart = native_interpret_expr (etype, ptr+size, size);
7678 return build_complex (type, rpart, ipart);
7682 /* Subroutine of native_interpret_expr. Interpret the contents of
7683 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7684 If the buffer cannot be interpreted, return NULL_TREE. */
7687 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7693 etype = TREE_TYPE (type);
7694 size = GET_MODE_SIZE (TYPE_MODE (etype));
7695 count = TYPE_VECTOR_SUBPARTS (type);
7696 if (size * count > len)
7699 elements = XALLOCAVEC (tree, count);
7700 for (i = count - 1; i >= 0; i--)
7702 elem = native_interpret_expr (etype, ptr+(i*size), size);
7707 return build_vector (type, elements);
7711 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7712 the buffer PTR of length LEN as a constant of type TYPE. For
7713 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7714 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7715 return NULL_TREE. */
7718 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7720 switch (TREE_CODE (type))
7726 case REFERENCE_TYPE:
7727 return native_interpret_int (type, ptr, len);
7730 return native_interpret_real (type, ptr, len);
7732 case FIXED_POINT_TYPE:
7733 return native_interpret_fixed (type, ptr, len);
7736 return native_interpret_complex (type, ptr, len);
7739 return native_interpret_vector (type, ptr, len);
7746 /* Returns true if we can interpret the contents of a native encoding
7750 can_native_interpret_type_p (tree type)
7752 switch (TREE_CODE (type))
7758 case REFERENCE_TYPE:
7759 case FIXED_POINT_TYPE:
7769 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7770 TYPE at compile-time. If we're unable to perform the conversion
7771 return NULL_TREE. */
7774 fold_view_convert_expr (tree type, tree expr)
7776 /* We support up to 512-bit values (for V8DFmode). */
7777 unsigned char buffer[64];
7780 /* Check that the host and target are sane. */
7781 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7784 len = native_encode_expr (expr, buffer, sizeof (buffer));
7788 return native_interpret_expr (type, buffer, len);
7791 /* Build an expression for the address of T. Folds away INDIRECT_REF
7792 to avoid confusing the gimplify process. */
7795 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7797 /* The size of the object is not relevant when talking about its address. */
7798 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7799 t = TREE_OPERAND (t, 0);
7801 if (TREE_CODE (t) == INDIRECT_REF)
7803 t = TREE_OPERAND (t, 0);
7805 if (TREE_TYPE (t) != ptrtype)
7806 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7808 else if (TREE_CODE (t) == MEM_REF
7809 && integer_zerop (TREE_OPERAND (t, 1)))
7810 return TREE_OPERAND (t, 0);
7811 else if (TREE_CODE (t) == MEM_REF
7812 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7813 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7814 TREE_OPERAND (t, 0),
7815 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7816 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7818 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7820 if (TREE_TYPE (t) != ptrtype)
7821 t = fold_convert_loc (loc, ptrtype, t);
7824 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7829 /* Build an expression for the address of T. */
7832 build_fold_addr_expr_loc (location_t loc, tree t)
7834 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7836 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7839 /* Fold a unary expression of code CODE and type TYPE with operand
7840 OP0. Return the folded expression if folding is successful.
7841 Otherwise, return NULL_TREE. */
7844 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7848 enum tree_code_class kind = TREE_CODE_CLASS (code);
7850 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7851 && TREE_CODE_LENGTH (code) == 1);
7856 if (CONVERT_EXPR_CODE_P (code)
7857 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7859 /* Don't use STRIP_NOPS, because signedness of argument type
7861 STRIP_SIGN_NOPS (arg0);
7865 /* Strip any conversions that don't change the mode. This
7866 is safe for every expression, except for a comparison
7867 expression because its signedness is derived from its
7870 Note that this is done as an internal manipulation within
7871 the constant folder, in order to find the simplest
7872 representation of the arguments so that their form can be
7873 studied. In any cases, the appropriate type conversions
7874 should be put back in the tree that will get out of the
7879 if (CONSTANT_CLASS_P (arg0))
7881 tree tem = const_unop (code, type, arg0);
7884 if (TREE_TYPE (tem) != type)
7885 tem = fold_convert_loc (loc, type, tem);
7891 tem = generic_simplify (loc, code, type, op0);
7895 if (TREE_CODE_CLASS (code) == tcc_unary)
7897 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7898 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7899 fold_build1_loc (loc, code, type,
7900 fold_convert_loc (loc, TREE_TYPE (op0),
7901 TREE_OPERAND (arg0, 1))));
7902 else if (TREE_CODE (arg0) == COND_EXPR)
7904 tree arg01 = TREE_OPERAND (arg0, 1);
7905 tree arg02 = TREE_OPERAND (arg0, 2);
7906 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7907 arg01 = fold_build1_loc (loc, code, type,
7908 fold_convert_loc (loc,
7909 TREE_TYPE (op0), arg01));
7910 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7911 arg02 = fold_build1_loc (loc, code, type,
7912 fold_convert_loc (loc,
7913 TREE_TYPE (op0), arg02));
7914 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7917 /* If this was a conversion, and all we did was to move into
7918 inside the COND_EXPR, bring it back out. But leave it if
7919 it is a conversion from integer to integer and the
7920 result precision is no wider than a word since such a
7921 conversion is cheap and may be optimized away by combine,
7922 while it couldn't if it were outside the COND_EXPR. Then return
7923 so we don't get into an infinite recursion loop taking the
7924 conversion out and then back in. */
7926 if ((CONVERT_EXPR_CODE_P (code)
7927 || code == NON_LVALUE_EXPR)
7928 && TREE_CODE (tem) == COND_EXPR
7929 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7930 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7931 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7932 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7933 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7934 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7935 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7937 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7938 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7939 || flag_syntax_only))
7940 tem = build1_loc (loc, code, type,
7942 TREE_TYPE (TREE_OPERAND
7943 (TREE_OPERAND (tem, 1), 0)),
7944 TREE_OPERAND (tem, 0),
7945 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7946 TREE_OPERAND (TREE_OPERAND (tem, 2),
7954 case NON_LVALUE_EXPR:
7955 if (!maybe_lvalue_p (op0))
7956 return fold_convert_loc (loc, type, op0);
7961 case FIX_TRUNC_EXPR:
7962 if (COMPARISON_CLASS_P (op0))
7964 /* If we have (type) (a CMP b) and type is an integral type, return
7965 new expression involving the new type. Canonicalize
7966 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7968 Do not fold the result as that would not simplify further, also
7969 folding again results in recursions. */
7970 if (TREE_CODE (type) == BOOLEAN_TYPE)
7971 return build2_loc (loc, TREE_CODE (op0), type,
7972 TREE_OPERAND (op0, 0),
7973 TREE_OPERAND (op0, 1));
7974 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7975 && TREE_CODE (type) != VECTOR_TYPE)
7976 return build3_loc (loc, COND_EXPR, type, op0,
7977 constant_boolean_node (true, type),
7978 constant_boolean_node (false, type));
7981 /* Handle (T *)&A.B.C for A being of type T and B and C
7982 living at offset zero. This occurs frequently in
7983 C++ upcasting and then accessing the base. */
7984 if (TREE_CODE (op0) == ADDR_EXPR
7985 && POINTER_TYPE_P (type)
7986 && handled_component_p (TREE_OPERAND (op0, 0)))
7988 HOST_WIDE_INT bitsize, bitpos;
7991 int unsignedp, volatilep;
7992 tree base = TREE_OPERAND (op0, 0);
7993 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7994 &mode, &unsignedp, &volatilep, false);
7995 /* If the reference was to a (constant) zero offset, we can use
7996 the address of the base if it has the same base type
7997 as the result type and the pointer type is unqualified. */
7998 if (! offset && bitpos == 0
7999 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8000 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8001 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8002 return fold_convert_loc (loc, type,
8003 build_fold_addr_expr_loc (loc, base));
8006 if (TREE_CODE (op0) == MODIFY_EXPR
8007 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8008 /* Detect assigning a bitfield. */
8009 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8011 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8013 /* Don't leave an assignment inside a conversion
8014 unless assigning a bitfield. */
8015 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8016 /* First do the assignment, then return converted constant. */
8017 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8018 TREE_NO_WARNING (tem) = 1;
8019 TREE_USED (tem) = 1;
8023 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8024 constants (if x has signed type, the sign bit cannot be set
8025 in c). This folds extension into the BIT_AND_EXPR.
8026 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8027 very likely don't have maximal range for their precision and this
8028 transformation effectively doesn't preserve non-maximal ranges. */
8029 if (TREE_CODE (type) == INTEGER_TYPE
8030 && TREE_CODE (op0) == BIT_AND_EXPR
8031 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8033 tree and_expr = op0;
8034 tree and0 = TREE_OPERAND (and_expr, 0);
8035 tree and1 = TREE_OPERAND (and_expr, 1);
8038 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8039 || (TYPE_PRECISION (type)
8040 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8042 else if (TYPE_PRECISION (TREE_TYPE (and1))
8043 <= HOST_BITS_PER_WIDE_INT
8044 && tree_fits_uhwi_p (and1))
8046 unsigned HOST_WIDE_INT cst;
8048 cst = tree_to_uhwi (and1);
8049 cst &= HOST_WIDE_INT_M1U
8050 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8051 change = (cst == 0);
8052 #ifdef LOAD_EXTEND_OP
8054 && !flag_syntax_only
8055 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8058 tree uns = unsigned_type_for (TREE_TYPE (and0));
8059 and0 = fold_convert_loc (loc, uns, and0);
8060 and1 = fold_convert_loc (loc, uns, and1);
8066 tem = force_fit_type (type, wi::to_widest (and1), 0,
8067 TREE_OVERFLOW (and1));
8068 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8069 fold_convert_loc (loc, type, and0), tem);
8073 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8074 when one of the new casts will fold away. Conservatively we assume
8075 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8076 if (POINTER_TYPE_P (type)
8077 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8078 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8079 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8080 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8081 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8083 tree arg00 = TREE_OPERAND (arg0, 0);
8084 tree arg01 = TREE_OPERAND (arg0, 1);
8086 return fold_build_pointer_plus_loc
8087 (loc, fold_convert_loc (loc, type, arg00), arg01);
8090 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8091 of the same precision, and X is an integer type not narrower than
8092 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8093 if (INTEGRAL_TYPE_P (type)
8094 && TREE_CODE (op0) == BIT_NOT_EXPR
8095 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8096 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8097 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8099 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8100 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8101 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8102 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8103 fold_convert_loc (loc, type, tem));
8106 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8107 type of X and Y (integer types only). */
8108 if (INTEGRAL_TYPE_P (type)
8109 && TREE_CODE (op0) == MULT_EXPR
8110 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8111 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8113 /* Be careful not to introduce new overflows. */
8115 if (TYPE_OVERFLOW_WRAPS (type))
8118 mult_type = unsigned_type_for (type);
8120 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8122 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8123 fold_convert_loc (loc, mult_type,
8124 TREE_OPERAND (op0, 0)),
8125 fold_convert_loc (loc, mult_type,
8126 TREE_OPERAND (op0, 1)));
8127 return fold_convert_loc (loc, type, tem);
8133 case VIEW_CONVERT_EXPR:
8134 if (TREE_CODE (op0) == MEM_REF)
8135 return fold_build2_loc (loc, MEM_REF, type,
8136 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8141 tem = fold_negate_expr (loc, arg0);
8143 return fold_convert_loc (loc, type, tem);
8147 /* Convert fabs((double)float) into (double)fabsf(float). */
8148 if (TREE_CODE (arg0) == NOP_EXPR
8149 && TREE_CODE (type) == REAL_TYPE)
8151 tree targ0 = strip_float_extensions (arg0);
8153 return fold_convert_loc (loc, type,
8154 fold_build1_loc (loc, ABS_EXPR,
8158 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8159 else if (TREE_CODE (arg0) == ABS_EXPR)
8162 /* Strip sign ops from argument. */
8163 if (TREE_CODE (type) == REAL_TYPE)
8165 tem = fold_strip_sign_ops (arg0);
8167 return fold_build1_loc (loc, ABS_EXPR, type,
8168 fold_convert_loc (loc, type, tem));
8173 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8174 return fold_convert_loc (loc, type, arg0);
8175 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8177 tree itype = TREE_TYPE (type);
8178 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8179 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8180 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8181 negate_expr (ipart));
8183 if (TREE_CODE (arg0) == CONJ_EXPR)
8184 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8188 /* Convert ~ (-A) to A - 1. */
8189 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8190 return fold_build2_loc (loc, MINUS_EXPR, type,
8191 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8192 build_int_cst (type, 1));
8193 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8194 else if (INTEGRAL_TYPE_P (type)
8195 && ((TREE_CODE (arg0) == MINUS_EXPR
8196 && integer_onep (TREE_OPERAND (arg0, 1)))
8197 || (TREE_CODE (arg0) == PLUS_EXPR
8198 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8200 /* Perform the negation in ARG0's type and only then convert
8201 to TYPE as to avoid introducing undefined behavior. */
8202 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8203 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8204 TREE_OPERAND (arg0, 0));
8205 return fold_convert_loc (loc, type, t);
8207 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8208 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8209 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8210 fold_convert_loc (loc, type,
8211 TREE_OPERAND (arg0, 0)))))
8212 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8213 fold_convert_loc (loc, type,
8214 TREE_OPERAND (arg0, 1)));
8215 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8216 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8217 fold_convert_loc (loc, type,
8218 TREE_OPERAND (arg0, 1)))))
8219 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8220 fold_convert_loc (loc, type,
8221 TREE_OPERAND (arg0, 0)), tem);
8225 case TRUTH_NOT_EXPR:
8226 /* Note that the operand of this must be an int
8227 and its values must be 0 or 1.
8228 ("true" is a fixed value perhaps depending on the language,
8229 but we don't handle values other than 1 correctly yet.) */
8230 tem = fold_truth_not_expr (loc, arg0);
8233 return fold_convert_loc (loc, type, tem);
8236 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8237 return fold_convert_loc (loc, type, arg0);
8238 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8240 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8241 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8242 fold_build1_loc (loc, REALPART_EXPR, itype,
8243 TREE_OPERAND (arg0, 0)),
8244 fold_build1_loc (loc, REALPART_EXPR, itype,
8245 TREE_OPERAND (arg0, 1)));
8246 return fold_convert_loc (loc, type, tem);
8248 if (TREE_CODE (arg0) == CONJ_EXPR)
8250 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8251 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8252 TREE_OPERAND (arg0, 0));
8253 return fold_convert_loc (loc, type, tem);
8255 if (TREE_CODE (arg0) == CALL_EXPR)
8257 tree fn = get_callee_fndecl (arg0);
8258 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8259 switch (DECL_FUNCTION_CODE (fn))
8261 CASE_FLT_FN (BUILT_IN_CEXPI):
8262 fn = mathfn_built_in (type, BUILT_IN_COS);
8264 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8274 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8275 return build_zero_cst (type);
8276 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8278 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8279 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8280 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8281 TREE_OPERAND (arg0, 0)),
8282 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8283 TREE_OPERAND (arg0, 1)));
8284 return fold_convert_loc (loc, type, tem);
8286 if (TREE_CODE (arg0) == CONJ_EXPR)
8288 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8289 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8290 return fold_convert_loc (loc, type, negate_expr (tem));
8292 if (TREE_CODE (arg0) == CALL_EXPR)
8294 tree fn = get_callee_fndecl (arg0);
8295 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8296 switch (DECL_FUNCTION_CODE (fn))
8298 CASE_FLT_FN (BUILT_IN_CEXPI):
8299 fn = mathfn_built_in (type, BUILT_IN_SIN);
8301 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8311 /* Fold *&X to X if X is an lvalue. */
8312 if (TREE_CODE (op0) == ADDR_EXPR)
8314 tree op00 = TREE_OPERAND (op0, 0);
8315 if ((TREE_CODE (op00) == VAR_DECL
8316 || TREE_CODE (op00) == PARM_DECL
8317 || TREE_CODE (op00) == RESULT_DECL)
8318 && !TREE_READONLY (op00))
8325 } /* switch (code) */
8329 /* If the operation was a conversion do _not_ mark a resulting constant
8330 with TREE_OVERFLOW if the original constant was not. These conversions
8331 have implementation defined behavior and retaining the TREE_OVERFLOW
8332 flag here would confuse later passes such as VRP. */
8334 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8335 tree type, tree op0)
8337 tree res = fold_unary_loc (loc, code, type, op0);
8339 && TREE_CODE (res) == INTEGER_CST
8340 && TREE_CODE (op0) == INTEGER_CST
8341 && CONVERT_EXPR_CODE_P (code))
8342 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8347 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8348 operands OP0 and OP1. LOC is the location of the resulting expression.
8349 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8350 Return the folded expression if folding is successful. Otherwise,
8351 return NULL_TREE. */
8353 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8354 tree arg0, tree arg1, tree op0, tree op1)
8358 /* We only do these simplifications if we are optimizing. */
8362 /* Check for things like (A || B) && (A || C). We can convert this
8363 to A || (B && C). Note that either operator can be any of the four
8364 truth and/or operations and the transformation will still be
8365 valid. Also note that we only care about order for the
8366 ANDIF and ORIF operators. If B contains side effects, this
8367 might change the truth-value of A. */
8368 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8369 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8370 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8371 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8372 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8373 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8375 tree a00 = TREE_OPERAND (arg0, 0);
8376 tree a01 = TREE_OPERAND (arg0, 1);
8377 tree a10 = TREE_OPERAND (arg1, 0);
8378 tree a11 = TREE_OPERAND (arg1, 1);
8379 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8380 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8381 && (code == TRUTH_AND_EXPR
8382 || code == TRUTH_OR_EXPR));
8384 if (operand_equal_p (a00, a10, 0))
8385 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8386 fold_build2_loc (loc, code, type, a01, a11));
8387 else if (commutative && operand_equal_p (a00, a11, 0))
8388 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8389 fold_build2_loc (loc, code, type, a01, a10));
8390 else if (commutative && operand_equal_p (a01, a10, 0))
8391 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8392 fold_build2_loc (loc, code, type, a00, a11));
8394 /* This case if tricky because we must either have commutative
8395 operators or else A10 must not have side-effects. */
8397 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8398 && operand_equal_p (a01, a11, 0))
8399 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8400 fold_build2_loc (loc, code, type, a00, a10),
8404 /* See if we can build a range comparison. */
8405 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8408 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8409 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8411 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8413 return fold_build2_loc (loc, code, type, tem, arg1);
8416 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8417 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8419 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8421 return fold_build2_loc (loc, code, type, arg0, tem);
8424 /* Check for the possibility of merging component references. If our
8425 lhs is another similar operation, try to merge its rhs with our
8426 rhs. Then try to merge our lhs and rhs. */
8427 if (TREE_CODE (arg0) == code
8428 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8429 TREE_OPERAND (arg0, 1), arg1)))
8430 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8432 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8435 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8436 && (code == TRUTH_AND_EXPR
8437 || code == TRUTH_ANDIF_EXPR
8438 || code == TRUTH_OR_EXPR
8439 || code == TRUTH_ORIF_EXPR))
8441 enum tree_code ncode, icode;
8443 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8444 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8445 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8447 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8448 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8449 We don't want to pack more than two leafs to a non-IF AND/OR
8451 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8452 equal to IF-CODE, then we don't want to add right-hand operand.
8453 If the inner right-hand side of left-hand operand has
8454 side-effects, or isn't simple, then we can't add to it,
8455 as otherwise we might destroy if-sequence. */
8456 if (TREE_CODE (arg0) == icode
8457 && simple_operand_p_2 (arg1)
8458 /* Needed for sequence points to handle trappings, and
8460 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8462 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8464 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8467 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8468 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8469 else if (TREE_CODE (arg1) == icode
8470 && simple_operand_p_2 (arg0)
8471 /* Needed for sequence points to handle trappings, and
8473 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8475 tem = fold_build2_loc (loc, ncode, type,
8476 arg0, TREE_OPERAND (arg1, 0));
8477 return fold_build2_loc (loc, icode, type, tem,
8478 TREE_OPERAND (arg1, 1));
8480 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8482 For sequence point consistancy, we need to check for trapping,
8483 and side-effects. */
8484 else if (code == icode && simple_operand_p_2 (arg0)
8485 && simple_operand_p_2 (arg1))
8486 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8492 /* Fold a binary expression of code CODE and type TYPE with operands
8493 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8494 Return the folded expression if folding is successful. Otherwise,
8495 return NULL_TREE. */
8498 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8500 enum tree_code compl_code;
8502 if (code == MIN_EXPR)
8503 compl_code = MAX_EXPR;
8504 else if (code == MAX_EXPR)
8505 compl_code = MIN_EXPR;
8509 /* MIN (MAX (a, b), b) == b. */
8510 if (TREE_CODE (op0) == compl_code
8511 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8512 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8514 /* MIN (MAX (b, a), b) == b. */
8515 if (TREE_CODE (op0) == compl_code
8516 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8517 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8518 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8520 /* MIN (a, MAX (a, b)) == a. */
8521 if (TREE_CODE (op1) == compl_code
8522 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8523 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8524 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8526 /* MIN (a, MAX (b, a)) == a. */
8527 if (TREE_CODE (op1) == compl_code
8528 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8529 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8530 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8535 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8536 by changing CODE to reduce the magnitude of constants involved in
8537 ARG0 of the comparison.
8538 Returns a canonicalized comparison tree if a simplification was
8539 possible, otherwise returns NULL_TREE.
8540 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8541 valid if signed overflow is undefined. */
8544 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8545 tree arg0, tree arg1,
8546 bool *strict_overflow_p)
8548 enum tree_code code0 = TREE_CODE (arg0);
8549 tree t, cst0 = NULL_TREE;
8553 /* Match A +- CST code arg1 and CST code arg1. We can change the
8554 first form only if overflow is undefined. */
8555 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8556 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8557 /* In principle pointers also have undefined overflow behavior,
8558 but that causes problems elsewhere. */
8559 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8560 && (code0 == MINUS_EXPR
8561 || code0 == PLUS_EXPR)
8562 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8563 || code0 == INTEGER_CST))
8566 /* Identify the constant in arg0 and its sign. */
8567 if (code0 == INTEGER_CST)
8570 cst0 = TREE_OPERAND (arg0, 1);
8571 sgn0 = tree_int_cst_sgn (cst0);
8573 /* Overflowed constants and zero will cause problems. */
8574 if (integer_zerop (cst0)
8575 || TREE_OVERFLOW (cst0))
8578 /* See if we can reduce the magnitude of the constant in
8579 arg0 by changing the comparison code. */
8580 if (code0 == INTEGER_CST)
8582 /* CST <= arg1 -> CST-1 < arg1. */
8583 if (code == LE_EXPR && sgn0 == 1)
8585 /* -CST < arg1 -> -CST-1 <= arg1. */
8586 else if (code == LT_EXPR && sgn0 == -1)
8588 /* CST > arg1 -> CST-1 >= arg1. */
8589 else if (code == GT_EXPR && sgn0 == 1)
8591 /* -CST >= arg1 -> -CST-1 > arg1. */
8592 else if (code == GE_EXPR && sgn0 == -1)
8596 /* arg1 code' CST' might be more canonical. */
8601 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8603 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8605 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8606 else if (code == GT_EXPR
8607 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8609 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8610 else if (code == LE_EXPR
8611 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8613 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8614 else if (code == GE_EXPR
8615 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8619 *strict_overflow_p = true;
8622 /* Now build the constant reduced in magnitude. But not if that
8623 would produce one outside of its types range. */
8624 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8626 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8627 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8629 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8630 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8631 /* We cannot swap the comparison here as that would cause us to
8632 endlessly recurse. */
8635 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8636 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8637 if (code0 != INTEGER_CST)
8638 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8639 t = fold_convert (TREE_TYPE (arg1), t);
8641 /* If swapping might yield to a more canonical form, do so. */
8643 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8645 return fold_build2_loc (loc, code, type, t, arg1);
8648 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8649 overflow further. Try to decrease the magnitude of constants involved
8650 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8651 and put sole constants at the second argument position.
8652 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8655 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8656 tree arg0, tree arg1)
8659 bool strict_overflow_p;
8660 const char * const warnmsg = G_("assuming signed overflow does not occur "
8661 "when reducing constant in comparison");
8663 /* Try canonicalization by simplifying arg0. */
8664 strict_overflow_p = false;
8665 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8666 &strict_overflow_p);
8669 if (strict_overflow_p)
8670 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8674 /* Try canonicalization by simplifying arg1 using the swapped
8676 code = swap_tree_comparison (code);
8677 strict_overflow_p = false;
8678 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8679 &strict_overflow_p);
8680 if (t && strict_overflow_p)
8681 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8685 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8686 space. This is used to avoid issuing overflow warnings for
8687 expressions like &p->x which can not wrap. */
8690 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8692 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8699 int precision = TYPE_PRECISION (TREE_TYPE (base));
8700 if (offset == NULL_TREE)
8701 wi_offset = wi::zero (precision);
8702 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8708 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8709 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8713 if (!wi::fits_uhwi_p (total))
8716 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8720 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8722 if (TREE_CODE (base) == ADDR_EXPR)
8724 HOST_WIDE_INT base_size;
8726 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8727 if (base_size > 0 && size < base_size)
8731 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8734 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8735 kind INTEGER_CST. This makes sure to properly sign-extend the
8738 static HOST_WIDE_INT
8739 size_low_cst (const_tree t)
8741 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8742 int prec = TYPE_PRECISION (TREE_TYPE (t));
8743 if (prec < HOST_BITS_PER_WIDE_INT)
8744 return sext_hwi (w, prec);
8748 /* Subroutine of fold_binary. This routine performs all of the
8749 transformations that are common to the equality/inequality
8750 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8751 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8752 fold_binary should call fold_binary. Fold a comparison with
8753 tree code CODE and type TYPE with operands OP0 and OP1. Return
8754 the folded comparison or NULL_TREE. */
8757 fold_comparison (location_t loc, enum tree_code code, tree type,
8760 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8761 tree arg0, arg1, tem;
8766 STRIP_SIGN_NOPS (arg0);
8767 STRIP_SIGN_NOPS (arg1);
8769 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8770 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8772 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8773 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8774 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8775 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8776 && TREE_CODE (arg1) == INTEGER_CST
8777 && !TREE_OVERFLOW (arg1))
8779 const enum tree_code
8780 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8781 tree const1 = TREE_OPERAND (arg0, 1);
8782 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8783 tree variable = TREE_OPERAND (arg0, 0);
8784 tree new_const = int_const_binop (reverse_op, const2, const1);
8786 /* If the constant operation overflowed this can be
8787 simplified as a comparison against INT_MAX/INT_MIN. */
8788 if (TREE_OVERFLOW (new_const)
8789 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8791 int const1_sgn = tree_int_cst_sgn (const1);
8792 enum tree_code code2 = code;
8794 /* Get the sign of the constant on the lhs if the
8795 operation were VARIABLE + CONST1. */
8796 if (TREE_CODE (arg0) == MINUS_EXPR)
8797 const1_sgn = -const1_sgn;
8799 /* The sign of the constant determines if we overflowed
8800 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8801 Canonicalize to the INT_MIN overflow by swapping the comparison
8803 if (const1_sgn == -1)
8804 code2 = swap_tree_comparison (code);
8806 /* We now can look at the canonicalized case
8807 VARIABLE + 1 CODE2 INT_MIN
8808 and decide on the result. */
8815 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8821 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8830 fold_overflow_warning ("assuming signed overflow does not occur "
8831 "when changing X +- C1 cmp C2 to "
8833 WARN_STRICT_OVERFLOW_COMPARISON);
8834 return fold_build2_loc (loc, code, type, variable, new_const);
8838 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8839 if (TREE_CODE (arg0) == MINUS_EXPR
8841 && integer_zerop (arg1))
8843 /* ??? The transformation is valid for the other operators if overflow
8844 is undefined for the type, but performing it here badly interacts
8845 with the transformation in fold_cond_expr_with_comparison which
8846 attempts to synthetize ABS_EXPR. */
8848 fold_overflow_warning ("assuming signed overflow does not occur "
8849 "when changing X - Y cmp 0 to X cmp Y",
8850 WARN_STRICT_OVERFLOW_COMPARISON);
8851 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8852 TREE_OPERAND (arg0, 1));
8855 /* For comparisons of pointers we can decompose it to a compile time
8856 comparison of the base objects and the offsets into the object.
8857 This requires at least one operand being an ADDR_EXPR or a
8858 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8859 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8860 && (TREE_CODE (arg0) == ADDR_EXPR
8861 || TREE_CODE (arg1) == ADDR_EXPR
8862 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8863 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8865 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8866 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8868 int volatilep, unsignedp;
8869 bool indirect_base0 = false, indirect_base1 = false;
8871 /* Get base and offset for the access. Strip ADDR_EXPR for
8872 get_inner_reference, but put it back by stripping INDIRECT_REF
8873 off the base object if possible. indirect_baseN will be true
8874 if baseN is not an address but refers to the object itself. */
8876 if (TREE_CODE (arg0) == ADDR_EXPR)
8878 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8879 &bitsize, &bitpos0, &offset0, &mode,
8880 &unsignedp, &volatilep, false);
8881 if (TREE_CODE (base0) == INDIRECT_REF)
8882 base0 = TREE_OPERAND (base0, 0);
8884 indirect_base0 = true;
8886 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8888 base0 = TREE_OPERAND (arg0, 0);
8889 STRIP_SIGN_NOPS (base0);
8890 if (TREE_CODE (base0) == ADDR_EXPR)
8892 base0 = TREE_OPERAND (base0, 0);
8893 indirect_base0 = true;
8895 offset0 = TREE_OPERAND (arg0, 1);
8896 if (tree_fits_shwi_p (offset0))
8898 HOST_WIDE_INT off = size_low_cst (offset0);
8899 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8901 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8903 bitpos0 = off * BITS_PER_UNIT;
8904 offset0 = NULL_TREE;
8910 if (TREE_CODE (arg1) == ADDR_EXPR)
8912 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8913 &bitsize, &bitpos1, &offset1, &mode,
8914 &unsignedp, &volatilep, false);
8915 if (TREE_CODE (base1) == INDIRECT_REF)
8916 base1 = TREE_OPERAND (base1, 0);
8918 indirect_base1 = true;
8920 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8922 base1 = TREE_OPERAND (arg1, 0);
8923 STRIP_SIGN_NOPS (base1);
8924 if (TREE_CODE (base1) == ADDR_EXPR)
8926 base1 = TREE_OPERAND (base1, 0);
8927 indirect_base1 = true;
8929 offset1 = TREE_OPERAND (arg1, 1);
8930 if (tree_fits_shwi_p (offset1))
8932 HOST_WIDE_INT off = size_low_cst (offset1);
8933 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8935 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8937 bitpos1 = off * BITS_PER_UNIT;
8938 offset1 = NULL_TREE;
8943 /* A local variable can never be pointed to by
8944 the default SSA name of an incoming parameter. */
8945 if ((TREE_CODE (arg0) == ADDR_EXPR
8947 && TREE_CODE (base0) == VAR_DECL
8948 && auto_var_in_fn_p (base0, current_function_decl)
8950 && TREE_CODE (base1) == SSA_NAME
8951 && SSA_NAME_IS_DEFAULT_DEF (base1)
8952 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8953 || (TREE_CODE (arg1) == ADDR_EXPR
8955 && TREE_CODE (base1) == VAR_DECL
8956 && auto_var_in_fn_p (base1, current_function_decl)
8958 && TREE_CODE (base0) == SSA_NAME
8959 && SSA_NAME_IS_DEFAULT_DEF (base0)
8960 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8962 if (code == NE_EXPR)
8963 return constant_boolean_node (1, type);
8964 else if (code == EQ_EXPR)
8965 return constant_boolean_node (0, type);
8967 /* If we have equivalent bases we might be able to simplify. */
8968 else if (indirect_base0 == indirect_base1
8969 && operand_equal_p (base0, base1, 0))
8971 /* We can fold this expression to a constant if the non-constant
8972 offset parts are equal. */
8973 if ((offset0 == offset1
8974 || (offset0 && offset1
8975 && operand_equal_p (offset0, offset1, 0)))
8978 || (indirect_base0 && DECL_P (base0))
8979 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8983 && bitpos0 != bitpos1
8984 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8985 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8986 fold_overflow_warning (("assuming pointer wraparound does not "
8987 "occur when comparing P +- C1 with "
8989 WARN_STRICT_OVERFLOW_CONDITIONAL);
8994 return constant_boolean_node (bitpos0 == bitpos1, type);
8996 return constant_boolean_node (bitpos0 != bitpos1, type);
8998 return constant_boolean_node (bitpos0 < bitpos1, type);
9000 return constant_boolean_node (bitpos0 <= bitpos1, type);
9002 return constant_boolean_node (bitpos0 >= bitpos1, type);
9004 return constant_boolean_node (bitpos0 > bitpos1, type);
9008 /* We can simplify the comparison to a comparison of the variable
9009 offset parts if the constant offset parts are equal.
9010 Be careful to use signed sizetype here because otherwise we
9011 mess with array offsets in the wrong way. This is possible
9012 because pointer arithmetic is restricted to retain within an
9013 object and overflow on pointer differences is undefined as of
9014 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9015 else if (bitpos0 == bitpos1
9017 || (indirect_base0 && DECL_P (base0))
9018 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9020 /* By converting to signed sizetype we cover middle-end pointer
9021 arithmetic which operates on unsigned pointer types of size
9022 type size and ARRAY_REF offsets which are properly sign or
9023 zero extended from their type in case it is narrower than
9025 if (offset0 == NULL_TREE)
9026 offset0 = build_int_cst (ssizetype, 0);
9028 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9029 if (offset1 == NULL_TREE)
9030 offset1 = build_int_cst (ssizetype, 0);
9032 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9035 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9036 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9037 fold_overflow_warning (("assuming pointer wraparound does not "
9038 "occur when comparing P +- C1 with "
9040 WARN_STRICT_OVERFLOW_COMPARISON);
9042 return fold_build2_loc (loc, code, type, offset0, offset1);
9045 /* For non-equal bases we can simplify if they are addresses
9046 declarations with different addresses. */
9047 else if (indirect_base0 && indirect_base1
9048 /* We know that !operand_equal_p (base0, base1, 0)
9049 because the if condition was false. But make
9050 sure two decls are not the same. */
9052 && TREE_CODE (arg0) == ADDR_EXPR
9053 && TREE_CODE (arg1) == ADDR_EXPR
9056 /* Watch for aliases. */
9057 && (!decl_in_symtab_p (base0)
9058 || !decl_in_symtab_p (base1)
9059 || !symtab_node::get_create (base0)->equal_address_to
9060 (symtab_node::get_create (base1))))
9062 if (code == EQ_EXPR)
9063 return omit_two_operands_loc (loc, type, boolean_false_node,
9065 else if (code == NE_EXPR)
9066 return omit_two_operands_loc (loc, type, boolean_true_node,
9069 /* For equal offsets we can simplify to a comparison of the
9071 else if (bitpos0 == bitpos1
9073 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9075 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9076 && ((offset0 == offset1)
9077 || (offset0 && offset1
9078 && operand_equal_p (offset0, offset1, 0))))
9081 base0 = build_fold_addr_expr_loc (loc, base0);
9083 base1 = build_fold_addr_expr_loc (loc, base1);
9084 return fold_build2_loc (loc, code, type, base0, base1);
9088 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9089 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9090 the resulting offset is smaller in absolute value than the
9091 original one and has the same sign. */
9092 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9093 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9094 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9095 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9096 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9097 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9098 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9099 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9101 tree const1 = TREE_OPERAND (arg0, 1);
9102 tree const2 = TREE_OPERAND (arg1, 1);
9103 tree variable1 = TREE_OPERAND (arg0, 0);
9104 tree variable2 = TREE_OPERAND (arg1, 0);
9106 const char * const warnmsg = G_("assuming signed overflow does not "
9107 "occur when combining constants around "
9110 /* Put the constant on the side where it doesn't overflow and is
9111 of lower absolute value and of same sign than before. */
9112 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9113 ? MINUS_EXPR : PLUS_EXPR,
9115 if (!TREE_OVERFLOW (cst)
9116 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9117 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9119 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9120 return fold_build2_loc (loc, code, type,
9122 fold_build2_loc (loc, TREE_CODE (arg1),
9127 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9128 ? MINUS_EXPR : PLUS_EXPR,
9130 if (!TREE_OVERFLOW (cst)
9131 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9132 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9134 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9135 return fold_build2_loc (loc, code, type,
9136 fold_build2_loc (loc, TREE_CODE (arg0),
9143 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9144 signed arithmetic case. That form is created by the compiler
9145 often enough for folding it to be of value. One example is in
9146 computing loop trip counts after Operator Strength Reduction. */
9147 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9148 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9149 && TREE_CODE (arg0) == MULT_EXPR
9150 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9151 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9152 && integer_zerop (arg1))
9154 tree const1 = TREE_OPERAND (arg0, 1);
9155 tree const2 = arg1; /* zero */
9156 tree variable1 = TREE_OPERAND (arg0, 0);
9157 enum tree_code cmp_code = code;
9159 /* Handle unfolded multiplication by zero. */
9160 if (integer_zerop (const1))
9161 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9163 fold_overflow_warning (("assuming signed overflow does not occur when "
9164 "eliminating multiplication in comparison "
9166 WARN_STRICT_OVERFLOW_COMPARISON);
9168 /* If const1 is negative we swap the sense of the comparison. */
9169 if (tree_int_cst_sgn (const1) < 0)
9170 cmp_code = swap_tree_comparison (cmp_code);
9172 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9175 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9179 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9181 tree targ0 = strip_float_extensions (arg0);
9182 tree targ1 = strip_float_extensions (arg1);
9183 tree newtype = TREE_TYPE (targ0);
9185 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9186 newtype = TREE_TYPE (targ1);
9188 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9189 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9190 return fold_build2_loc (loc, code, type,
9191 fold_convert_loc (loc, newtype, targ0),
9192 fold_convert_loc (loc, newtype, targ1));
9194 /* (-a) CMP (-b) -> b CMP a */
9195 if (TREE_CODE (arg0) == NEGATE_EXPR
9196 && TREE_CODE (arg1) == NEGATE_EXPR)
9197 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9198 TREE_OPERAND (arg0, 0));
9200 if (TREE_CODE (arg1) == REAL_CST)
9202 REAL_VALUE_TYPE cst;
9203 cst = TREE_REAL_CST (arg1);
9205 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9206 if (TREE_CODE (arg0) == NEGATE_EXPR)
9207 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9208 TREE_OPERAND (arg0, 0),
9209 build_real (TREE_TYPE (arg1),
9210 real_value_negate (&cst)));
9212 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9213 /* a CMP (-0) -> a CMP 0 */
9214 if (REAL_VALUE_MINUS_ZERO (cst))
9215 return fold_build2_loc (loc, code, type, arg0,
9216 build_real (TREE_TYPE (arg1), dconst0));
9218 /* x != NaN is always true, other ops are always false. */
9219 if (REAL_VALUE_ISNAN (cst)
9220 && ! HONOR_SNANS (arg1))
9222 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9223 return omit_one_operand_loc (loc, type, tem, arg0);
9226 /* Fold comparisons against infinity. */
9227 if (REAL_VALUE_ISINF (cst)
9228 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9230 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9231 if (tem != NULL_TREE)
9236 /* If this is a comparison of a real constant with a PLUS_EXPR
9237 or a MINUS_EXPR of a real constant, we can convert it into a
9238 comparison with a revised real constant as long as no overflow
9239 occurs when unsafe_math_optimizations are enabled. */
9240 if (flag_unsafe_math_optimizations
9241 && TREE_CODE (arg1) == REAL_CST
9242 && (TREE_CODE (arg0) == PLUS_EXPR
9243 || TREE_CODE (arg0) == MINUS_EXPR)
9244 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9245 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9246 ? MINUS_EXPR : PLUS_EXPR,
9247 arg1, TREE_OPERAND (arg0, 1)))
9248 && !TREE_OVERFLOW (tem))
9249 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9251 /* Likewise, we can simplify a comparison of a real constant with
9252 a MINUS_EXPR whose first operand is also a real constant, i.e.
9253 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9254 floating-point types only if -fassociative-math is set. */
9255 if (flag_associative_math
9256 && TREE_CODE (arg1) == REAL_CST
9257 && TREE_CODE (arg0) == MINUS_EXPR
9258 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9259 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9261 && !TREE_OVERFLOW (tem))
9262 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9263 TREE_OPERAND (arg0, 1), tem);
9265 /* Fold comparisons against built-in math functions. */
9266 if (TREE_CODE (arg1) == REAL_CST
9267 && flag_unsafe_math_optimizations
9268 && ! flag_errno_math)
9270 enum built_in_function fcode = builtin_mathfn_code (arg0);
9272 if (fcode != END_BUILTINS)
9274 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9275 if (tem != NULL_TREE)
9281 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9282 && CONVERT_EXPR_P (arg0))
9284 /* If we are widening one operand of an integer comparison,
9285 see if the other operand is similarly being widened. Perhaps we
9286 can do the comparison in the narrower type. */
9287 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9291 /* Or if we are changing signedness. */
9292 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9297 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9298 constant, we can simplify it. */
9299 if (TREE_CODE (arg1) == INTEGER_CST
9300 && (TREE_CODE (arg0) == MIN_EXPR
9301 || TREE_CODE (arg0) == MAX_EXPR)
9302 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9304 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9309 /* Simplify comparison of something with itself. (For IEEE
9310 floating-point, we can only do some of these simplifications.) */
9311 if (operand_equal_p (arg0, arg1, 0))
9316 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9317 || ! HONOR_NANS (arg0))
9318 return constant_boolean_node (1, type);
9323 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9324 || ! HONOR_NANS (arg0))
9325 return constant_boolean_node (1, type);
9326 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9329 /* For NE, we can only do this simplification if integer
9330 or we don't honor IEEE floating point NaNs. */
9331 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9332 && HONOR_NANS (arg0))
9334 /* ... fall through ... */
9337 return constant_boolean_node (0, type);
9343 /* If we are comparing an expression that just has comparisons
9344 of two integer values, arithmetic expressions of those comparisons,
9345 and constants, we can simplify it. There are only three cases
9346 to check: the two values can either be equal, the first can be
9347 greater, or the second can be greater. Fold the expression for
9348 those three values. Since each value must be 0 or 1, we have
9349 eight possibilities, each of which corresponds to the constant 0
9350 or 1 or one of the six possible comparisons.
9352 This handles common cases like (a > b) == 0 but also handles
9353 expressions like ((x > y) - (y > x)) > 0, which supposedly
9354 occur in macroized code. */
9356 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9358 tree cval1 = 0, cval2 = 0;
9361 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9362 /* Don't handle degenerate cases here; they should already
9363 have been handled anyway. */
9364 && cval1 != 0 && cval2 != 0
9365 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9366 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9367 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9368 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9369 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9370 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9371 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9373 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9374 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9376 /* We can't just pass T to eval_subst in case cval1 or cval2
9377 was the same as ARG1. */
9380 = fold_build2_loc (loc, code, type,
9381 eval_subst (loc, arg0, cval1, maxval,
9385 = fold_build2_loc (loc, code, type,
9386 eval_subst (loc, arg0, cval1, maxval,
9390 = fold_build2_loc (loc, code, type,
9391 eval_subst (loc, arg0, cval1, minval,
9395 /* All three of these results should be 0 or 1. Confirm they are.
9396 Then use those values to select the proper code to use. */
9398 if (TREE_CODE (high_result) == INTEGER_CST
9399 && TREE_CODE (equal_result) == INTEGER_CST
9400 && TREE_CODE (low_result) == INTEGER_CST)
9402 /* Make a 3-bit mask with the high-order bit being the
9403 value for `>', the next for '=', and the low for '<'. */
9404 switch ((integer_onep (high_result) * 4)
9405 + (integer_onep (equal_result) * 2)
9406 + integer_onep (low_result))
9410 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9431 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9436 tem = save_expr (build2 (code, type, cval1, cval2));
9437 SET_EXPR_LOCATION (tem, loc);
9440 return fold_build2_loc (loc, code, type, cval1, cval2);
9445 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9446 into a single range test. */
9447 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9448 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9449 && TREE_CODE (arg1) == INTEGER_CST
9450 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9451 && !integer_zerop (TREE_OPERAND (arg0, 1))
9452 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9453 && !TREE_OVERFLOW (arg1))
9455 tem = fold_div_compare (loc, code, type, arg0, arg1);
9456 if (tem != NULL_TREE)
9460 /* Fold ~X op ~Y as Y op X. */
9461 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9462 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9464 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9465 return fold_build2_loc (loc, code, type,
9466 fold_convert_loc (loc, cmp_type,
9467 TREE_OPERAND (arg1, 0)),
9468 TREE_OPERAND (arg0, 0));
9471 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9472 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9473 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9475 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9476 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9477 TREE_OPERAND (arg0, 0),
9478 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9479 fold_convert_loc (loc, cmp_type, arg1)));
9486 /* Subroutine of fold_binary. Optimize complex multiplications of the
9487 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9488 argument EXPR represents the expression "z" of type TYPE. */
9491 fold_mult_zconjz (location_t loc, tree type, tree expr)
9493 tree itype = TREE_TYPE (type);
9494 tree rpart, ipart, tem;
9496 if (TREE_CODE (expr) == COMPLEX_EXPR)
9498 rpart = TREE_OPERAND (expr, 0);
9499 ipart = TREE_OPERAND (expr, 1);
9501 else if (TREE_CODE (expr) == COMPLEX_CST)
9503 rpart = TREE_REALPART (expr);
9504 ipart = TREE_IMAGPART (expr);
9508 expr = save_expr (expr);
9509 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9510 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9513 rpart = save_expr (rpart);
9514 ipart = save_expr (ipart);
9515 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9516 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9517 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9518 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9519 build_zero_cst (itype));
9523 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9524 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9525 guarantees that P and N have the same least significant log2(M) bits.
9526 N is not otherwise constrained. In particular, N is not normalized to
9527 0 <= N < M as is common. In general, the precise value of P is unknown.
9528 M is chosen as large as possible such that constant N can be determined.
9530 Returns M and sets *RESIDUE to N.
9532 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9533 account. This is not always possible due to PR 35705.
9536 static unsigned HOST_WIDE_INT
9537 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9538 bool allow_func_align)
9540 enum tree_code code;
9544 code = TREE_CODE (expr);
9545 if (code == ADDR_EXPR)
9547 unsigned int bitalign;
9548 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9549 *residue /= BITS_PER_UNIT;
9550 return bitalign / BITS_PER_UNIT;
9552 else if (code == POINTER_PLUS_EXPR)
9555 unsigned HOST_WIDE_INT modulus;
9556 enum tree_code inner_code;
9558 op0 = TREE_OPERAND (expr, 0);
9560 modulus = get_pointer_modulus_and_residue (op0, residue,
9563 op1 = TREE_OPERAND (expr, 1);
9565 inner_code = TREE_CODE (op1);
9566 if (inner_code == INTEGER_CST)
9568 *residue += TREE_INT_CST_LOW (op1);
9571 else if (inner_code == MULT_EXPR)
9573 op1 = TREE_OPERAND (op1, 1);
9574 if (TREE_CODE (op1) == INTEGER_CST)
9576 unsigned HOST_WIDE_INT align;
9578 /* Compute the greatest power-of-2 divisor of op1. */
9579 align = TREE_INT_CST_LOW (op1);
9582 /* If align is non-zero and less than *modulus, replace
9583 *modulus with align., If align is 0, then either op1 is 0
9584 or the greatest power-of-2 divisor of op1 doesn't fit in an
9585 unsigned HOST_WIDE_INT. In either case, no additional
9586 constraint is imposed. */
9588 modulus = MIN (modulus, align);
9595 /* If we get here, we were unable to determine anything useful about the
9600 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9601 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9604 vec_cst_ctor_to_array (tree arg, tree *elts)
9606 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9608 if (TREE_CODE (arg) == VECTOR_CST)
9610 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9611 elts[i] = VECTOR_CST_ELT (arg, i);
9613 else if (TREE_CODE (arg) == CONSTRUCTOR)
9615 constructor_elt *elt;
9617 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9618 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9621 elts[i] = elt->value;
9625 for (; i < nelts; i++)
9627 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9631 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9632 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9633 NULL_TREE otherwise. */
9636 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9638 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9640 bool need_ctor = false;
9642 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9643 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9644 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9645 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9648 elts = XALLOCAVEC (tree, nelts * 3);
9649 if (!vec_cst_ctor_to_array (arg0, elts)
9650 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9653 for (i = 0; i < nelts; i++)
9655 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9657 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9662 vec<constructor_elt, va_gc> *v;
9663 vec_alloc (v, nelts);
9664 for (i = 0; i < nelts; i++)
9665 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9666 return build_constructor (type, v);
9669 return build_vector (type, &elts[2 * nelts]);
9672 /* Try to fold a pointer difference of type TYPE two address expressions of
9673 array references AREF0 and AREF1 using location LOC. Return a
9674 simplified expression for the difference or NULL_TREE. */
9677 fold_addr_of_array_ref_difference (location_t loc, tree type,
9678 tree aref0, tree aref1)
9680 tree base0 = TREE_OPERAND (aref0, 0);
9681 tree base1 = TREE_OPERAND (aref1, 0);
9682 tree base_offset = build_int_cst (type, 0);
9684 /* If the bases are array references as well, recurse. If the bases
9685 are pointer indirections compute the difference of the pointers.
9686 If the bases are equal, we are set. */
9687 if ((TREE_CODE (base0) == ARRAY_REF
9688 && TREE_CODE (base1) == ARRAY_REF
9690 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9691 || (INDIRECT_REF_P (base0)
9692 && INDIRECT_REF_P (base1)
9693 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9694 TREE_OPERAND (base0, 0),
9695 TREE_OPERAND (base1, 0))))
9696 || operand_equal_p (base0, base1, 0))
9698 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9699 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9700 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9701 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9702 return fold_build2_loc (loc, PLUS_EXPR, type,
9704 fold_build2_loc (loc, MULT_EXPR, type,
9710 /* If the real or vector real constant CST of type TYPE has an exact
9711 inverse, return it, else return NULL. */
9714 exact_inverse (tree type, tree cst)
9717 tree unit_type, *elts;
9719 unsigned vec_nelts, i;
9721 switch (TREE_CODE (cst))
9724 r = TREE_REAL_CST (cst);
9726 if (exact_real_inverse (TYPE_MODE (type), &r))
9727 return build_real (type, r);
9732 vec_nelts = VECTOR_CST_NELTS (cst);
9733 elts = XALLOCAVEC (tree, vec_nelts);
9734 unit_type = TREE_TYPE (type);
9735 mode = TYPE_MODE (unit_type);
9737 for (i = 0; i < vec_nelts; i++)
9739 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9740 if (!exact_real_inverse (mode, &r))
9742 elts[i] = build_real (unit_type, r);
9745 return build_vector (type, elts);
9752 /* Mask out the tz least significant bits of X of type TYPE where
9753 tz is the number of trailing zeroes in Y. */
9755 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9757 int tz = wi::ctz (y);
9759 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9763 /* Return true when T is an address and is known to be nonzero.
9764 For floating point we further ensure that T is not denormal.
9765 Similar logic is present in nonzero_address in rtlanal.h.
9767 If the return value is based on the assumption that signed overflow
9768 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9769 change *STRICT_OVERFLOW_P. */
9772 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9774 tree type = TREE_TYPE (t);
9775 enum tree_code code;
9777 /* Doing something useful for floating point would need more work. */
9778 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9781 code = TREE_CODE (t);
9782 switch (TREE_CODE_CLASS (code))
9785 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9788 case tcc_comparison:
9789 return tree_binary_nonzero_warnv_p (code, type,
9790 TREE_OPERAND (t, 0),
9791 TREE_OPERAND (t, 1),
9794 case tcc_declaration:
9796 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9804 case TRUTH_NOT_EXPR:
9805 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9808 case TRUTH_AND_EXPR:
9810 case TRUTH_XOR_EXPR:
9811 return tree_binary_nonzero_warnv_p (code, type,
9812 TREE_OPERAND (t, 0),
9813 TREE_OPERAND (t, 1),
9821 case WITH_SIZE_EXPR:
9823 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9828 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9832 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9837 tree fndecl = get_callee_fndecl (t);
9838 if (!fndecl) return false;
9839 if (flag_delete_null_pointer_checks && !flag_check_new
9840 && DECL_IS_OPERATOR_NEW (fndecl)
9841 && !TREE_NOTHROW (fndecl))
9843 if (flag_delete_null_pointer_checks
9844 && lookup_attribute ("returns_nonnull",
9845 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9847 return alloca_call_p (t);
9856 /* Return true when T is an address and is known to be nonzero.
9857 Handle warnings about undefined signed overflow. */
9860 tree_expr_nonzero_p (tree t)
9862 bool ret, strict_overflow_p;
9864 strict_overflow_p = false;
9865 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9866 if (strict_overflow_p)
9867 fold_overflow_warning (("assuming signed overflow does not occur when "
9868 "determining that expression is always "
9870 WARN_STRICT_OVERFLOW_MISC);
9874 /* Fold a binary expression of code CODE and type TYPE with operands
9875 OP0 and OP1. LOC is the location of the resulting expression.
9876 Return the folded expression if folding is successful. Otherwise,
9877 return NULL_TREE. */
9880 fold_binary_loc (location_t loc,
9881 enum tree_code code, tree type, tree op0, tree op1)
9883 enum tree_code_class kind = TREE_CODE_CLASS (code);
9884 tree arg0, arg1, tem;
9885 tree t1 = NULL_TREE;
9886 bool strict_overflow_p;
9889 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9890 && TREE_CODE_LENGTH (code) == 2
9892 && op1 != NULL_TREE);
9897 /* Strip any conversions that don't change the mode. This is
9898 safe for every expression, except for a comparison expression
9899 because its signedness is derived from its operands. So, in
9900 the latter case, only strip conversions that don't change the
9901 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9904 Note that this is done as an internal manipulation within the
9905 constant folder, in order to find the simplest representation
9906 of the arguments so that their form can be studied. In any
9907 cases, the appropriate type conversions should be put back in
9908 the tree that will get out of the constant folder. */
9910 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9912 STRIP_SIGN_NOPS (arg0);
9913 STRIP_SIGN_NOPS (arg1);
9921 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9922 constant but we can't do arithmetic on them. */
9923 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9925 tem = const_binop (code, type, arg0, arg1);
9926 if (tem != NULL_TREE)
9928 if (TREE_TYPE (tem) != type)
9929 tem = fold_convert_loc (loc, type, tem);
9934 /* If this is a commutative operation, and ARG0 is a constant, move it
9935 to ARG1 to reduce the number of tests below. */
9936 if (commutative_tree_code (code)
9937 && tree_swap_operands_p (arg0, arg1, true))
9938 return fold_build2_loc (loc, code, type, op1, op0);
9940 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9941 to ARG1 to reduce the number of tests below. */
9942 if (kind == tcc_comparison
9943 && tree_swap_operands_p (arg0, arg1, true))
9944 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9946 tem = generic_simplify (loc, code, type, op0, op1);
9950 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9952 First check for cases where an arithmetic operation is applied to a
9953 compound, conditional, or comparison operation. Push the arithmetic
9954 operation inside the compound or conditional to see if any folding
9955 can then be done. Convert comparison to conditional for this purpose.
9956 The also optimizes non-constant cases that used to be done in
9959 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9960 one of the operands is a comparison and the other is a comparison, a
9961 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9962 code below would make the expression more complex. Change it to a
9963 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9964 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9966 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9967 || code == EQ_EXPR || code == NE_EXPR)
9968 && TREE_CODE (type) != VECTOR_TYPE
9969 && ((truth_value_p (TREE_CODE (arg0))
9970 && (truth_value_p (TREE_CODE (arg1))
9971 || (TREE_CODE (arg1) == BIT_AND_EXPR
9972 && integer_onep (TREE_OPERAND (arg1, 1)))))
9973 || (truth_value_p (TREE_CODE (arg1))
9974 && (truth_value_p (TREE_CODE (arg0))
9975 || (TREE_CODE (arg0) == BIT_AND_EXPR
9976 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9978 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9979 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9982 fold_convert_loc (loc, boolean_type_node, arg0),
9983 fold_convert_loc (loc, boolean_type_node, arg1));
9985 if (code == EQ_EXPR)
9986 tem = invert_truthvalue_loc (loc, tem);
9988 return fold_convert_loc (loc, type, tem);
9991 if (TREE_CODE_CLASS (code) == tcc_binary
9992 || TREE_CODE_CLASS (code) == tcc_comparison)
9994 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9996 tem = fold_build2_loc (loc, code, type,
9997 fold_convert_loc (loc, TREE_TYPE (op0),
9998 TREE_OPERAND (arg0, 1)), op1);
9999 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10002 if (TREE_CODE (arg1) == COMPOUND_EXPR
10003 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10005 tem = fold_build2_loc (loc, code, type, op0,
10006 fold_convert_loc (loc, TREE_TYPE (op1),
10007 TREE_OPERAND (arg1, 1)));
10008 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10012 if (TREE_CODE (arg0) == COND_EXPR
10013 || TREE_CODE (arg0) == VEC_COND_EXPR
10014 || COMPARISON_CLASS_P (arg0))
10016 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10018 /*cond_first_p=*/1);
10019 if (tem != NULL_TREE)
10023 if (TREE_CODE (arg1) == COND_EXPR
10024 || TREE_CODE (arg1) == VEC_COND_EXPR
10025 || COMPARISON_CLASS_P (arg1))
10027 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10029 /*cond_first_p=*/0);
10030 if (tem != NULL_TREE)
10038 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10039 if (TREE_CODE (arg0) == ADDR_EXPR
10040 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10042 tree iref = TREE_OPERAND (arg0, 0);
10043 return fold_build2 (MEM_REF, type,
10044 TREE_OPERAND (iref, 0),
10045 int_const_binop (PLUS_EXPR, arg1,
10046 TREE_OPERAND (iref, 1)));
10049 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10050 if (TREE_CODE (arg0) == ADDR_EXPR
10051 && handled_component_p (TREE_OPERAND (arg0, 0)))
10054 HOST_WIDE_INT coffset;
10055 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10059 return fold_build2 (MEM_REF, type,
10060 build_fold_addr_expr (base),
10061 int_const_binop (PLUS_EXPR, arg1,
10062 size_int (coffset)));
10067 case POINTER_PLUS_EXPR:
10068 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10069 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10070 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10071 return fold_convert_loc (loc, type,
10072 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10073 fold_convert_loc (loc, sizetype,
10075 fold_convert_loc (loc, sizetype,
10081 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10083 /* X + (X / CST) * -CST is X % CST. */
10084 if (TREE_CODE (arg1) == MULT_EXPR
10085 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10086 && operand_equal_p (arg0,
10087 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10089 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10090 tree cst1 = TREE_OPERAND (arg1, 1);
10091 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10093 if (sum && integer_zerop (sum))
10094 return fold_convert_loc (loc, type,
10095 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10096 TREE_TYPE (arg0), arg0,
10101 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10102 one. Make sure the type is not saturating and has the signedness of
10103 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10104 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10105 if ((TREE_CODE (arg0) == MULT_EXPR
10106 || TREE_CODE (arg1) == MULT_EXPR)
10107 && !TYPE_SATURATING (type)
10108 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10109 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10110 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10112 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10117 if (! FLOAT_TYPE_P (type))
10119 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10120 with a constant, and the two constants have no bits in common,
10121 we should treat this as a BIT_IOR_EXPR since this may produce more
10122 simplifications. */
10123 if (TREE_CODE (arg0) == BIT_AND_EXPR
10124 && TREE_CODE (arg1) == BIT_AND_EXPR
10125 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10126 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10127 && wi::bit_and (TREE_OPERAND (arg0, 1),
10128 TREE_OPERAND (arg1, 1)) == 0)
10130 code = BIT_IOR_EXPR;
10134 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10135 (plus (plus (mult) (mult)) (foo)) so that we can
10136 take advantage of the factoring cases below. */
10137 if (ANY_INTEGRAL_TYPE_P (type)
10138 && TYPE_OVERFLOW_WRAPS (type)
10139 && (((TREE_CODE (arg0) == PLUS_EXPR
10140 || TREE_CODE (arg0) == MINUS_EXPR)
10141 && TREE_CODE (arg1) == MULT_EXPR)
10142 || ((TREE_CODE (arg1) == PLUS_EXPR
10143 || TREE_CODE (arg1) == MINUS_EXPR)
10144 && TREE_CODE (arg0) == MULT_EXPR)))
10146 tree parg0, parg1, parg, marg;
10147 enum tree_code pcode;
10149 if (TREE_CODE (arg1) == MULT_EXPR)
10150 parg = arg0, marg = arg1;
10152 parg = arg1, marg = arg0;
10153 pcode = TREE_CODE (parg);
10154 parg0 = TREE_OPERAND (parg, 0);
10155 parg1 = TREE_OPERAND (parg, 1);
10156 STRIP_NOPS (parg0);
10157 STRIP_NOPS (parg1);
10159 if (TREE_CODE (parg0) == MULT_EXPR
10160 && TREE_CODE (parg1) != MULT_EXPR)
10161 return fold_build2_loc (loc, pcode, type,
10162 fold_build2_loc (loc, PLUS_EXPR, type,
10163 fold_convert_loc (loc, type,
10165 fold_convert_loc (loc, type,
10167 fold_convert_loc (loc, type, parg1));
10168 if (TREE_CODE (parg0) != MULT_EXPR
10169 && TREE_CODE (parg1) == MULT_EXPR)
10171 fold_build2_loc (loc, PLUS_EXPR, type,
10172 fold_convert_loc (loc, type, parg0),
10173 fold_build2_loc (loc, pcode, type,
10174 fold_convert_loc (loc, type, marg),
10175 fold_convert_loc (loc, type,
10181 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10182 to __complex__ ( x, y ). This is not the same for SNaNs or
10183 if signed zeros are involved. */
10184 if (!HONOR_SNANS (element_mode (arg0))
10185 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10186 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10188 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10189 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10190 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10191 bool arg0rz = false, arg0iz = false;
10192 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10193 || (arg0i && (arg0iz = real_zerop (arg0i))))
10195 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10196 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10197 if (arg0rz && arg1i && real_zerop (arg1i))
10199 tree rp = arg1r ? arg1r
10200 : build1 (REALPART_EXPR, rtype, arg1);
10201 tree ip = arg0i ? arg0i
10202 : build1 (IMAGPART_EXPR, rtype, arg0);
10203 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10205 else if (arg0iz && arg1r && real_zerop (arg1r))
10207 tree rp = arg0r ? arg0r
10208 : build1 (REALPART_EXPR, rtype, arg0);
10209 tree ip = arg1i ? arg1i
10210 : build1 (IMAGPART_EXPR, rtype, arg1);
10211 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10216 if (flag_unsafe_math_optimizations
10217 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10218 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10219 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10222 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10223 We associate floats only if the user has specified
10224 -fassociative-math. */
10225 if (flag_associative_math
10226 && TREE_CODE (arg1) == PLUS_EXPR
10227 && TREE_CODE (arg0) != MULT_EXPR)
10229 tree tree10 = TREE_OPERAND (arg1, 0);
10230 tree tree11 = TREE_OPERAND (arg1, 1);
10231 if (TREE_CODE (tree11) == MULT_EXPR
10232 && TREE_CODE (tree10) == MULT_EXPR)
10235 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10236 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10239 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10240 We associate floats only if the user has specified
10241 -fassociative-math. */
10242 if (flag_associative_math
10243 && TREE_CODE (arg0) == PLUS_EXPR
10244 && TREE_CODE (arg1) != MULT_EXPR)
10246 tree tree00 = TREE_OPERAND (arg0, 0);
10247 tree tree01 = TREE_OPERAND (arg0, 1);
10248 if (TREE_CODE (tree01) == MULT_EXPR
10249 && TREE_CODE (tree00) == MULT_EXPR)
10252 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10253 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10259 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10260 is a rotate of A by C1 bits. */
10261 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10262 is a rotate of A by B bits. */
10264 enum tree_code code0, code1;
10266 code0 = TREE_CODE (arg0);
10267 code1 = TREE_CODE (arg1);
10268 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10269 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10270 && operand_equal_p (TREE_OPERAND (arg0, 0),
10271 TREE_OPERAND (arg1, 0), 0)
10272 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10273 TYPE_UNSIGNED (rtype))
10274 /* Only create rotates in complete modes. Other cases are not
10275 expanded properly. */
10276 && (element_precision (rtype)
10277 == element_precision (TYPE_MODE (rtype))))
10279 tree tree01, tree11;
10280 enum tree_code code01, code11;
10282 tree01 = TREE_OPERAND (arg0, 1);
10283 tree11 = TREE_OPERAND (arg1, 1);
10284 STRIP_NOPS (tree01);
10285 STRIP_NOPS (tree11);
10286 code01 = TREE_CODE (tree01);
10287 code11 = TREE_CODE (tree11);
10288 if (code01 == INTEGER_CST
10289 && code11 == INTEGER_CST
10290 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10291 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10293 tem = build2_loc (loc, LROTATE_EXPR,
10294 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10295 TREE_OPERAND (arg0, 0),
10296 code0 == LSHIFT_EXPR
10297 ? TREE_OPERAND (arg0, 1)
10298 : TREE_OPERAND (arg1, 1));
10299 return fold_convert_loc (loc, type, tem);
10301 else if (code11 == MINUS_EXPR)
10303 tree tree110, tree111;
10304 tree110 = TREE_OPERAND (tree11, 0);
10305 tree111 = TREE_OPERAND (tree11, 1);
10306 STRIP_NOPS (tree110);
10307 STRIP_NOPS (tree111);
10308 if (TREE_CODE (tree110) == INTEGER_CST
10309 && 0 == compare_tree_int (tree110,
10311 (TREE_TYPE (TREE_OPERAND
10313 && operand_equal_p (tree01, tree111, 0))
10315 fold_convert_loc (loc, type,
10316 build2 ((code0 == LSHIFT_EXPR
10319 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10320 TREE_OPERAND (arg0, 0),
10321 TREE_OPERAND (arg0, 1)));
10323 else if (code01 == MINUS_EXPR)
10325 tree tree010, tree011;
10326 tree010 = TREE_OPERAND (tree01, 0);
10327 tree011 = TREE_OPERAND (tree01, 1);
10328 STRIP_NOPS (tree010);
10329 STRIP_NOPS (tree011);
10330 if (TREE_CODE (tree010) == INTEGER_CST
10331 && 0 == compare_tree_int (tree010,
10333 (TREE_TYPE (TREE_OPERAND
10335 && operand_equal_p (tree11, tree011, 0))
10336 return fold_convert_loc
10338 build2 ((code0 != LSHIFT_EXPR
10341 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10342 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
10348 /* In most languages, can't associate operations on floats through
10349 parentheses. Rather than remember where the parentheses were, we
10350 don't associate floats at all, unless the user has specified
10351 -fassociative-math.
10352 And, we need to make sure type is not saturating. */
10354 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10355 && !TYPE_SATURATING (type))
10357 tree var0, con0, lit0, minus_lit0;
10358 tree var1, con1, lit1, minus_lit1;
10362 /* Split both trees into variables, constants, and literals. Then
10363 associate each group together, the constants with literals,
10364 then the result with variables. This increases the chances of
10365 literals being recombined later and of generating relocatable
10366 expressions for the sum of a constant and literal. */
10367 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10368 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10369 code == MINUS_EXPR);
10371 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10372 if (code == MINUS_EXPR)
10375 /* With undefined overflow prefer doing association in a type
10376 which wraps on overflow, if that is one of the operand types. */
10377 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10378 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10380 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10381 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10382 atype = TREE_TYPE (arg0);
10383 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10384 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10385 atype = TREE_TYPE (arg1);
10386 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10389 /* With undefined overflow we can only associate constants with one
10390 variable, and constants whose association doesn't overflow. */
10391 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10392 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10398 bool one_neg = false;
10400 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10402 tmp0 = TREE_OPERAND (tmp0, 0);
10403 one_neg = !one_neg;
10405 if (CONVERT_EXPR_P (tmp0)
10406 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10407 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10408 <= TYPE_PRECISION (atype)))
10409 tmp0 = TREE_OPERAND (tmp0, 0);
10410 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10412 tmp1 = TREE_OPERAND (tmp1, 0);
10413 one_neg = !one_neg;
10415 if (CONVERT_EXPR_P (tmp1)
10416 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10417 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10418 <= TYPE_PRECISION (atype)))
10419 tmp1 = TREE_OPERAND (tmp1, 0);
10420 /* The only case we can still associate with two variables
10421 is if they cancel out. */
10423 || !operand_equal_p (tmp0, tmp1, 0))
10428 /* Only do something if we found more than two objects. Otherwise,
10429 nothing has changed and we risk infinite recursion. */
10431 && (2 < ((var0 != 0) + (var1 != 0)
10432 + (con0 != 0) + (con1 != 0)
10433 + (lit0 != 0) + (lit1 != 0)
10434 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10436 bool any_overflows = false;
10437 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10438 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10439 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10440 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10441 var0 = associate_trees (loc, var0, var1, code, atype);
10442 con0 = associate_trees (loc, con0, con1, code, atype);
10443 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10444 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10447 /* Preserve the MINUS_EXPR if the negative part of the literal is
10448 greater than the positive part. Otherwise, the multiplicative
10449 folding code (i.e extract_muldiv) may be fooled in case
10450 unsigned constants are subtracted, like in the following
10451 example: ((X*2 + 4) - 8U)/2. */
10452 if (minus_lit0 && lit0)
10454 if (TREE_CODE (lit0) == INTEGER_CST
10455 && TREE_CODE (minus_lit0) == INTEGER_CST
10456 && tree_int_cst_lt (lit0, minus_lit0))
10458 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10459 MINUS_EXPR, atype);
10464 lit0 = associate_trees (loc, lit0, minus_lit0,
10465 MINUS_EXPR, atype);
10470 /* Don't introduce overflows through reassociation. */
10472 && ((lit0 && TREE_OVERFLOW_P (lit0))
10473 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10480 fold_convert_loc (loc, type,
10481 associate_trees (loc, var0, minus_lit0,
10482 MINUS_EXPR, atype));
10485 con0 = associate_trees (loc, con0, minus_lit0,
10486 MINUS_EXPR, atype);
10488 fold_convert_loc (loc, type,
10489 associate_trees (loc, var0, con0,
10490 PLUS_EXPR, atype));
10494 con0 = associate_trees (loc, con0, lit0, code, atype);
10496 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10504 /* Pointer simplifications for subtraction, simple reassociations. */
10505 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10507 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10508 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10509 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10511 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10512 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10513 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10514 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10515 return fold_build2_loc (loc, PLUS_EXPR, type,
10516 fold_build2_loc (loc, MINUS_EXPR, type,
10518 fold_build2_loc (loc, MINUS_EXPR, type,
10521 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10522 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10524 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10525 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10526 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10527 fold_convert_loc (loc, type, arg1));
10529 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10531 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10533 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10535 tree arg10 = fold_convert_loc (loc, type,
10536 TREE_OPERAND (arg1, 0));
10537 tree arg11 = fold_convert_loc (loc, type,
10538 TREE_OPERAND (arg1, 1));
10539 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10540 fold_convert_loc (loc, type, arg0),
10543 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10546 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10547 if (TREE_CODE (arg0) == NEGATE_EXPR
10548 && negate_expr_p (arg1)
10549 && reorder_operands_p (arg0, arg1))
10550 return fold_build2_loc (loc, MINUS_EXPR, type,
10551 fold_convert_loc (loc, type,
10552 negate_expr (arg1)),
10553 fold_convert_loc (loc, type,
10554 TREE_OPERAND (arg0, 0)));
10556 /* X - (X / Y) * Y is X % Y. */
10557 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10558 && TREE_CODE (arg1) == MULT_EXPR
10559 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10560 && operand_equal_p (arg0,
10561 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10562 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10563 TREE_OPERAND (arg1, 1), 0))
10565 fold_convert_loc (loc, type,
10566 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10567 arg0, TREE_OPERAND (arg1, 1)));
10569 if (! FLOAT_TYPE_P (type))
10571 /* Fold A - (A & B) into ~B & A. */
10572 if (!TREE_SIDE_EFFECTS (arg0)
10573 && TREE_CODE (arg1) == BIT_AND_EXPR)
10575 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10577 tree arg10 = fold_convert_loc (loc, type,
10578 TREE_OPERAND (arg1, 0));
10579 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10580 fold_build1_loc (loc, BIT_NOT_EXPR,
10582 fold_convert_loc (loc, type, arg0));
10584 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10586 tree arg11 = fold_convert_loc (loc,
10587 type, TREE_OPERAND (arg1, 1));
10588 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10589 fold_build1_loc (loc, BIT_NOT_EXPR,
10591 fold_convert_loc (loc, type, arg0));
10595 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10596 any power of 2 minus 1. */
10597 if (TREE_CODE (arg0) == BIT_AND_EXPR
10598 && TREE_CODE (arg1) == BIT_AND_EXPR
10599 && operand_equal_p (TREE_OPERAND (arg0, 0),
10600 TREE_OPERAND (arg1, 0), 0))
10602 tree mask0 = TREE_OPERAND (arg0, 1);
10603 tree mask1 = TREE_OPERAND (arg1, 1);
10604 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10606 if (operand_equal_p (tem, mask1, 0))
10608 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10609 TREE_OPERAND (arg0, 0), mask1);
10610 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10615 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10616 __complex__ ( x, -y ). This is not the same for SNaNs or if
10617 signed zeros are involved. */
10618 if (!HONOR_SNANS (element_mode (arg0))
10619 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10620 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10622 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10623 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10624 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10625 bool arg0rz = false, arg0iz = false;
10626 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10627 || (arg0i && (arg0iz = real_zerop (arg0i))))
10629 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10630 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10631 if (arg0rz && arg1i && real_zerop (arg1i))
10633 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10635 : build1 (REALPART_EXPR, rtype, arg1));
10636 tree ip = arg0i ? arg0i
10637 : build1 (IMAGPART_EXPR, rtype, arg0);
10638 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10640 else if (arg0iz && arg1r && real_zerop (arg1r))
10642 tree rp = arg0r ? arg0r
10643 : build1 (REALPART_EXPR, rtype, arg0);
10644 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10646 : build1 (IMAGPART_EXPR, rtype, arg1));
10647 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10652 /* A - B -> A + (-B) if B is easily negatable. */
10653 if (negate_expr_p (arg1)
10654 && !TYPE_OVERFLOW_SANITIZED (type)
10655 && ((FLOAT_TYPE_P (type)
10656 /* Avoid this transformation if B is a positive REAL_CST. */
10657 && (TREE_CODE (arg1) != REAL_CST
10658 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10659 || INTEGRAL_TYPE_P (type)))
10660 return fold_build2_loc (loc, PLUS_EXPR, type,
10661 fold_convert_loc (loc, type, arg0),
10662 fold_convert_loc (loc, type,
10663 negate_expr (arg1)));
10665 /* Try folding difference of addresses. */
10667 HOST_WIDE_INT diff;
10669 if ((TREE_CODE (arg0) == ADDR_EXPR
10670 || TREE_CODE (arg1) == ADDR_EXPR)
10671 && ptr_difference_const (arg0, arg1, &diff))
10672 return build_int_cst_type (type, diff);
10675 /* Fold &a[i] - &a[j] to i-j. */
10676 if (TREE_CODE (arg0) == ADDR_EXPR
10677 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10678 && TREE_CODE (arg1) == ADDR_EXPR
10679 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10681 tree tem = fold_addr_of_array_ref_difference (loc, type,
10682 TREE_OPERAND (arg0, 0),
10683 TREE_OPERAND (arg1, 0));
10688 if (FLOAT_TYPE_P (type)
10689 && flag_unsafe_math_optimizations
10690 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10691 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10692 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10695 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10696 one. Make sure the type is not saturating and has the signedness of
10697 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10698 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10699 if ((TREE_CODE (arg0) == MULT_EXPR
10700 || TREE_CODE (arg1) == MULT_EXPR)
10701 && !TYPE_SATURATING (type)
10702 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10703 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10704 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10706 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10714 /* (-A) * (-B) -> A * B */
10715 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10716 return fold_build2_loc (loc, MULT_EXPR, type,
10717 fold_convert_loc (loc, type,
10718 TREE_OPERAND (arg0, 0)),
10719 fold_convert_loc (loc, type,
10720 negate_expr (arg1)));
10721 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10722 return fold_build2_loc (loc, MULT_EXPR, type,
10723 fold_convert_loc (loc, type,
10724 negate_expr (arg0)),
10725 fold_convert_loc (loc, type,
10726 TREE_OPERAND (arg1, 0)));
10728 if (! FLOAT_TYPE_P (type))
10730 /* Transform x * -C into -x * C if x is easily negatable. */
10731 if (TREE_CODE (arg1) == INTEGER_CST
10732 && tree_int_cst_sgn (arg1) == -1
10733 && negate_expr_p (arg0)
10734 && (tem = negate_expr (arg1)) != arg1
10735 && !TREE_OVERFLOW (tem))
10736 return fold_build2_loc (loc, MULT_EXPR, type,
10737 fold_convert_loc (loc, type,
10738 negate_expr (arg0)),
10741 /* (a * (1 << b)) is (a << b) */
10742 if (TREE_CODE (arg1) == LSHIFT_EXPR
10743 && integer_onep (TREE_OPERAND (arg1, 0)))
10744 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10745 TREE_OPERAND (arg1, 1));
10746 if (TREE_CODE (arg0) == LSHIFT_EXPR
10747 && integer_onep (TREE_OPERAND (arg0, 0)))
10748 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10749 TREE_OPERAND (arg0, 1));
10751 /* (A + A) * C -> A * 2 * C */
10752 if (TREE_CODE (arg0) == PLUS_EXPR
10753 && TREE_CODE (arg1) == INTEGER_CST
10754 && operand_equal_p (TREE_OPERAND (arg0, 0),
10755 TREE_OPERAND (arg0, 1), 0))
10756 return fold_build2_loc (loc, MULT_EXPR, type,
10757 omit_one_operand_loc (loc, type,
10758 TREE_OPERAND (arg0, 0),
10759 TREE_OPERAND (arg0, 1)),
10760 fold_build2_loc (loc, MULT_EXPR, type,
10761 build_int_cst (type, 2) , arg1));
10763 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10764 sign-changing only. */
10765 if (TREE_CODE (arg1) == INTEGER_CST
10766 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10767 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10768 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10770 strict_overflow_p = false;
10771 if (TREE_CODE (arg1) == INTEGER_CST
10772 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10773 &strict_overflow_p)))
10775 if (strict_overflow_p)
10776 fold_overflow_warning (("assuming signed overflow does not "
10777 "occur when simplifying "
10779 WARN_STRICT_OVERFLOW_MISC);
10780 return fold_convert_loc (loc, type, tem);
10783 /* Optimize z * conj(z) for integer complex numbers. */
10784 if (TREE_CODE (arg0) == CONJ_EXPR
10785 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10786 return fold_mult_zconjz (loc, type, arg1);
10787 if (TREE_CODE (arg1) == CONJ_EXPR
10788 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10789 return fold_mult_zconjz (loc, type, arg0);
10793 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10794 the result for floating point types due to rounding so it is applied
10795 only if -fassociative-math was specify. */
10796 if (flag_associative_math
10797 && TREE_CODE (arg0) == RDIV_EXPR
10798 && TREE_CODE (arg1) == REAL_CST
10799 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10801 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10804 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10805 TREE_OPERAND (arg0, 1));
10808 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10809 if (operand_equal_p (arg0, arg1, 0))
10811 tree tem = fold_strip_sign_ops (arg0);
10812 if (tem != NULL_TREE)
10814 tem = fold_convert_loc (loc, type, tem);
10815 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10819 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10820 This is not the same for NaNs or if signed zeros are
10822 if (!HONOR_NANS (arg0)
10823 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10824 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10825 && TREE_CODE (arg1) == COMPLEX_CST
10826 && real_zerop (TREE_REALPART (arg1)))
10828 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10829 if (real_onep (TREE_IMAGPART (arg1)))
10831 fold_build2_loc (loc, COMPLEX_EXPR, type,
10832 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10834 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10835 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10837 fold_build2_loc (loc, COMPLEX_EXPR, type,
10838 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10839 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10843 /* Optimize z * conj(z) for floating point complex numbers.
10844 Guarded by flag_unsafe_math_optimizations as non-finite
10845 imaginary components don't produce scalar results. */
10846 if (flag_unsafe_math_optimizations
10847 && TREE_CODE (arg0) == CONJ_EXPR
10848 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10849 return fold_mult_zconjz (loc, type, arg1);
10850 if (flag_unsafe_math_optimizations
10851 && TREE_CODE (arg1) == CONJ_EXPR
10852 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10853 return fold_mult_zconjz (loc, type, arg0);
10855 if (flag_unsafe_math_optimizations)
10857 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10858 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10860 /* Optimizations of root(...)*root(...). */
10861 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10864 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10865 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10867 /* Optimize sqrt(x)*sqrt(x) as x. */
10868 if (BUILTIN_SQRT_P (fcode0)
10869 && operand_equal_p (arg00, arg10, 0)
10870 && ! HONOR_SNANS (element_mode (type)))
10873 /* Optimize root(x)*root(y) as root(x*y). */
10874 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10875 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10876 return build_call_expr_loc (loc, rootfn, 1, arg);
10879 /* Optimize expN(x)*expN(y) as expN(x+y). */
10880 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10882 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10883 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10884 CALL_EXPR_ARG (arg0, 0),
10885 CALL_EXPR_ARG (arg1, 0));
10886 return build_call_expr_loc (loc, expfn, 1, arg);
10889 /* Optimizations of pow(...)*pow(...). */
10890 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10891 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10892 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10894 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10895 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10896 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10897 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10899 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10900 if (operand_equal_p (arg01, arg11, 0))
10902 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10903 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10905 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10908 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10909 if (operand_equal_p (arg00, arg10, 0))
10911 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10912 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10914 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10918 /* Optimize tan(x)*cos(x) as sin(x). */
10919 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10920 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10921 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10922 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10923 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10924 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10925 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10926 CALL_EXPR_ARG (arg1, 0), 0))
10928 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10930 if (sinfn != NULL_TREE)
10931 return build_call_expr_loc (loc, sinfn, 1,
10932 CALL_EXPR_ARG (arg0, 0));
10935 /* Optimize x*pow(x,c) as pow(x,c+1). */
10936 if (fcode1 == BUILT_IN_POW
10937 || fcode1 == BUILT_IN_POWF
10938 || fcode1 == BUILT_IN_POWL)
10940 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10941 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10942 if (TREE_CODE (arg11) == REAL_CST
10943 && !TREE_OVERFLOW (arg11)
10944 && operand_equal_p (arg0, arg10, 0))
10946 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10950 c = TREE_REAL_CST (arg11);
10951 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10952 arg = build_real (type, c);
10953 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10957 /* Optimize pow(x,c)*x as pow(x,c+1). */
10958 if (fcode0 == BUILT_IN_POW
10959 || fcode0 == BUILT_IN_POWF
10960 || fcode0 == BUILT_IN_POWL)
10962 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10963 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10964 if (TREE_CODE (arg01) == REAL_CST
10965 && !TREE_OVERFLOW (arg01)
10966 && operand_equal_p (arg1, arg00, 0))
10968 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10972 c = TREE_REAL_CST (arg01);
10973 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10974 arg = build_real (type, c);
10975 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10979 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10980 if (!in_gimple_form
10982 && operand_equal_p (arg0, arg1, 0))
10984 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10988 tree arg = build_real (type, dconst2);
10989 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10998 /* ~X | X is -1. */
10999 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11000 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11002 t1 = build_zero_cst (type);
11003 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11004 return omit_one_operand_loc (loc, type, t1, arg1);
11007 /* X | ~X is -1. */
11008 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11009 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11011 t1 = build_zero_cst (type);
11012 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11013 return omit_one_operand_loc (loc, type, t1, arg0);
11016 /* Canonicalize (X & C1) | C2. */
11017 if (TREE_CODE (arg0) == BIT_AND_EXPR
11018 && TREE_CODE (arg1) == INTEGER_CST
11019 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11021 int width = TYPE_PRECISION (type), w;
11022 wide_int c1 = TREE_OPERAND (arg0, 1);
11023 wide_int c2 = arg1;
11025 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11026 if ((c1 & c2) == c1)
11027 return omit_one_operand_loc (loc, type, arg1,
11028 TREE_OPERAND (arg0, 0));
11030 wide_int msk = wi::mask (width, false,
11031 TYPE_PRECISION (TREE_TYPE (arg1)));
11033 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11034 if (msk.and_not (c1 | c2) == 0)
11035 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11036 TREE_OPERAND (arg0, 0), arg1);
11038 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11039 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11040 mode which allows further optimizations. */
11043 wide_int c3 = c1.and_not (c2);
11044 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11046 wide_int mask = wi::mask (w, false,
11047 TYPE_PRECISION (type));
11048 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11056 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11057 fold_build2_loc (loc, BIT_AND_EXPR, type,
11058 TREE_OPERAND (arg0, 0),
11059 wide_int_to_tree (type,
11064 /* (X & ~Y) | (~X & Y) is X ^ Y */
11065 if (TREE_CODE (arg0) == BIT_AND_EXPR
11066 && TREE_CODE (arg1) == BIT_AND_EXPR)
11068 tree a0, a1, l0, l1, n0, n1;
11070 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11071 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11073 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11074 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11076 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11077 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11079 if ((operand_equal_p (n0, a0, 0)
11080 && operand_equal_p (n1, a1, 0))
11081 || (operand_equal_p (n0, a1, 0)
11082 && operand_equal_p (n1, a0, 0)))
11083 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11086 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11087 if (t1 != NULL_TREE)
11090 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11092 This results in more efficient code for machines without a NAND
11093 instruction. Combine will canonicalize to the first form
11094 which will allow use of NAND instructions provided by the
11095 backend if they exist. */
11096 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11097 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11100 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11101 build2 (BIT_AND_EXPR, type,
11102 fold_convert_loc (loc, type,
11103 TREE_OPERAND (arg0, 0)),
11104 fold_convert_loc (loc, type,
11105 TREE_OPERAND (arg1, 0))));
11108 /* See if this can be simplified into a rotate first. If that
11109 is unsuccessful continue in the association code. */
11113 /* ~X ^ X is -1. */
11114 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11115 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11117 t1 = build_zero_cst (type);
11118 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11119 return omit_one_operand_loc (loc, type, t1, arg1);
11122 /* X ^ ~X is -1. */
11123 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11124 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11126 t1 = build_zero_cst (type);
11127 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11128 return omit_one_operand_loc (loc, type, t1, arg0);
11131 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11132 with a constant, and the two constants have no bits in common,
11133 we should treat this as a BIT_IOR_EXPR since this may produce more
11134 simplifications. */
11135 if (TREE_CODE (arg0) == BIT_AND_EXPR
11136 && TREE_CODE (arg1) == BIT_AND_EXPR
11137 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11138 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11139 && wi::bit_and (TREE_OPERAND (arg0, 1),
11140 TREE_OPERAND (arg1, 1)) == 0)
11142 code = BIT_IOR_EXPR;
11146 /* (X | Y) ^ X -> Y & ~ X*/
11147 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11148 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11150 tree t2 = TREE_OPERAND (arg0, 1);
11151 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11153 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11154 fold_convert_loc (loc, type, t2),
11155 fold_convert_loc (loc, type, t1));
11159 /* (Y | X) ^ X -> Y & ~ X*/
11160 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11161 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11163 tree t2 = TREE_OPERAND (arg0, 0);
11164 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11166 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11167 fold_convert_loc (loc, type, t2),
11168 fold_convert_loc (loc, type, t1));
11172 /* X ^ (X | Y) -> Y & ~ X*/
11173 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11174 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11176 tree t2 = TREE_OPERAND (arg1, 1);
11177 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11179 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11180 fold_convert_loc (loc, type, t2),
11181 fold_convert_loc (loc, type, t1));
11185 /* X ^ (Y | X) -> Y & ~ X*/
11186 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11187 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11189 tree t2 = TREE_OPERAND (arg1, 0);
11190 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11192 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11193 fold_convert_loc (loc, type, t2),
11194 fold_convert_loc (loc, type, t1));
11198 /* Convert ~X ^ ~Y to X ^ Y. */
11199 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11200 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11201 return fold_build2_loc (loc, code, type,
11202 fold_convert_loc (loc, type,
11203 TREE_OPERAND (arg0, 0)),
11204 fold_convert_loc (loc, type,
11205 TREE_OPERAND (arg1, 0)));
11207 /* Convert ~X ^ C to X ^ ~C. */
11208 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11209 && TREE_CODE (arg1) == INTEGER_CST)
11210 return fold_build2_loc (loc, code, type,
11211 fold_convert_loc (loc, type,
11212 TREE_OPERAND (arg0, 0)),
11213 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11215 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11216 if (TREE_CODE (arg0) == BIT_AND_EXPR
11217 && INTEGRAL_TYPE_P (type)
11218 && integer_onep (TREE_OPERAND (arg0, 1))
11219 && integer_onep (arg1))
11220 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11221 build_zero_cst (TREE_TYPE (arg0)));
11223 /* Fold (X & Y) ^ Y as ~X & Y. */
11224 if (TREE_CODE (arg0) == BIT_AND_EXPR
11225 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11227 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11228 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11229 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11230 fold_convert_loc (loc, type, arg1));
11232 /* Fold (X & Y) ^ X as ~Y & X. */
11233 if (TREE_CODE (arg0) == BIT_AND_EXPR
11234 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11235 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11237 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11238 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11239 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11240 fold_convert_loc (loc, type, arg1));
11242 /* Fold X ^ (X & Y) as X & ~Y. */
11243 if (TREE_CODE (arg1) == BIT_AND_EXPR
11244 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11246 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11247 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11248 fold_convert_loc (loc, type, arg0),
11249 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11251 /* Fold X ^ (Y & X) as ~Y & X. */
11252 if (TREE_CODE (arg1) == BIT_AND_EXPR
11253 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11254 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11256 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11257 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11258 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11259 fold_convert_loc (loc, type, arg0));
11262 /* See if this can be simplified into a rotate first. If that
11263 is unsuccessful continue in the association code. */
11267 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11268 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11269 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11270 || (TREE_CODE (arg0) == EQ_EXPR
11271 && integer_zerop (TREE_OPERAND (arg0, 1))))
11272 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11273 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11275 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11276 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11277 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11278 || (TREE_CODE (arg1) == EQ_EXPR
11279 && integer_zerop (TREE_OPERAND (arg1, 1))))
11280 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11281 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11283 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11284 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11285 && INTEGRAL_TYPE_P (type)
11286 && integer_onep (TREE_OPERAND (arg0, 1))
11287 && integer_onep (arg1))
11290 tem = TREE_OPERAND (arg0, 0);
11291 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11292 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11294 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11295 build_zero_cst (TREE_TYPE (tem)));
11297 /* Fold ~X & 1 as (X & 1) == 0. */
11298 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11299 && INTEGRAL_TYPE_P (type)
11300 && integer_onep (arg1))
11303 tem = TREE_OPERAND (arg0, 0);
11304 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11305 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11307 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11308 build_zero_cst (TREE_TYPE (tem)));
11310 /* Fold !X & 1 as X == 0. */
11311 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11312 && integer_onep (arg1))
11314 tem = TREE_OPERAND (arg0, 0);
11315 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11316 build_zero_cst (TREE_TYPE (tem)));
11319 /* Fold (X ^ Y) & Y as ~X & Y. */
11320 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11321 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11323 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11324 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11325 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11326 fold_convert_loc (loc, type, arg1));
11328 /* Fold (X ^ Y) & X as ~Y & X. */
11329 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11330 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11331 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11333 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11334 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11335 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11336 fold_convert_loc (loc, type, arg1));
11338 /* Fold X & (X ^ Y) as X & ~Y. */
11339 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11340 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11342 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11343 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11344 fold_convert_loc (loc, type, arg0),
11345 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11347 /* Fold X & (Y ^ X) as ~Y & X. */
11348 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11349 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11350 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11352 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11353 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11354 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11355 fold_convert_loc (loc, type, arg0));
11358 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11359 multiple of 1 << CST. */
11360 if (TREE_CODE (arg1) == INTEGER_CST)
11362 wide_int cst1 = arg1;
11363 wide_int ncst1 = -cst1;
11364 if ((cst1 & ncst1) == ncst1
11365 && multiple_of_p (type, arg0,
11366 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11367 return fold_convert_loc (loc, type, arg0);
11370 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11372 if (TREE_CODE (arg1) == INTEGER_CST
11373 && TREE_CODE (arg0) == MULT_EXPR
11374 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11376 wide_int warg1 = arg1;
11377 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11380 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11382 else if (masked != warg1)
11384 /* Avoid the transform if arg1 is a mask of some
11385 mode which allows further optimizations. */
11386 int pop = wi::popcount (warg1);
11387 if (!(pop >= BITS_PER_UNIT
11388 && exact_log2 (pop) != -1
11389 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11390 return fold_build2_loc (loc, code, type, op0,
11391 wide_int_to_tree (type, masked));
11395 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11396 ((A & N) + B) & M -> (A + B) & M
11397 Similarly if (N & M) == 0,
11398 ((A | N) + B) & M -> (A + B) & M
11399 and for - instead of + (or unary - instead of +)
11400 and/or ^ instead of |.
11401 If B is constant and (B & M) == 0, fold into A & M. */
11402 if (TREE_CODE (arg1) == INTEGER_CST)
11404 wide_int cst1 = arg1;
11405 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11406 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11407 && (TREE_CODE (arg0) == PLUS_EXPR
11408 || TREE_CODE (arg0) == MINUS_EXPR
11409 || TREE_CODE (arg0) == NEGATE_EXPR)
11410 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11411 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11417 /* Now we know that arg0 is (C + D) or (C - D) or
11418 -C and arg1 (M) is == (1LL << cst) - 1.
11419 Store C into PMOP[0] and D into PMOP[1]. */
11420 pmop[0] = TREE_OPERAND (arg0, 0);
11422 if (TREE_CODE (arg0) != NEGATE_EXPR)
11424 pmop[1] = TREE_OPERAND (arg0, 1);
11428 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11431 for (; which >= 0; which--)
11432 switch (TREE_CODE (pmop[which]))
11437 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11440 cst0 = TREE_OPERAND (pmop[which], 1);
11442 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11447 else if (cst0 != 0)
11449 /* If C or D is of the form (A & N) where
11450 (N & M) == M, or of the form (A | N) or
11451 (A ^ N) where (N & M) == 0, replace it with A. */
11452 pmop[which] = TREE_OPERAND (pmop[which], 0);
11455 /* If C or D is a N where (N & M) == 0, it can be
11456 omitted (assumed 0). */
11457 if ((TREE_CODE (arg0) == PLUS_EXPR
11458 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11459 && (cst1 & pmop[which]) == 0)
11460 pmop[which] = NULL;
11466 /* Only build anything new if we optimized one or both arguments
11468 if (pmop[0] != TREE_OPERAND (arg0, 0)
11469 || (TREE_CODE (arg0) != NEGATE_EXPR
11470 && pmop[1] != TREE_OPERAND (arg0, 1)))
11472 tree utype = TREE_TYPE (arg0);
11473 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11475 /* Perform the operations in a type that has defined
11476 overflow behavior. */
11477 utype = unsigned_type_for (TREE_TYPE (arg0));
11478 if (pmop[0] != NULL)
11479 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11480 if (pmop[1] != NULL)
11481 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11484 if (TREE_CODE (arg0) == NEGATE_EXPR)
11485 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11486 else if (TREE_CODE (arg0) == PLUS_EXPR)
11488 if (pmop[0] != NULL && pmop[1] != NULL)
11489 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11491 else if (pmop[0] != NULL)
11493 else if (pmop[1] != NULL)
11496 return build_int_cst (type, 0);
11498 else if (pmop[0] == NULL)
11499 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11501 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11503 /* TEM is now the new binary +, - or unary - replacement. */
11504 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11505 fold_convert_loc (loc, utype, arg1));
11506 return fold_convert_loc (loc, type, tem);
11511 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11512 if (t1 != NULL_TREE)
11514 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11515 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11516 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11518 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11520 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11523 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11526 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11528 This results in more efficient code for machines without a NOR
11529 instruction. Combine will canonicalize to the first form
11530 which will allow use of NOR instructions provided by the
11531 backend if they exist. */
11532 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11533 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11535 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11536 build2 (BIT_IOR_EXPR, type,
11537 fold_convert_loc (loc, type,
11538 TREE_OPERAND (arg0, 0)),
11539 fold_convert_loc (loc, type,
11540 TREE_OPERAND (arg1, 0))));
11543 /* If arg0 is derived from the address of an object or function, we may
11544 be able to fold this expression using the object or function's
11546 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11548 unsigned HOST_WIDE_INT modulus, residue;
11549 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11551 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11552 integer_onep (arg1));
11554 /* This works because modulus is a power of 2. If this weren't the
11555 case, we'd have to replace it by its greatest power-of-2
11556 divisor: modulus & -modulus. */
11558 return build_int_cst (type, residue & low);
11561 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11562 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11563 if the new mask might be further optimized. */
11564 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11565 || TREE_CODE (arg0) == RSHIFT_EXPR)
11566 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11567 && TREE_CODE (arg1) == INTEGER_CST
11568 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11569 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11570 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11571 < TYPE_PRECISION (TREE_TYPE (arg0))))
11573 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11574 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11575 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11576 tree shift_type = TREE_TYPE (arg0);
11578 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11579 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11580 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11581 && TYPE_PRECISION (TREE_TYPE (arg0))
11582 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11584 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11585 tree arg00 = TREE_OPERAND (arg0, 0);
11586 /* See if more bits can be proven as zero because of
11588 if (TREE_CODE (arg00) == NOP_EXPR
11589 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11591 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11592 if (TYPE_PRECISION (inner_type)
11593 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11594 && TYPE_PRECISION (inner_type) < prec)
11596 prec = TYPE_PRECISION (inner_type);
11597 /* See if we can shorten the right shift. */
11599 shift_type = inner_type;
11600 /* Otherwise X >> C1 is all zeros, so we'll optimize
11601 it into (X, 0) later on by making sure zerobits
11605 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11608 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11609 zerobits <<= prec - shiftc;
11611 /* For arithmetic shift if sign bit could be set, zerobits
11612 can contain actually sign bits, so no transformation is
11613 possible, unless MASK masks them all away. In that
11614 case the shift needs to be converted into logical shift. */
11615 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11616 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11618 if ((mask & zerobits) == 0)
11619 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11625 /* ((X << 16) & 0xff00) is (X, 0). */
11626 if ((mask & zerobits) == mask)
11627 return omit_one_operand_loc (loc, type,
11628 build_int_cst (type, 0), arg0);
11630 newmask = mask | zerobits;
11631 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11633 /* Only do the transformation if NEWMASK is some integer
11635 for (prec = BITS_PER_UNIT;
11636 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11637 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11639 if (prec < HOST_BITS_PER_WIDE_INT
11640 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11644 if (shift_type != TREE_TYPE (arg0))
11646 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11647 fold_convert_loc (loc, shift_type,
11648 TREE_OPERAND (arg0, 0)),
11649 TREE_OPERAND (arg0, 1));
11650 tem = fold_convert_loc (loc, type, tem);
11654 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11655 if (!tree_int_cst_equal (newmaskt, arg1))
11656 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11664 /* Don't touch a floating-point divide by zero unless the mode
11665 of the constant can represent infinity. */
11666 if (TREE_CODE (arg1) == REAL_CST
11667 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11668 && real_zerop (arg1))
11671 /* (-A) / (-B) -> A / B */
11672 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11673 return fold_build2_loc (loc, RDIV_EXPR, type,
11674 TREE_OPERAND (arg0, 0),
11675 negate_expr (arg1));
11676 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11677 return fold_build2_loc (loc, RDIV_EXPR, type,
11678 negate_expr (arg0),
11679 TREE_OPERAND (arg1, 0));
11681 /* Convert A/B/C to A/(B*C). */
11682 if (flag_reciprocal_math
11683 && TREE_CODE (arg0) == RDIV_EXPR)
11684 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11685 fold_build2_loc (loc, MULT_EXPR, type,
11686 TREE_OPERAND (arg0, 1), arg1));
11688 /* Convert A/(B/C) to (A/B)*C. */
11689 if (flag_reciprocal_math
11690 && TREE_CODE (arg1) == RDIV_EXPR)
11691 return fold_build2_loc (loc, MULT_EXPR, type,
11692 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11693 TREE_OPERAND (arg1, 0)),
11694 TREE_OPERAND (arg1, 1));
11696 /* Convert C1/(X*C2) into (C1/C2)/X. */
11697 if (flag_reciprocal_math
11698 && TREE_CODE (arg1) == MULT_EXPR
11699 && TREE_CODE (arg0) == REAL_CST
11700 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11702 tree tem = const_binop (RDIV_EXPR, arg0,
11703 TREE_OPERAND (arg1, 1));
11705 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11706 TREE_OPERAND (arg1, 0));
11709 if (flag_unsafe_math_optimizations)
11711 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11712 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11714 /* Optimize sin(x)/cos(x) as tan(x). */
11715 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11716 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11717 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11718 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11719 CALL_EXPR_ARG (arg1, 0), 0))
11721 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11723 if (tanfn != NULL_TREE)
11724 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11727 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11728 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11729 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11730 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11731 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11732 CALL_EXPR_ARG (arg1, 0), 0))
11734 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11736 if (tanfn != NULL_TREE)
11738 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11739 CALL_EXPR_ARG (arg0, 0));
11740 return fold_build2_loc (loc, RDIV_EXPR, type,
11741 build_real (type, dconst1), tmp);
11745 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11746 NaNs or Infinities. */
11747 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11748 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11749 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11751 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11752 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11754 if (! HONOR_NANS (arg00)
11755 && ! HONOR_INFINITIES (element_mode (arg00))
11756 && operand_equal_p (arg00, arg01, 0))
11758 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11760 if (cosfn != NULL_TREE)
11761 return build_call_expr_loc (loc, cosfn, 1, arg00);
11765 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11766 NaNs or Infinities. */
11767 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11768 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11769 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11771 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11772 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11774 if (! HONOR_NANS (arg00)
11775 && ! HONOR_INFINITIES (element_mode (arg00))
11776 && operand_equal_p (arg00, arg01, 0))
11778 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11780 if (cosfn != NULL_TREE)
11782 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11783 return fold_build2_loc (loc, RDIV_EXPR, type,
11784 build_real (type, dconst1),
11790 /* Optimize pow(x,c)/x as pow(x,c-1). */
11791 if (fcode0 == BUILT_IN_POW
11792 || fcode0 == BUILT_IN_POWF
11793 || fcode0 == BUILT_IN_POWL)
11795 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11796 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11797 if (TREE_CODE (arg01) == REAL_CST
11798 && !TREE_OVERFLOW (arg01)
11799 && operand_equal_p (arg1, arg00, 0))
11801 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11805 c = TREE_REAL_CST (arg01);
11806 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11807 arg = build_real (type, c);
11808 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11812 /* Optimize a/root(b/c) into a*root(c/b). */
11813 if (BUILTIN_ROOT_P (fcode1))
11815 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11817 if (TREE_CODE (rootarg) == RDIV_EXPR)
11819 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11820 tree b = TREE_OPERAND (rootarg, 0);
11821 tree c = TREE_OPERAND (rootarg, 1);
11823 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11825 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11826 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11830 /* Optimize x/expN(y) into x*expN(-y). */
11831 if (BUILTIN_EXPONENT_P (fcode1))
11833 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11834 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11835 arg1 = build_call_expr_loc (loc,
11837 fold_convert_loc (loc, type, arg));
11838 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11841 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11842 if (fcode1 == BUILT_IN_POW
11843 || fcode1 == BUILT_IN_POWF
11844 || fcode1 == BUILT_IN_POWL)
11846 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11847 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11848 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11849 tree neg11 = fold_convert_loc (loc, type,
11850 negate_expr (arg11));
11851 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11852 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11857 case TRUNC_DIV_EXPR:
11858 /* Optimize (X & (-A)) / A where A is a power of 2,
11860 if (TREE_CODE (arg0) == BIT_AND_EXPR
11861 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11862 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11864 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11865 arg1, TREE_OPERAND (arg0, 1));
11866 if (sum && integer_zerop (sum)) {
11867 tree pow2 = build_int_cst (integer_type_node,
11868 wi::exact_log2 (arg1));
11869 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11870 TREE_OPERAND (arg0, 0), pow2);
11876 case FLOOR_DIV_EXPR:
11877 /* Simplify A / (B << N) where A and B are positive and B is
11878 a power of 2, to A >> (N + log2(B)). */
11879 strict_overflow_p = false;
11880 if (TREE_CODE (arg1) == LSHIFT_EXPR
11881 && (TYPE_UNSIGNED (type)
11882 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11884 tree sval = TREE_OPERAND (arg1, 0);
11885 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11887 tree sh_cnt = TREE_OPERAND (arg1, 1);
11888 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11889 wi::exact_log2 (sval));
11891 if (strict_overflow_p)
11892 fold_overflow_warning (("assuming signed overflow does not "
11893 "occur when simplifying A / (B << N)"),
11894 WARN_STRICT_OVERFLOW_MISC);
11896 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11898 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11899 fold_convert_loc (loc, type, arg0), sh_cnt);
11905 case ROUND_DIV_EXPR:
11906 case CEIL_DIV_EXPR:
11907 case EXACT_DIV_EXPR:
11908 if (integer_zerop (arg1))
11911 /* Convert -A / -B to A / B when the type is signed and overflow is
11913 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11914 && TREE_CODE (arg0) == NEGATE_EXPR
11915 && negate_expr_p (arg1))
11917 if (INTEGRAL_TYPE_P (type))
11918 fold_overflow_warning (("assuming signed overflow does not occur "
11919 "when distributing negation across "
11921 WARN_STRICT_OVERFLOW_MISC);
11922 return fold_build2_loc (loc, code, type,
11923 fold_convert_loc (loc, type,
11924 TREE_OPERAND (arg0, 0)),
11925 fold_convert_loc (loc, type,
11926 negate_expr (arg1)));
11928 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11929 && TREE_CODE (arg1) == NEGATE_EXPR
11930 && negate_expr_p (arg0))
11932 if (INTEGRAL_TYPE_P (type))
11933 fold_overflow_warning (("assuming signed overflow does not occur "
11934 "when distributing negation across "
11936 WARN_STRICT_OVERFLOW_MISC);
11937 return fold_build2_loc (loc, code, type,
11938 fold_convert_loc (loc, type,
11939 negate_expr (arg0)),
11940 fold_convert_loc (loc, type,
11941 TREE_OPERAND (arg1, 0)));
11944 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11945 operation, EXACT_DIV_EXPR.
11947 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11948 At one time others generated faster code, it's not clear if they do
11949 after the last round to changes to the DIV code in expmed.c. */
11950 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11951 && multiple_of_p (type, arg0, arg1))
11952 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11954 strict_overflow_p = false;
11955 if (TREE_CODE (arg1) == INTEGER_CST
11956 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11957 &strict_overflow_p)))
11959 if (strict_overflow_p)
11960 fold_overflow_warning (("assuming signed overflow does not occur "
11961 "when simplifying division"),
11962 WARN_STRICT_OVERFLOW_MISC);
11963 return fold_convert_loc (loc, type, tem);
11968 case CEIL_MOD_EXPR:
11969 case FLOOR_MOD_EXPR:
11970 case ROUND_MOD_EXPR:
11971 case TRUNC_MOD_EXPR:
11972 /* X % -Y is the same as X % Y. */
11973 if (code == TRUNC_MOD_EXPR
11974 && !TYPE_UNSIGNED (type)
11975 && TREE_CODE (arg1) == NEGATE_EXPR
11976 && !TYPE_OVERFLOW_TRAPS (type))
11977 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11978 fold_convert_loc (loc, type,
11979 TREE_OPERAND (arg1, 0)));
11981 strict_overflow_p = false;
11982 if (TREE_CODE (arg1) == INTEGER_CST
11983 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11984 &strict_overflow_p)))
11986 if (strict_overflow_p)
11987 fold_overflow_warning (("assuming signed overflow does not occur "
11988 "when simplifying modulus"),
11989 WARN_STRICT_OVERFLOW_MISC);
11990 return fold_convert_loc (loc, type, tem);
11993 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11994 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11995 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11996 && (TYPE_UNSIGNED (type)
11997 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12000 /* Also optimize A % (C << N) where C is a power of 2,
12001 to A & ((C << N) - 1). */
12002 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12003 c = TREE_OPERAND (arg1, 0);
12005 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12008 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12009 build_int_cst (TREE_TYPE (arg1), 1));
12010 if (strict_overflow_p)
12011 fold_overflow_warning (("assuming signed overflow does not "
12012 "occur when simplifying "
12013 "X % (power of two)"),
12014 WARN_STRICT_OVERFLOW_MISC);
12015 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12016 fold_convert_loc (loc, type, arg0),
12017 fold_convert_loc (loc, type, mask));
12027 /* Since negative shift count is not well-defined,
12028 don't try to compute it in the compiler. */
12029 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12032 prec = element_precision (type);
12034 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12035 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12036 && tree_to_uhwi (arg1) < prec
12037 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12038 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12040 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12041 + tree_to_uhwi (arg1));
12043 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12044 being well defined. */
12047 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12049 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12050 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12051 TREE_OPERAND (arg0, 0));
12056 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12057 build_int_cst (TREE_TYPE (arg1), low));
12060 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12061 into x & ((unsigned)-1 >> c) for unsigned types. */
12062 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12063 || (TYPE_UNSIGNED (type)
12064 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12065 && tree_fits_uhwi_p (arg1)
12066 && tree_to_uhwi (arg1) < prec
12067 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12068 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12070 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12071 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12077 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12079 lshift = build_minus_one_cst (type);
12080 lshift = const_binop (code, lshift, arg1);
12082 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12086 /* If we have a rotate of a bit operation with the rotate count and
12087 the second operand of the bit operation both constant,
12088 permute the two operations. */
12089 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12090 && (TREE_CODE (arg0) == BIT_AND_EXPR
12091 || TREE_CODE (arg0) == BIT_IOR_EXPR
12092 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12093 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12094 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12095 fold_build2_loc (loc, code, type,
12096 TREE_OPERAND (arg0, 0), arg1),
12097 fold_build2_loc (loc, code, type,
12098 TREE_OPERAND (arg0, 1), arg1));
12100 /* Two consecutive rotates adding up to the some integer
12101 multiple of the precision of the type can be ignored. */
12102 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12103 && TREE_CODE (arg0) == RROTATE_EXPR
12104 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12105 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12107 return TREE_OPERAND (arg0, 0);
12109 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12110 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12111 if the latter can be further optimized. */
12112 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12113 && TREE_CODE (arg0) == BIT_AND_EXPR
12114 && TREE_CODE (arg1) == INTEGER_CST
12115 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12117 tree mask = fold_build2_loc (loc, code, type,
12118 fold_convert_loc (loc, type,
12119 TREE_OPERAND (arg0, 1)),
12121 tree shift = fold_build2_loc (loc, code, type,
12122 fold_convert_loc (loc, type,
12123 TREE_OPERAND (arg0, 0)),
12125 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12133 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12139 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12144 case TRUTH_ANDIF_EXPR:
12145 /* Note that the operands of this must be ints
12146 and their values must be 0 or 1.
12147 ("true" is a fixed value perhaps depending on the language.) */
12148 /* If first arg is constant zero, return it. */
12149 if (integer_zerop (arg0))
12150 return fold_convert_loc (loc, type, arg0);
12151 case TRUTH_AND_EXPR:
12152 /* If either arg is constant true, drop it. */
12153 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12154 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12155 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12156 /* Preserve sequence points. */
12157 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12158 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12159 /* If second arg is constant zero, result is zero, but first arg
12160 must be evaluated. */
12161 if (integer_zerop (arg1))
12162 return omit_one_operand_loc (loc, type, arg1, arg0);
12163 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12164 case will be handled here. */
12165 if (integer_zerop (arg0))
12166 return omit_one_operand_loc (loc, type, arg0, arg1);
12168 /* !X && X is always false. */
12169 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12170 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12171 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12172 /* X && !X is always false. */
12173 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12174 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12175 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12177 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12178 means A >= Y && A != MAX, but in this case we know that
12181 if (!TREE_SIDE_EFFECTS (arg0)
12182 && !TREE_SIDE_EFFECTS (arg1))
12184 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12185 if (tem && !operand_equal_p (tem, arg0, 0))
12186 return fold_build2_loc (loc, code, type, tem, arg1);
12188 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12189 if (tem && !operand_equal_p (tem, arg1, 0))
12190 return fold_build2_loc (loc, code, type, arg0, tem);
12193 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12199 case TRUTH_ORIF_EXPR:
12200 /* Note that the operands of this must be ints
12201 and their values must be 0 or true.
12202 ("true" is a fixed value perhaps depending on the language.) */
12203 /* If first arg is constant true, return it. */
12204 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12205 return fold_convert_loc (loc, type, arg0);
12206 case TRUTH_OR_EXPR:
12207 /* If either arg is constant zero, drop it. */
12208 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12209 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12210 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12211 /* Preserve sequence points. */
12212 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12213 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12214 /* If second arg is constant true, result is true, but we must
12215 evaluate first arg. */
12216 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12217 return omit_one_operand_loc (loc, type, arg1, arg0);
12218 /* Likewise for first arg, but note this only occurs here for
12220 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12221 return omit_one_operand_loc (loc, type, arg0, arg1);
12223 /* !X || X is always true. */
12224 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12225 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12226 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12227 /* X || !X is always true. */
12228 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12229 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12230 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12232 /* (X && !Y) || (!X && Y) is X ^ Y */
12233 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12234 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12236 tree a0, a1, l0, l1, n0, n1;
12238 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12239 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12241 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12242 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12244 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12245 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12247 if ((operand_equal_p (n0, a0, 0)
12248 && operand_equal_p (n1, a1, 0))
12249 || (operand_equal_p (n0, a1, 0)
12250 && operand_equal_p (n1, a0, 0)))
12251 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12254 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12260 case TRUTH_XOR_EXPR:
12261 /* If the second arg is constant zero, drop it. */
12262 if (integer_zerop (arg1))
12263 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12264 /* If the second arg is constant true, this is a logical inversion. */
12265 if (integer_onep (arg1))
12267 tem = invert_truthvalue_loc (loc, arg0);
12268 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12270 /* Identical arguments cancel to zero. */
12271 if (operand_equal_p (arg0, arg1, 0))
12272 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12274 /* !X ^ X is always true. */
12275 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12276 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12277 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12279 /* X ^ !X is always true. */
12280 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12281 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12282 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12291 tem = fold_comparison (loc, code, type, op0, op1);
12292 if (tem != NULL_TREE)
12295 /* bool_var != 0 becomes bool_var. */
12296 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12297 && code == NE_EXPR)
12298 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12300 /* bool_var == 1 becomes bool_var. */
12301 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12302 && code == EQ_EXPR)
12303 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12305 /* bool_var != 1 becomes !bool_var. */
12306 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12307 && code == NE_EXPR)
12308 return fold_convert_loc (loc, type,
12309 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12310 TREE_TYPE (arg0), arg0));
12312 /* bool_var == 0 becomes !bool_var. */
12313 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12314 && code == EQ_EXPR)
12315 return fold_convert_loc (loc, type,
12316 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12317 TREE_TYPE (arg0), arg0));
12319 /* !exp != 0 becomes !exp */
12320 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12321 && code == NE_EXPR)
12322 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12324 /* If this is an equality comparison of the address of two non-weak,
12325 unaliased symbols neither of which are extern (since we do not
12326 have access to attributes for externs), then we know the result. */
12327 if (TREE_CODE (arg0) == ADDR_EXPR
12328 && DECL_P (TREE_OPERAND (arg0, 0))
12329 && TREE_CODE (arg1) == ADDR_EXPR
12330 && DECL_P (TREE_OPERAND (arg1, 0)))
12334 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
12335 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
12336 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
12337 ->equal_address_to (symtab_node::get_create
12338 (TREE_OPERAND (arg1, 0)));
12340 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12342 return constant_boolean_node (equal
12343 ? code == EQ_EXPR : code != EQ_EXPR,
12347 /* Similarly for a NEGATE_EXPR. */
12348 if (TREE_CODE (arg0) == NEGATE_EXPR
12349 && TREE_CODE (arg1) == INTEGER_CST
12350 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12352 && TREE_CODE (tem) == INTEGER_CST
12353 && !TREE_OVERFLOW (tem))
12354 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12356 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12357 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12358 && TREE_CODE (arg1) == INTEGER_CST
12359 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12360 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12361 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12362 fold_convert_loc (loc,
12365 TREE_OPERAND (arg0, 1)));
12367 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12368 if ((TREE_CODE (arg0) == PLUS_EXPR
12369 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12370 || TREE_CODE (arg0) == MINUS_EXPR)
12371 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12374 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12375 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12377 tree val = TREE_OPERAND (arg0, 1);
12378 return omit_two_operands_loc (loc, type,
12379 fold_build2_loc (loc, code, type,
12381 build_int_cst (TREE_TYPE (val),
12383 TREE_OPERAND (arg0, 0), arg1);
12386 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12387 if (TREE_CODE (arg0) == MINUS_EXPR
12388 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12389 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12392 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12394 return omit_two_operands_loc (loc, type,
12396 ? boolean_true_node : boolean_false_node,
12397 TREE_OPERAND (arg0, 1), arg1);
12400 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12401 if (TREE_CODE (arg0) == ABS_EXPR
12402 && (integer_zerop (arg1) || real_zerop (arg1)))
12403 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12405 /* If this is an EQ or NE comparison with zero and ARG0 is
12406 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12407 two operations, but the latter can be done in one less insn
12408 on machines that have only two-operand insns or on which a
12409 constant cannot be the first operand. */
12410 if (TREE_CODE (arg0) == BIT_AND_EXPR
12411 && integer_zerop (arg1))
12413 tree arg00 = TREE_OPERAND (arg0, 0);
12414 tree arg01 = TREE_OPERAND (arg0, 1);
12415 if (TREE_CODE (arg00) == LSHIFT_EXPR
12416 && integer_onep (TREE_OPERAND (arg00, 0)))
12418 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12419 arg01, TREE_OPERAND (arg00, 1));
12420 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12421 build_int_cst (TREE_TYPE (arg0), 1));
12422 return fold_build2_loc (loc, code, type,
12423 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12426 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12427 && integer_onep (TREE_OPERAND (arg01, 0)))
12429 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12430 arg00, TREE_OPERAND (arg01, 1));
12431 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12432 build_int_cst (TREE_TYPE (arg0), 1));
12433 return fold_build2_loc (loc, code, type,
12434 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12439 /* If this is an NE or EQ comparison of zero against the result of a
12440 signed MOD operation whose second operand is a power of 2, make
12441 the MOD operation unsigned since it is simpler and equivalent. */
12442 if (integer_zerop (arg1)
12443 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12444 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12445 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12446 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12447 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12448 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12450 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12451 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12452 fold_convert_loc (loc, newtype,
12453 TREE_OPERAND (arg0, 0)),
12454 fold_convert_loc (loc, newtype,
12455 TREE_OPERAND (arg0, 1)));
12457 return fold_build2_loc (loc, code, type, newmod,
12458 fold_convert_loc (loc, newtype, arg1));
12461 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12462 C1 is a valid shift constant, and C2 is a power of two, i.e.
12464 if (TREE_CODE (arg0) == BIT_AND_EXPR
12465 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12466 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12468 && integer_pow2p (TREE_OPERAND (arg0, 1))
12469 && integer_zerop (arg1))
12471 tree itype = TREE_TYPE (arg0);
12472 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12473 prec = TYPE_PRECISION (itype);
12475 /* Check for a valid shift count. */
12476 if (wi::ltu_p (arg001, prec))
12478 tree arg01 = TREE_OPERAND (arg0, 1);
12479 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12480 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12481 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12482 can be rewritten as (X & (C2 << C1)) != 0. */
12483 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12485 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12486 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12487 return fold_build2_loc (loc, code, type, tem,
12488 fold_convert_loc (loc, itype, arg1));
12490 /* Otherwise, for signed (arithmetic) shifts,
12491 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12492 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12493 else if (!TYPE_UNSIGNED (itype))
12494 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12495 arg000, build_int_cst (itype, 0));
12496 /* Otherwise, of unsigned (logical) shifts,
12497 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12498 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12500 return omit_one_operand_loc (loc, type,
12501 code == EQ_EXPR ? integer_one_node
12502 : integer_zero_node,
12507 /* If we have (A & C) == C where C is a power of 2, convert this into
12508 (A & C) != 0. Similarly for NE_EXPR. */
12509 if (TREE_CODE (arg0) == BIT_AND_EXPR
12510 && integer_pow2p (TREE_OPERAND (arg0, 1))
12511 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12512 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12513 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12514 integer_zero_node));
12516 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12517 bit, then fold the expression into A < 0 or A >= 0. */
12518 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12522 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12523 Similarly for NE_EXPR. */
12524 if (TREE_CODE (arg0) == BIT_AND_EXPR
12525 && TREE_CODE (arg1) == INTEGER_CST
12526 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12528 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12529 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12530 TREE_OPERAND (arg0, 1));
12532 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12533 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12535 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12536 if (integer_nonzerop (dandnotc))
12537 return omit_one_operand_loc (loc, type, rslt, arg0);
12540 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12541 Similarly for NE_EXPR. */
12542 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12543 && TREE_CODE (arg1) == INTEGER_CST
12544 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12546 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12548 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12549 TREE_OPERAND (arg0, 1),
12550 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12551 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12552 if (integer_nonzerop (candnotd))
12553 return omit_one_operand_loc (loc, type, rslt, arg0);
12556 /* If this is a comparison of a field, we may be able to simplify it. */
12557 if ((TREE_CODE (arg0) == COMPONENT_REF
12558 || TREE_CODE (arg0) == BIT_FIELD_REF)
12559 /* Handle the constant case even without -O
12560 to make sure the warnings are given. */
12561 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12563 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12568 /* Optimize comparisons of strlen vs zero to a compare of the
12569 first character of the string vs zero. To wit,
12570 strlen(ptr) == 0 => *ptr == 0
12571 strlen(ptr) != 0 => *ptr != 0
12572 Other cases should reduce to one of these two (or a constant)
12573 due to the return value of strlen being unsigned. */
12574 if (TREE_CODE (arg0) == CALL_EXPR
12575 && integer_zerop (arg1))
12577 tree fndecl = get_callee_fndecl (arg0);
12580 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12581 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12582 && call_expr_nargs (arg0) == 1
12583 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12585 tree iref = build_fold_indirect_ref_loc (loc,
12586 CALL_EXPR_ARG (arg0, 0));
12587 return fold_build2_loc (loc, code, type, iref,
12588 build_int_cst (TREE_TYPE (iref), 0));
12592 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12593 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12594 if (TREE_CODE (arg0) == RSHIFT_EXPR
12595 && integer_zerop (arg1)
12596 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12598 tree arg00 = TREE_OPERAND (arg0, 0);
12599 tree arg01 = TREE_OPERAND (arg0, 1);
12600 tree itype = TREE_TYPE (arg00);
12601 if (wi::eq_p (arg01, element_precision (itype) - 1))
12603 if (TYPE_UNSIGNED (itype))
12605 itype = signed_type_for (itype);
12606 arg00 = fold_convert_loc (loc, itype, arg00);
12608 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12609 type, arg00, build_zero_cst (itype));
12613 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12614 if (integer_zerop (arg1)
12615 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12616 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12617 TREE_OPERAND (arg0, 1));
12619 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12620 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12621 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12622 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12623 build_zero_cst (TREE_TYPE (arg0)));
12624 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12625 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12626 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12627 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12628 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12629 build_zero_cst (TREE_TYPE (arg0)));
12631 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12632 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12633 && TREE_CODE (arg1) == INTEGER_CST
12634 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12635 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12636 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12637 TREE_OPERAND (arg0, 1), arg1));
12639 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12640 (X & C) == 0 when C is a single bit. */
12641 if (TREE_CODE (arg0) == BIT_AND_EXPR
12642 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12643 && integer_zerop (arg1)
12644 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12646 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12647 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12648 TREE_OPERAND (arg0, 1));
12649 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12651 fold_convert_loc (loc, TREE_TYPE (arg0),
12655 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12656 constant C is a power of two, i.e. a single bit. */
12657 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12658 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12659 && integer_zerop (arg1)
12660 && integer_pow2p (TREE_OPERAND (arg0, 1))
12661 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12662 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12664 tree arg00 = TREE_OPERAND (arg0, 0);
12665 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12666 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12669 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12670 when is C is a power of two, i.e. a single bit. */
12671 if (TREE_CODE (arg0) == BIT_AND_EXPR
12672 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12673 && integer_zerop (arg1)
12674 && integer_pow2p (TREE_OPERAND (arg0, 1))
12675 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12676 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12678 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12679 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12680 arg000, TREE_OPERAND (arg0, 1));
12681 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12682 tem, build_int_cst (TREE_TYPE (tem), 0));
12685 if (integer_zerop (arg1)
12686 && tree_expr_nonzero_p (arg0))
12688 tree res = constant_boolean_node (code==NE_EXPR, type);
12689 return omit_one_operand_loc (loc, type, res, arg0);
12692 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12693 if (TREE_CODE (arg0) == NEGATE_EXPR
12694 && TREE_CODE (arg1) == NEGATE_EXPR)
12695 return fold_build2_loc (loc, code, type,
12696 TREE_OPERAND (arg0, 0),
12697 fold_convert_loc (loc, TREE_TYPE (arg0),
12698 TREE_OPERAND (arg1, 0)));
12700 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12701 if (TREE_CODE (arg0) == BIT_AND_EXPR
12702 && TREE_CODE (arg1) == BIT_AND_EXPR)
12704 tree arg00 = TREE_OPERAND (arg0, 0);
12705 tree arg01 = TREE_OPERAND (arg0, 1);
12706 tree arg10 = TREE_OPERAND (arg1, 0);
12707 tree arg11 = TREE_OPERAND (arg1, 1);
12708 tree itype = TREE_TYPE (arg0);
12710 if (operand_equal_p (arg01, arg11, 0))
12711 return fold_build2_loc (loc, code, type,
12712 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12713 fold_build2_loc (loc,
12714 BIT_XOR_EXPR, itype,
12717 build_zero_cst (itype));
12719 if (operand_equal_p (arg01, arg10, 0))
12720 return fold_build2_loc (loc, code, type,
12721 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12722 fold_build2_loc (loc,
12723 BIT_XOR_EXPR, itype,
12726 build_zero_cst (itype));
12728 if (operand_equal_p (arg00, arg11, 0))
12729 return fold_build2_loc (loc, code, type,
12730 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12731 fold_build2_loc (loc,
12732 BIT_XOR_EXPR, itype,
12735 build_zero_cst (itype));
12737 if (operand_equal_p (arg00, arg10, 0))
12738 return fold_build2_loc (loc, code, type,
12739 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12740 fold_build2_loc (loc,
12741 BIT_XOR_EXPR, itype,
12744 build_zero_cst (itype));
12747 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12748 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12750 tree arg00 = TREE_OPERAND (arg0, 0);
12751 tree arg01 = TREE_OPERAND (arg0, 1);
12752 tree arg10 = TREE_OPERAND (arg1, 0);
12753 tree arg11 = TREE_OPERAND (arg1, 1);
12754 tree itype = TREE_TYPE (arg0);
12756 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12757 operand_equal_p guarantees no side-effects so we don't need
12758 to use omit_one_operand on Z. */
12759 if (operand_equal_p (arg01, arg11, 0))
12760 return fold_build2_loc (loc, code, type, arg00,
12761 fold_convert_loc (loc, TREE_TYPE (arg00),
12763 if (operand_equal_p (arg01, arg10, 0))
12764 return fold_build2_loc (loc, code, type, arg00,
12765 fold_convert_loc (loc, TREE_TYPE (arg00),
12767 if (operand_equal_p (arg00, arg11, 0))
12768 return fold_build2_loc (loc, code, type, arg01,
12769 fold_convert_loc (loc, TREE_TYPE (arg01),
12771 if (operand_equal_p (arg00, arg10, 0))
12772 return fold_build2_loc (loc, code, type, arg01,
12773 fold_convert_loc (loc, TREE_TYPE (arg01),
12776 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12777 if (TREE_CODE (arg01) == INTEGER_CST
12778 && TREE_CODE (arg11) == INTEGER_CST)
12780 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12781 fold_convert_loc (loc, itype, arg11));
12782 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12783 return fold_build2_loc (loc, code, type, tem,
12784 fold_convert_loc (loc, itype, arg10));
12788 /* Attempt to simplify equality/inequality comparisons of complex
12789 values. Only lower the comparison if the result is known or
12790 can be simplified to a single scalar comparison. */
12791 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12792 || TREE_CODE (arg0) == COMPLEX_CST)
12793 && (TREE_CODE (arg1) == COMPLEX_EXPR
12794 || TREE_CODE (arg1) == COMPLEX_CST))
12796 tree real0, imag0, real1, imag1;
12799 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12801 real0 = TREE_OPERAND (arg0, 0);
12802 imag0 = TREE_OPERAND (arg0, 1);
12806 real0 = TREE_REALPART (arg0);
12807 imag0 = TREE_IMAGPART (arg0);
12810 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12812 real1 = TREE_OPERAND (arg1, 0);
12813 imag1 = TREE_OPERAND (arg1, 1);
12817 real1 = TREE_REALPART (arg1);
12818 imag1 = TREE_IMAGPART (arg1);
12821 rcond = fold_binary_loc (loc, code, type, real0, real1);
12822 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12824 if (integer_zerop (rcond))
12826 if (code == EQ_EXPR)
12827 return omit_two_operands_loc (loc, type, boolean_false_node,
12829 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12833 if (code == NE_EXPR)
12834 return omit_two_operands_loc (loc, type, boolean_true_node,
12836 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12840 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12841 if (icond && TREE_CODE (icond) == INTEGER_CST)
12843 if (integer_zerop (icond))
12845 if (code == EQ_EXPR)
12846 return omit_two_operands_loc (loc, type, boolean_false_node,
12848 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12852 if (code == NE_EXPR)
12853 return omit_two_operands_loc (loc, type, boolean_true_node,
12855 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12866 tem = fold_comparison (loc, code, type, op0, op1);
12867 if (tem != NULL_TREE)
12870 /* Transform comparisons of the form X +- C CMP X. */
12871 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12872 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12873 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12874 && !HONOR_SNANS (arg0))
12875 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12876 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12878 tree arg01 = TREE_OPERAND (arg0, 1);
12879 enum tree_code code0 = TREE_CODE (arg0);
12882 if (TREE_CODE (arg01) == REAL_CST)
12883 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12885 is_positive = tree_int_cst_sgn (arg01);
12887 /* (X - c) > X becomes false. */
12888 if (code == GT_EXPR
12889 && ((code0 == MINUS_EXPR && is_positive >= 0)
12890 || (code0 == PLUS_EXPR && is_positive <= 0)))
12892 if (TREE_CODE (arg01) == INTEGER_CST
12893 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12894 fold_overflow_warning (("assuming signed overflow does not "
12895 "occur when assuming that (X - c) > X "
12896 "is always false"),
12897 WARN_STRICT_OVERFLOW_ALL);
12898 return constant_boolean_node (0, type);
12901 /* Likewise (X + c) < X becomes false. */
12902 if (code == LT_EXPR
12903 && ((code0 == PLUS_EXPR && is_positive >= 0)
12904 || (code0 == MINUS_EXPR && is_positive <= 0)))
12906 if (TREE_CODE (arg01) == INTEGER_CST
12907 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12908 fold_overflow_warning (("assuming signed overflow does not "
12909 "occur when assuming that "
12910 "(X + c) < X is always false"),
12911 WARN_STRICT_OVERFLOW_ALL);
12912 return constant_boolean_node (0, type);
12915 /* Convert (X - c) <= X to true. */
12916 if (!HONOR_NANS (arg1)
12918 && ((code0 == MINUS_EXPR && is_positive >= 0)
12919 || (code0 == PLUS_EXPR && is_positive <= 0)))
12921 if (TREE_CODE (arg01) == INTEGER_CST
12922 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12923 fold_overflow_warning (("assuming signed overflow does not "
12924 "occur when assuming that "
12925 "(X - c) <= X is always true"),
12926 WARN_STRICT_OVERFLOW_ALL);
12927 return constant_boolean_node (1, type);
12930 /* Convert (X + c) >= X to true. */
12931 if (!HONOR_NANS (arg1)
12933 && ((code0 == PLUS_EXPR && is_positive >= 0)
12934 || (code0 == MINUS_EXPR && is_positive <= 0)))
12936 if (TREE_CODE (arg01) == INTEGER_CST
12937 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12938 fold_overflow_warning (("assuming signed overflow does not "
12939 "occur when assuming that "
12940 "(X + c) >= X is always true"),
12941 WARN_STRICT_OVERFLOW_ALL);
12942 return constant_boolean_node (1, type);
12945 if (TREE_CODE (arg01) == INTEGER_CST)
12947 /* Convert X + c > X and X - c < X to true for integers. */
12948 if (code == GT_EXPR
12949 && ((code0 == PLUS_EXPR && is_positive > 0)
12950 || (code0 == MINUS_EXPR && is_positive < 0)))
12952 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12953 fold_overflow_warning (("assuming signed overflow does "
12954 "not occur when assuming that "
12955 "(X + c) > X is always true"),
12956 WARN_STRICT_OVERFLOW_ALL);
12957 return constant_boolean_node (1, type);
12960 if (code == LT_EXPR
12961 && ((code0 == MINUS_EXPR && is_positive > 0)
12962 || (code0 == PLUS_EXPR && is_positive < 0)))
12964 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12965 fold_overflow_warning (("assuming signed overflow does "
12966 "not occur when assuming that "
12967 "(X - c) < X is always true"),
12968 WARN_STRICT_OVERFLOW_ALL);
12969 return constant_boolean_node (1, type);
12972 /* Convert X + c <= X and X - c >= X to false for integers. */
12973 if (code == LE_EXPR
12974 && ((code0 == PLUS_EXPR && is_positive > 0)
12975 || (code0 == MINUS_EXPR && is_positive < 0)))
12977 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12978 fold_overflow_warning (("assuming signed overflow does "
12979 "not occur when assuming that "
12980 "(X + c) <= X is always false"),
12981 WARN_STRICT_OVERFLOW_ALL);
12982 return constant_boolean_node (0, type);
12985 if (code == GE_EXPR
12986 && ((code0 == MINUS_EXPR && is_positive > 0)
12987 || (code0 == PLUS_EXPR && is_positive < 0)))
12989 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12990 fold_overflow_warning (("assuming signed overflow does "
12991 "not occur when assuming that "
12992 "(X - c) >= X is always false"),
12993 WARN_STRICT_OVERFLOW_ALL);
12994 return constant_boolean_node (0, type);
12999 /* Comparisons with the highest or lowest possible integer of
13000 the specified precision will have known values. */
13002 tree arg1_type = TREE_TYPE (arg1);
13003 unsigned int prec = TYPE_PRECISION (arg1_type);
13005 if (TREE_CODE (arg1) == INTEGER_CST
13006 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13008 wide_int max = wi::max_value (arg1_type);
13009 wide_int signed_max = wi::max_value (prec, SIGNED);
13010 wide_int min = wi::min_value (arg1_type);
13012 if (wi::eq_p (arg1, max))
13016 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13019 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13022 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13025 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13027 /* The GE_EXPR and LT_EXPR cases above are not normally
13028 reached because of previous transformations. */
13033 else if (wi::eq_p (arg1, max - 1))
13037 arg1 = const_binop (PLUS_EXPR, arg1,
13038 build_int_cst (TREE_TYPE (arg1), 1));
13039 return fold_build2_loc (loc, EQ_EXPR, type,
13040 fold_convert_loc (loc,
13041 TREE_TYPE (arg1), arg0),
13044 arg1 = const_binop (PLUS_EXPR, arg1,
13045 build_int_cst (TREE_TYPE (arg1), 1));
13046 return fold_build2_loc (loc, NE_EXPR, type,
13047 fold_convert_loc (loc, TREE_TYPE (arg1),
13053 else if (wi::eq_p (arg1, min))
13057 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13060 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13063 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13066 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13071 else if (wi::eq_p (arg1, min + 1))
13075 arg1 = const_binop (MINUS_EXPR, arg1,
13076 build_int_cst (TREE_TYPE (arg1), 1));
13077 return fold_build2_loc (loc, NE_EXPR, type,
13078 fold_convert_loc (loc,
13079 TREE_TYPE (arg1), arg0),
13082 arg1 = const_binop (MINUS_EXPR, arg1,
13083 build_int_cst (TREE_TYPE (arg1), 1));
13084 return fold_build2_loc (loc, EQ_EXPR, type,
13085 fold_convert_loc (loc, TREE_TYPE (arg1),
13092 else if (wi::eq_p (arg1, signed_max)
13093 && TYPE_UNSIGNED (arg1_type)
13094 /* We will flip the signedness of the comparison operator
13095 associated with the mode of arg1, so the sign bit is
13096 specified by this mode. Check that arg1 is the signed
13097 max associated with this sign bit. */
13098 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13099 /* signed_type does not work on pointer types. */
13100 && INTEGRAL_TYPE_P (arg1_type))
13102 /* The following case also applies to X < signed_max+1
13103 and X >= signed_max+1 because previous transformations. */
13104 if (code == LE_EXPR || code == GT_EXPR)
13106 tree st = signed_type_for (arg1_type);
13107 return fold_build2_loc (loc,
13108 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13109 type, fold_convert_loc (loc, st, arg0),
13110 build_int_cst (st, 0));
13116 /* If we are comparing an ABS_EXPR with a constant, we can
13117 convert all the cases into explicit comparisons, but they may
13118 well not be faster than doing the ABS and one comparison.
13119 But ABS (X) <= C is a range comparison, which becomes a subtraction
13120 and a comparison, and is probably faster. */
13121 if (code == LE_EXPR
13122 && TREE_CODE (arg1) == INTEGER_CST
13123 && TREE_CODE (arg0) == ABS_EXPR
13124 && ! TREE_SIDE_EFFECTS (arg0)
13125 && (0 != (tem = negate_expr (arg1)))
13126 && TREE_CODE (tem) == INTEGER_CST
13127 && !TREE_OVERFLOW (tem))
13128 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13129 build2 (GE_EXPR, type,
13130 TREE_OPERAND (arg0, 0), tem),
13131 build2 (LE_EXPR, type,
13132 TREE_OPERAND (arg0, 0), arg1));
13134 /* Convert ABS_EXPR<x> >= 0 to true. */
13135 strict_overflow_p = false;
13136 if (code == GE_EXPR
13137 && (integer_zerop (arg1)
13138 || (! HONOR_NANS (arg0)
13139 && real_zerop (arg1)))
13140 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13142 if (strict_overflow_p)
13143 fold_overflow_warning (("assuming signed overflow does not occur "
13144 "when simplifying comparison of "
13145 "absolute value and zero"),
13146 WARN_STRICT_OVERFLOW_CONDITIONAL);
13147 return omit_one_operand_loc (loc, type,
13148 constant_boolean_node (true, type),
13152 /* Convert ABS_EXPR<x> < 0 to false. */
13153 strict_overflow_p = false;
13154 if (code == LT_EXPR
13155 && (integer_zerop (arg1) || real_zerop (arg1))
13156 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13158 if (strict_overflow_p)
13159 fold_overflow_warning (("assuming signed overflow does not occur "
13160 "when simplifying comparison of "
13161 "absolute value and zero"),
13162 WARN_STRICT_OVERFLOW_CONDITIONAL);
13163 return omit_one_operand_loc (loc, type,
13164 constant_boolean_node (false, type),
13168 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13169 and similarly for >= into !=. */
13170 if ((code == LT_EXPR || code == GE_EXPR)
13171 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13172 && TREE_CODE (arg1) == LSHIFT_EXPR
13173 && integer_onep (TREE_OPERAND (arg1, 0)))
13174 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13175 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13176 TREE_OPERAND (arg1, 1)),
13177 build_zero_cst (TREE_TYPE (arg0)));
13179 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13180 otherwise Y might be >= # of bits in X's type and thus e.g.
13181 (unsigned char) (1 << Y) for Y 15 might be 0.
13182 If the cast is widening, then 1 << Y should have unsigned type,
13183 otherwise if Y is number of bits in the signed shift type minus 1,
13184 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13185 31 might be 0xffffffff80000000. */
13186 if ((code == LT_EXPR || code == GE_EXPR)
13187 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13188 && CONVERT_EXPR_P (arg1)
13189 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13190 && (element_precision (TREE_TYPE (arg1))
13191 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13192 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13193 || (element_precision (TREE_TYPE (arg1))
13194 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13195 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13197 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13198 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13199 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13200 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13201 build_zero_cst (TREE_TYPE (arg0)));
13206 case UNORDERED_EXPR:
13214 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13216 t1 = fold_relational_const (code, type, arg0, arg1);
13217 if (t1 != NULL_TREE)
13221 /* If the first operand is NaN, the result is constant. */
13222 if (TREE_CODE (arg0) == REAL_CST
13223 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13224 && (code != LTGT_EXPR || ! flag_trapping_math))
13226 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13227 ? integer_zero_node
13228 : integer_one_node;
13229 return omit_one_operand_loc (loc, type, t1, arg1);
13232 /* If the second operand is NaN, the result is constant. */
13233 if (TREE_CODE (arg1) == REAL_CST
13234 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13235 && (code != LTGT_EXPR || ! flag_trapping_math))
13237 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13238 ? integer_zero_node
13239 : integer_one_node;
13240 return omit_one_operand_loc (loc, type, t1, arg0);
13243 /* Simplify unordered comparison of something with itself. */
13244 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13245 && operand_equal_p (arg0, arg1, 0))
13246 return constant_boolean_node (1, type);
13248 if (code == LTGT_EXPR
13249 && !flag_trapping_math
13250 && operand_equal_p (arg0, arg1, 0))
13251 return constant_boolean_node (0, type);
13253 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13255 tree targ0 = strip_float_extensions (arg0);
13256 tree targ1 = strip_float_extensions (arg1);
13257 tree newtype = TREE_TYPE (targ0);
13259 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13260 newtype = TREE_TYPE (targ1);
13262 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13263 return fold_build2_loc (loc, code, type,
13264 fold_convert_loc (loc, newtype, targ0),
13265 fold_convert_loc (loc, newtype, targ1));
13270 case COMPOUND_EXPR:
13271 /* When pedantic, a compound expression can be neither an lvalue
13272 nor an integer constant expression. */
13273 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13275 /* Don't let (0, 0) be null pointer constant. */
13276 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13277 : fold_convert_loc (loc, type, arg1);
13278 return pedantic_non_lvalue_loc (loc, tem);
13281 /* An ASSERT_EXPR should never be passed to fold_binary. */
13282 gcc_unreachable ();
13286 } /* switch (code) */
13289 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13290 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13294 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13296 switch (TREE_CODE (*tp))
13302 *walk_subtrees = 0;
13304 /* ... fall through ... */
13311 /* Return whether the sub-tree ST contains a label which is accessible from
13312 outside the sub-tree. */
13315 contains_label_p (tree st)
13318 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13321 /* Fold a ternary expression of code CODE and type TYPE with operands
13322 OP0, OP1, and OP2. Return the folded expression if folding is
13323 successful. Otherwise, return NULL_TREE. */
13326 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13327 tree op0, tree op1, tree op2)
13330 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13331 enum tree_code_class kind = TREE_CODE_CLASS (code);
13333 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13334 && TREE_CODE_LENGTH (code) == 3);
13336 /* If this is a commutative operation, and OP0 is a constant, move it
13337 to OP1 to reduce the number of tests below. */
13338 if (commutative_ternary_tree_code (code)
13339 && tree_swap_operands_p (op0, op1, true))
13340 return fold_build3_loc (loc, code, type, op1, op0, op2);
13342 tem = generic_simplify (loc, code, type, op0, op1, op2);
13346 /* Strip any conversions that don't change the mode. This is safe
13347 for every expression, except for a comparison expression because
13348 its signedness is derived from its operands. So, in the latter
13349 case, only strip conversions that don't change the signedness.
13351 Note that this is done as an internal manipulation within the
13352 constant folder, in order to find the simplest representation of
13353 the arguments so that their form can be studied. In any cases,
13354 the appropriate type conversions should be put back in the tree
13355 that will get out of the constant folder. */
13376 case COMPONENT_REF:
13377 if (TREE_CODE (arg0) == CONSTRUCTOR
13378 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13380 unsigned HOST_WIDE_INT idx;
13382 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13389 case VEC_COND_EXPR:
13390 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13391 so all simple results must be passed through pedantic_non_lvalue. */
13392 if (TREE_CODE (arg0) == INTEGER_CST)
13394 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13395 tem = integer_zerop (arg0) ? op2 : op1;
13396 /* Only optimize constant conditions when the selected branch
13397 has the same type as the COND_EXPR. This avoids optimizing
13398 away "c ? x : throw", where the throw has a void type.
13399 Avoid throwing away that operand which contains label. */
13400 if ((!TREE_SIDE_EFFECTS (unused_op)
13401 || !contains_label_p (unused_op))
13402 && (! VOID_TYPE_P (TREE_TYPE (tem))
13403 || VOID_TYPE_P (type)))
13404 return pedantic_non_lvalue_loc (loc, tem);
13407 else if (TREE_CODE (arg0) == VECTOR_CST)
13409 if ((TREE_CODE (arg1) == VECTOR_CST
13410 || TREE_CODE (arg1) == CONSTRUCTOR)
13411 && (TREE_CODE (arg2) == VECTOR_CST
13412 || TREE_CODE (arg2) == CONSTRUCTOR))
13414 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13415 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13416 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13417 for (i = 0; i < nelts; i++)
13419 tree val = VECTOR_CST_ELT (arg0, i);
13420 if (integer_all_onesp (val))
13422 else if (integer_zerop (val))
13423 sel[i] = nelts + i;
13424 else /* Currently unreachable. */
13427 tree t = fold_vec_perm (type, arg1, arg2, sel);
13428 if (t != NULL_TREE)
13433 /* If we have A op B ? A : C, we may be able to convert this to a
13434 simpler expression, depending on the operation and the values
13435 of B and C. Signed zeros prevent all of these transformations,
13436 for reasons given above each one.
13438 Also try swapping the arguments and inverting the conditional. */
13439 if (COMPARISON_CLASS_P (arg0)
13440 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13441 arg1, TREE_OPERAND (arg0, 1))
13442 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13444 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13449 if (COMPARISON_CLASS_P (arg0)
13450 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13452 TREE_OPERAND (arg0, 1))
13453 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13455 location_t loc0 = expr_location_or (arg0, loc);
13456 tem = fold_invert_truthvalue (loc0, arg0);
13457 if (tem && COMPARISON_CLASS_P (tem))
13459 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13465 /* If the second operand is simpler than the third, swap them
13466 since that produces better jump optimization results. */
13467 if (truth_value_p (TREE_CODE (arg0))
13468 && tree_swap_operands_p (op1, op2, false))
13470 location_t loc0 = expr_location_or (arg0, loc);
13471 /* See if this can be inverted. If it can't, possibly because
13472 it was a floating-point inequality comparison, don't do
13474 tem = fold_invert_truthvalue (loc0, arg0);
13476 return fold_build3_loc (loc, code, type, tem, op2, op1);
13479 /* Convert A ? 1 : 0 to simply A. */
13480 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13481 : (integer_onep (op1)
13482 && !VECTOR_TYPE_P (type)))
13483 && integer_zerop (op2)
13484 /* If we try to convert OP0 to our type, the
13485 call to fold will try to move the conversion inside
13486 a COND, which will recurse. In that case, the COND_EXPR
13487 is probably the best choice, so leave it alone. */
13488 && type == TREE_TYPE (arg0))
13489 return pedantic_non_lvalue_loc (loc, arg0);
13491 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13492 over COND_EXPR in cases such as floating point comparisons. */
13493 if (integer_zerop (op1)
13494 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13495 : (integer_onep (op2)
13496 && !VECTOR_TYPE_P (type)))
13497 && truth_value_p (TREE_CODE (arg0)))
13498 return pedantic_non_lvalue_loc (loc,
13499 fold_convert_loc (loc, type,
13500 invert_truthvalue_loc (loc,
13503 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13504 if (TREE_CODE (arg0) == LT_EXPR
13505 && integer_zerop (TREE_OPERAND (arg0, 1))
13506 && integer_zerop (op2)
13507 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13509 /* sign_bit_p looks through both zero and sign extensions,
13510 but for this optimization only sign extensions are
13512 tree tem2 = TREE_OPERAND (arg0, 0);
13513 while (tem != tem2)
13515 if (TREE_CODE (tem2) != NOP_EXPR
13516 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13521 tem2 = TREE_OPERAND (tem2, 0);
13523 /* sign_bit_p only checks ARG1 bits within A's precision.
13524 If <sign bit of A> has wider type than A, bits outside
13525 of A's precision in <sign bit of A> need to be checked.
13526 If they are all 0, this optimization needs to be done
13527 in unsigned A's type, if they are all 1 in signed A's type,
13528 otherwise this can't be done. */
13530 && TYPE_PRECISION (TREE_TYPE (tem))
13531 < TYPE_PRECISION (TREE_TYPE (arg1))
13532 && TYPE_PRECISION (TREE_TYPE (tem))
13533 < TYPE_PRECISION (type))
13535 int inner_width, outer_width;
13538 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13539 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13540 if (outer_width > TYPE_PRECISION (type))
13541 outer_width = TYPE_PRECISION (type);
13543 wide_int mask = wi::shifted_mask
13544 (inner_width, outer_width - inner_width, false,
13545 TYPE_PRECISION (TREE_TYPE (arg1)));
13547 wide_int common = mask & arg1;
13548 if (common == mask)
13550 tem_type = signed_type_for (TREE_TYPE (tem));
13551 tem = fold_convert_loc (loc, tem_type, tem);
13553 else if (common == 0)
13555 tem_type = unsigned_type_for (TREE_TYPE (tem));
13556 tem = fold_convert_loc (loc, tem_type, tem);
13564 fold_convert_loc (loc, type,
13565 fold_build2_loc (loc, BIT_AND_EXPR,
13566 TREE_TYPE (tem), tem,
13567 fold_convert_loc (loc,
13572 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13573 already handled above. */
13574 if (TREE_CODE (arg0) == BIT_AND_EXPR
13575 && integer_onep (TREE_OPERAND (arg0, 1))
13576 && integer_zerop (op2)
13577 && integer_pow2p (arg1))
13579 tree tem = TREE_OPERAND (arg0, 0);
13581 if (TREE_CODE (tem) == RSHIFT_EXPR
13582 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13583 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13584 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13585 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13586 TREE_OPERAND (tem, 0), arg1);
13589 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13590 is probably obsolete because the first operand should be a
13591 truth value (that's why we have the two cases above), but let's
13592 leave it in until we can confirm this for all front-ends. */
13593 if (integer_zerop (op2)
13594 && TREE_CODE (arg0) == NE_EXPR
13595 && integer_zerop (TREE_OPERAND (arg0, 1))
13596 && integer_pow2p (arg1)
13597 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13598 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13599 arg1, OEP_ONLY_CONST))
13600 return pedantic_non_lvalue_loc (loc,
13601 fold_convert_loc (loc, type,
13602 TREE_OPERAND (arg0, 0)));
13604 /* Disable the transformations below for vectors, since
13605 fold_binary_op_with_conditional_arg may undo them immediately,
13606 yielding an infinite loop. */
13607 if (code == VEC_COND_EXPR)
13610 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13611 if (integer_zerop (op2)
13612 && truth_value_p (TREE_CODE (arg0))
13613 && truth_value_p (TREE_CODE (arg1))
13614 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13615 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13616 : TRUTH_ANDIF_EXPR,
13617 type, fold_convert_loc (loc, type, arg0), arg1);
13619 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13620 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13621 && truth_value_p (TREE_CODE (arg0))
13622 && truth_value_p (TREE_CODE (arg1))
13623 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13625 location_t loc0 = expr_location_or (arg0, loc);
13626 /* Only perform transformation if ARG0 is easily inverted. */
13627 tem = fold_invert_truthvalue (loc0, arg0);
13629 return fold_build2_loc (loc, code == VEC_COND_EXPR
13632 type, fold_convert_loc (loc, type, tem),
13636 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13637 if (integer_zerop (arg1)
13638 && truth_value_p (TREE_CODE (arg0))
13639 && truth_value_p (TREE_CODE (op2))
13640 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13642 location_t loc0 = expr_location_or (arg0, loc);
13643 /* Only perform transformation if ARG0 is easily inverted. */
13644 tem = fold_invert_truthvalue (loc0, arg0);
13646 return fold_build2_loc (loc, code == VEC_COND_EXPR
13647 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13648 type, fold_convert_loc (loc, type, tem),
13652 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13653 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13654 && truth_value_p (TREE_CODE (arg0))
13655 && truth_value_p (TREE_CODE (op2))
13656 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13657 return fold_build2_loc (loc, code == VEC_COND_EXPR
13658 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13659 type, fold_convert_loc (loc, type, arg0), op2);
13664 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13665 of fold_ternary on them. */
13666 gcc_unreachable ();
13668 case BIT_FIELD_REF:
13669 if ((TREE_CODE (arg0) == VECTOR_CST
13670 || (TREE_CODE (arg0) == CONSTRUCTOR
13671 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13672 && (type == TREE_TYPE (TREE_TYPE (arg0))
13673 || (TREE_CODE (type) == VECTOR_TYPE
13674 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13676 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13677 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13678 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13679 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13682 && (idx % width) == 0
13683 && (n % width) == 0
13684 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13689 if (TREE_CODE (arg0) == VECTOR_CST)
13692 return VECTOR_CST_ELT (arg0, idx);
13694 tree *vals = XALLOCAVEC (tree, n);
13695 for (unsigned i = 0; i < n; ++i)
13696 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13697 return build_vector (type, vals);
13700 /* Constructor elements can be subvectors. */
13701 unsigned HOST_WIDE_INT k = 1;
13702 if (CONSTRUCTOR_NELTS (arg0) != 0)
13704 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13705 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13706 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13709 /* We keep an exact subset of the constructor elements. */
13710 if ((idx % k) == 0 && (n % k) == 0)
13712 if (CONSTRUCTOR_NELTS (arg0) == 0)
13713 return build_constructor (type, NULL);
13718 if (idx < CONSTRUCTOR_NELTS (arg0))
13719 return CONSTRUCTOR_ELT (arg0, idx)->value;
13720 return build_zero_cst (type);
13723 vec<constructor_elt, va_gc> *vals;
13724 vec_alloc (vals, n);
13725 for (unsigned i = 0;
13726 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13728 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13730 (arg0, idx + i)->value);
13731 return build_constructor (type, vals);
13733 /* The bitfield references a single constructor element. */
13734 else if (idx + n <= (idx / k + 1) * k)
13736 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13737 return build_zero_cst (type);
13739 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13741 return fold_build3_loc (loc, code, type,
13742 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13743 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13748 /* A bit-field-ref that referenced the full argument can be stripped. */
13749 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13750 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13751 && integer_zerop (op2))
13752 return fold_convert_loc (loc, type, arg0);
13754 /* On constants we can use native encode/interpret to constant
13755 fold (nearly) all BIT_FIELD_REFs. */
13756 if (CONSTANT_CLASS_P (arg0)
13757 && can_native_interpret_type_p (type)
13758 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13759 /* This limitation should not be necessary, we just need to
13760 round this up to mode size. */
13761 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13762 /* Need bit-shifting of the buffer to relax the following. */
13763 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13765 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13766 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13767 unsigned HOST_WIDE_INT clen;
13768 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13769 /* ??? We cannot tell native_encode_expr to start at
13770 some random byte only. So limit us to a reasonable amount
13774 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13775 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13777 && len * BITS_PER_UNIT >= bitpos + bitsize)
13779 tree v = native_interpret_expr (type,
13780 b + bitpos / BITS_PER_UNIT,
13781 bitsize / BITS_PER_UNIT);
13791 /* For integers we can decompose the FMA if possible. */
13792 if (TREE_CODE (arg0) == INTEGER_CST
13793 && TREE_CODE (arg1) == INTEGER_CST)
13794 return fold_build2_loc (loc, PLUS_EXPR, type,
13795 const_binop (MULT_EXPR, arg0, arg1), arg2);
13796 if (integer_zerop (arg2))
13797 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13799 return fold_fma (loc, type, arg0, arg1, arg2);
13801 case VEC_PERM_EXPR:
13802 if (TREE_CODE (arg2) == VECTOR_CST)
13804 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13805 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13806 unsigned char *sel2 = sel + nelts;
13807 bool need_mask_canon = false;
13808 bool need_mask_canon2 = false;
13809 bool all_in_vec0 = true;
13810 bool all_in_vec1 = true;
13811 bool maybe_identity = true;
13812 bool single_arg = (op0 == op1);
13813 bool changed = false;
13815 mask2 = 2 * nelts - 1;
13816 mask = single_arg ? (nelts - 1) : mask2;
13817 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13818 for (i = 0; i < nelts; i++)
13820 tree val = VECTOR_CST_ELT (arg2, i);
13821 if (TREE_CODE (val) != INTEGER_CST)
13824 /* Make sure that the perm value is in an acceptable
13827 need_mask_canon |= wi::gtu_p (t, mask);
13828 need_mask_canon2 |= wi::gtu_p (t, mask2);
13829 sel[i] = t.to_uhwi () & mask;
13830 sel2[i] = t.to_uhwi () & mask2;
13832 if (sel[i] < nelts)
13833 all_in_vec1 = false;
13835 all_in_vec0 = false;
13837 if ((sel[i] & (nelts-1)) != i)
13838 maybe_identity = false;
13841 if (maybe_identity)
13851 else if (all_in_vec1)
13854 for (i = 0; i < nelts; i++)
13856 need_mask_canon = true;
13859 if ((TREE_CODE (op0) == VECTOR_CST
13860 || TREE_CODE (op0) == CONSTRUCTOR)
13861 && (TREE_CODE (op1) == VECTOR_CST
13862 || TREE_CODE (op1) == CONSTRUCTOR))
13864 tree t = fold_vec_perm (type, op0, op1, sel);
13865 if (t != NULL_TREE)
13869 if (op0 == op1 && !single_arg)
13872 /* Some targets are deficient and fail to expand a single
13873 argument permutation while still allowing an equivalent
13874 2-argument version. */
13875 if (need_mask_canon && arg2 == op2
13876 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13877 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13879 need_mask_canon = need_mask_canon2;
13883 if (need_mask_canon && arg2 == op2)
13885 tree *tsel = XALLOCAVEC (tree, nelts);
13886 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13887 for (i = 0; i < nelts; i++)
13888 tsel[i] = build_int_cst (eltype, sel[i]);
13889 op2 = build_vector (TREE_TYPE (arg2), tsel);
13894 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13900 } /* switch (code) */
13903 /* Perform constant folding and related simplification of EXPR.
13904 The related simplifications include x*1 => x, x*0 => 0, etc.,
13905 and application of the associative law.
13906 NOP_EXPR conversions may be removed freely (as long as we
13907 are careful not to change the type of the overall expression).
13908 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13909 but we can constant-fold them if they have constant operands. */
13911 #ifdef ENABLE_FOLD_CHECKING
13912 # define fold(x) fold_1 (x)
13913 static tree fold_1 (tree);
13919 const tree t = expr;
13920 enum tree_code code = TREE_CODE (t);
13921 enum tree_code_class kind = TREE_CODE_CLASS (code);
13923 location_t loc = EXPR_LOCATION (expr);
13925 /* Return right away if a constant. */
13926 if (kind == tcc_constant)
13929 /* CALL_EXPR-like objects with variable numbers of operands are
13930 treated specially. */
13931 if (kind == tcc_vl_exp)
13933 if (code == CALL_EXPR)
13935 tem = fold_call_expr (loc, expr, false);
13936 return tem ? tem : expr;
13941 if (IS_EXPR_CODE_CLASS (kind))
13943 tree type = TREE_TYPE (t);
13944 tree op0, op1, op2;
13946 switch (TREE_CODE_LENGTH (code))
13949 op0 = TREE_OPERAND (t, 0);
13950 tem = fold_unary_loc (loc, code, type, op0);
13951 return tem ? tem : expr;
13953 op0 = TREE_OPERAND (t, 0);
13954 op1 = TREE_OPERAND (t, 1);
13955 tem = fold_binary_loc (loc, code, type, op0, op1);
13956 return tem ? tem : expr;
13958 op0 = TREE_OPERAND (t, 0);
13959 op1 = TREE_OPERAND (t, 1);
13960 op2 = TREE_OPERAND (t, 2);
13961 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13962 return tem ? tem : expr;
13972 tree op0 = TREE_OPERAND (t, 0);
13973 tree op1 = TREE_OPERAND (t, 1);
13975 if (TREE_CODE (op1) == INTEGER_CST
13976 && TREE_CODE (op0) == CONSTRUCTOR
13977 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13979 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13980 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13981 unsigned HOST_WIDE_INT begin = 0;
13983 /* Find a matching index by means of a binary search. */
13984 while (begin != end)
13986 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13987 tree index = (*elts)[middle].index;
13989 if (TREE_CODE (index) == INTEGER_CST
13990 && tree_int_cst_lt (index, op1))
13991 begin = middle + 1;
13992 else if (TREE_CODE (index) == INTEGER_CST
13993 && tree_int_cst_lt (op1, index))
13995 else if (TREE_CODE (index) == RANGE_EXPR
13996 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13997 begin = middle + 1;
13998 else if (TREE_CODE (index) == RANGE_EXPR
13999 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14002 return (*elts)[middle].value;
14009 /* Return a VECTOR_CST if possible. */
14012 tree type = TREE_TYPE (t);
14013 if (TREE_CODE (type) != VECTOR_TYPE)
14016 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14017 unsigned HOST_WIDE_INT idx, pos = 0;
14020 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14022 if (!CONSTANT_CLASS_P (value))
14024 if (TREE_CODE (value) == VECTOR_CST)
14026 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14027 vec[pos++] = VECTOR_CST_ELT (value, i);
14030 vec[pos++] = value;
14032 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14033 vec[pos] = build_zero_cst (TREE_TYPE (type));
14035 return build_vector (type, vec);
14039 return fold (DECL_INITIAL (t));
14043 } /* switch (code) */
14046 #ifdef ENABLE_FOLD_CHECKING
14049 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14050 hash_table<pointer_hash<const tree_node> > *);
14051 static void fold_check_failed (const_tree, const_tree);
14052 void print_fold_checksum (const_tree);
14054 /* When --enable-checking=fold, compute a digest of expr before
14055 and after actual fold call to see if fold did not accidentally
14056 change original expr. */
14062 struct md5_ctx ctx;
14063 unsigned char checksum_before[16], checksum_after[16];
14064 hash_table<pointer_hash<const tree_node> > ht (32);
14066 md5_init_ctx (&ctx);
14067 fold_checksum_tree (expr, &ctx, &ht);
14068 md5_finish_ctx (&ctx, checksum_before);
14071 ret = fold_1 (expr);
14073 md5_init_ctx (&ctx);
14074 fold_checksum_tree (expr, &ctx, &ht);
14075 md5_finish_ctx (&ctx, checksum_after);
14077 if (memcmp (checksum_before, checksum_after, 16))
14078 fold_check_failed (expr, ret);
14084 print_fold_checksum (const_tree expr)
14086 struct md5_ctx ctx;
14087 unsigned char checksum[16], cnt;
14088 hash_table<pointer_hash<const tree_node> > ht (32);
14090 md5_init_ctx (&ctx);
14091 fold_checksum_tree (expr, &ctx, &ht);
14092 md5_finish_ctx (&ctx, checksum);
14093 for (cnt = 0; cnt < 16; ++cnt)
14094 fprintf (stderr, "%02x", checksum[cnt]);
14095 putc ('\n', stderr);
14099 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14101 internal_error ("fold check: original tree changed by fold");
14105 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14106 hash_table<pointer_hash <const tree_node> > *ht)
14108 const tree_node **slot;
14109 enum tree_code code;
14110 union tree_node buf;
14116 slot = ht->find_slot (expr, INSERT);
14120 code = TREE_CODE (expr);
14121 if (TREE_CODE_CLASS (code) == tcc_declaration
14122 && HAS_DECL_ASSEMBLER_NAME_P (expr))
14124 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
14125 memcpy ((char *) &buf, expr, tree_size (expr));
14126 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14127 buf.decl_with_vis.symtab_node = NULL;
14128 expr = (tree) &buf;
14130 else if (TREE_CODE_CLASS (code) == tcc_type
14131 && (TYPE_POINTER_TO (expr)
14132 || TYPE_REFERENCE_TO (expr)
14133 || TYPE_CACHED_VALUES_P (expr)
14134 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14135 || TYPE_NEXT_VARIANT (expr)))
14137 /* Allow these fields to be modified. */
14139 memcpy ((char *) &buf, expr, tree_size (expr));
14140 expr = tmp = (tree) &buf;
14141 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14142 TYPE_POINTER_TO (tmp) = NULL;
14143 TYPE_REFERENCE_TO (tmp) = NULL;
14144 TYPE_NEXT_VARIANT (tmp) = NULL;
14145 if (TYPE_CACHED_VALUES_P (tmp))
14147 TYPE_CACHED_VALUES_P (tmp) = 0;
14148 TYPE_CACHED_VALUES (tmp) = NULL;
14151 md5_process_bytes (expr, tree_size (expr), ctx);
14152 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14153 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14154 if (TREE_CODE_CLASS (code) != tcc_type
14155 && TREE_CODE_CLASS (code) != tcc_declaration
14156 && code != TREE_LIST
14157 && code != SSA_NAME
14158 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14159 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14160 switch (TREE_CODE_CLASS (code))
14166 md5_process_bytes (TREE_STRING_POINTER (expr),
14167 TREE_STRING_LENGTH (expr), ctx);
14170 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14171 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14174 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14175 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14181 case tcc_exceptional:
14185 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14186 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14187 expr = TREE_CHAIN (expr);
14188 goto recursive_label;
14191 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14192 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14198 case tcc_expression:
14199 case tcc_reference:
14200 case tcc_comparison:
14203 case tcc_statement:
14205 len = TREE_OPERAND_LENGTH (expr);
14206 for (i = 0; i < len; ++i)
14207 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14209 case tcc_declaration:
14210 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14211 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14212 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14214 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14215 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14216 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14217 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14218 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14221 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14223 if (TREE_CODE (expr) == FUNCTION_DECL)
14225 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14226 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14228 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14232 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14233 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14234 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14235 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14236 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14237 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14238 if (INTEGRAL_TYPE_P (expr)
14239 || SCALAR_FLOAT_TYPE_P (expr))
14241 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14242 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14244 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14245 if (TREE_CODE (expr) == RECORD_TYPE
14246 || TREE_CODE (expr) == UNION_TYPE
14247 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14248 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14249 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14256 /* Helper function for outputting the checksum of a tree T. When
14257 debugging with gdb, you can "define mynext" to be "next" followed
14258 by "call debug_fold_checksum (op0)", then just trace down till the
14261 DEBUG_FUNCTION void
14262 debug_fold_checksum (const_tree t)
14265 unsigned char checksum[16];
14266 struct md5_ctx ctx;
14267 hash_table<pointer_hash<const tree_node> > ht (32);
14269 md5_init_ctx (&ctx);
14270 fold_checksum_tree (t, &ctx, &ht);
14271 md5_finish_ctx (&ctx, checksum);
14274 for (i = 0; i < 16; i++)
14275 fprintf (stderr, "%d ", checksum[i]);
14277 fprintf (stderr, "\n");
14282 /* Fold a unary tree expression with code CODE of type TYPE with an
14283 operand OP0. LOC is the location of the resulting expression.
14284 Return a folded expression if successful. Otherwise, return a tree
14285 expression with code CODE of type TYPE with an operand OP0. */
14288 fold_build1_stat_loc (location_t loc,
14289 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14292 #ifdef ENABLE_FOLD_CHECKING
14293 unsigned char checksum_before[16], checksum_after[16];
14294 struct md5_ctx ctx;
14295 hash_table<pointer_hash<const tree_node> > ht (32);
14297 md5_init_ctx (&ctx);
14298 fold_checksum_tree (op0, &ctx, &ht);
14299 md5_finish_ctx (&ctx, checksum_before);
14303 tem = fold_unary_loc (loc, code, type, op0);
14305 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14307 #ifdef ENABLE_FOLD_CHECKING
14308 md5_init_ctx (&ctx);
14309 fold_checksum_tree (op0, &ctx, &ht);
14310 md5_finish_ctx (&ctx, checksum_after);
14312 if (memcmp (checksum_before, checksum_after, 16))
14313 fold_check_failed (op0, tem);
14318 /* Fold a binary tree expression with code CODE of type TYPE with
14319 operands OP0 and OP1. LOC is the location of the resulting
14320 expression. Return a folded expression if successful. Otherwise,
14321 return a tree expression with code CODE of type TYPE with operands
14325 fold_build2_stat_loc (location_t loc,
14326 enum tree_code code, tree type, tree op0, tree op1
14330 #ifdef ENABLE_FOLD_CHECKING
14331 unsigned char checksum_before_op0[16],
14332 checksum_before_op1[16],
14333 checksum_after_op0[16],
14334 checksum_after_op1[16];
14335 struct md5_ctx ctx;
14336 hash_table<pointer_hash<const tree_node> > ht (32);
14338 md5_init_ctx (&ctx);
14339 fold_checksum_tree (op0, &ctx, &ht);
14340 md5_finish_ctx (&ctx, checksum_before_op0);
14343 md5_init_ctx (&ctx);
14344 fold_checksum_tree (op1, &ctx, &ht);
14345 md5_finish_ctx (&ctx, checksum_before_op1);
14349 tem = fold_binary_loc (loc, code, type, op0, op1);
14351 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14353 #ifdef ENABLE_FOLD_CHECKING
14354 md5_init_ctx (&ctx);
14355 fold_checksum_tree (op0, &ctx, &ht);
14356 md5_finish_ctx (&ctx, checksum_after_op0);
14359 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14360 fold_check_failed (op0, tem);
14362 md5_init_ctx (&ctx);
14363 fold_checksum_tree (op1, &ctx, &ht);
14364 md5_finish_ctx (&ctx, checksum_after_op1);
14366 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14367 fold_check_failed (op1, tem);
14372 /* Fold a ternary tree expression with code CODE of type TYPE with
14373 operands OP0, OP1, and OP2. Return a folded expression if
14374 successful. Otherwise, return a tree expression with code CODE of
14375 type TYPE with operands OP0, OP1, and OP2. */
14378 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14379 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14382 #ifdef ENABLE_FOLD_CHECKING
14383 unsigned char checksum_before_op0[16],
14384 checksum_before_op1[16],
14385 checksum_before_op2[16],
14386 checksum_after_op0[16],
14387 checksum_after_op1[16],
14388 checksum_after_op2[16];
14389 struct md5_ctx ctx;
14390 hash_table<pointer_hash<const tree_node> > ht (32);
14392 md5_init_ctx (&ctx);
14393 fold_checksum_tree (op0, &ctx, &ht);
14394 md5_finish_ctx (&ctx, checksum_before_op0);
14397 md5_init_ctx (&ctx);
14398 fold_checksum_tree (op1, &ctx, &ht);
14399 md5_finish_ctx (&ctx, checksum_before_op1);
14402 md5_init_ctx (&ctx);
14403 fold_checksum_tree (op2, &ctx, &ht);
14404 md5_finish_ctx (&ctx, checksum_before_op2);
14408 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14409 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14411 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14413 #ifdef ENABLE_FOLD_CHECKING
14414 md5_init_ctx (&ctx);
14415 fold_checksum_tree (op0, &ctx, &ht);
14416 md5_finish_ctx (&ctx, checksum_after_op0);
14419 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14420 fold_check_failed (op0, tem);
14422 md5_init_ctx (&ctx);
14423 fold_checksum_tree (op1, &ctx, &ht);
14424 md5_finish_ctx (&ctx, checksum_after_op1);
14427 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14428 fold_check_failed (op1, tem);
14430 md5_init_ctx (&ctx);
14431 fold_checksum_tree (op2, &ctx, &ht);
14432 md5_finish_ctx (&ctx, checksum_after_op2);
14434 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14435 fold_check_failed (op2, tem);
14440 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14441 arguments in ARGARRAY, and a null static chain.
14442 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14443 of type TYPE from the given operands as constructed by build_call_array. */
14446 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14447 int nargs, tree *argarray)
14450 #ifdef ENABLE_FOLD_CHECKING
14451 unsigned char checksum_before_fn[16],
14452 checksum_before_arglist[16],
14453 checksum_after_fn[16],
14454 checksum_after_arglist[16];
14455 struct md5_ctx ctx;
14456 hash_table<pointer_hash<const tree_node> > ht (32);
14459 md5_init_ctx (&ctx);
14460 fold_checksum_tree (fn, &ctx, &ht);
14461 md5_finish_ctx (&ctx, checksum_before_fn);
14464 md5_init_ctx (&ctx);
14465 for (i = 0; i < nargs; i++)
14466 fold_checksum_tree (argarray[i], &ctx, &ht);
14467 md5_finish_ctx (&ctx, checksum_before_arglist);
14471 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14473 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14475 #ifdef ENABLE_FOLD_CHECKING
14476 md5_init_ctx (&ctx);
14477 fold_checksum_tree (fn, &ctx, &ht);
14478 md5_finish_ctx (&ctx, checksum_after_fn);
14481 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14482 fold_check_failed (fn, tem);
14484 md5_init_ctx (&ctx);
14485 for (i = 0; i < nargs; i++)
14486 fold_checksum_tree (argarray[i], &ctx, &ht);
14487 md5_finish_ctx (&ctx, checksum_after_arglist);
14489 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14490 fold_check_failed (NULL_TREE, tem);
14495 /* Perform constant folding and related simplification of initializer
14496 expression EXPR. These behave identically to "fold_buildN" but ignore
14497 potential run-time traps and exceptions that fold must preserve. */
14499 #define START_FOLD_INIT \
14500 int saved_signaling_nans = flag_signaling_nans;\
14501 int saved_trapping_math = flag_trapping_math;\
14502 int saved_rounding_math = flag_rounding_math;\
14503 int saved_trapv = flag_trapv;\
14504 int saved_folding_initializer = folding_initializer;\
14505 flag_signaling_nans = 0;\
14506 flag_trapping_math = 0;\
14507 flag_rounding_math = 0;\
14509 folding_initializer = 1;
14511 #define END_FOLD_INIT \
14512 flag_signaling_nans = saved_signaling_nans;\
14513 flag_trapping_math = saved_trapping_math;\
14514 flag_rounding_math = saved_rounding_math;\
14515 flag_trapv = saved_trapv;\
14516 folding_initializer = saved_folding_initializer;
14519 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14520 tree type, tree op)
14525 result = fold_build1_loc (loc, code, type, op);
14532 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14533 tree type, tree op0, tree op1)
14538 result = fold_build2_loc (loc, code, type, op0, op1);
14545 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14546 int nargs, tree *argarray)
14551 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14557 #undef START_FOLD_INIT
14558 #undef END_FOLD_INIT
14560 /* Determine if first argument is a multiple of second argument. Return 0 if
14561 it is not, or we cannot easily determined it to be.
14563 An example of the sort of thing we care about (at this point; this routine
14564 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14565 fold cases do now) is discovering that
14567 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14573 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14575 This code also handles discovering that
14577 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14579 is a multiple of 8 so we don't have to worry about dealing with a
14580 possible remainder.
14582 Note that we *look* inside a SAVE_EXPR only to determine how it was
14583 calculated; it is not safe for fold to do much of anything else with the
14584 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14585 at run time. For example, the latter example above *cannot* be implemented
14586 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14587 evaluation time of the original SAVE_EXPR is not necessarily the same at
14588 the time the new expression is evaluated. The only optimization of this
14589 sort that would be valid is changing
14591 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14595 SAVE_EXPR (I) * SAVE_EXPR (J)
14597 (where the same SAVE_EXPR (J) is used in the original and the
14598 transformed version). */
14601 multiple_of_p (tree type, const_tree top, const_tree bottom)
14603 if (operand_equal_p (top, bottom, 0))
14606 if (TREE_CODE (type) != INTEGER_TYPE)
14609 switch (TREE_CODE (top))
14612 /* Bitwise and provides a power of two multiple. If the mask is
14613 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14614 if (!integer_pow2p (bottom))
14619 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14620 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14624 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14625 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14628 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14632 op1 = TREE_OPERAND (top, 1);
14633 /* const_binop may not detect overflow correctly,
14634 so check for it explicitly here. */
14635 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14636 && 0 != (t1 = fold_convert (type,
14637 const_binop (LSHIFT_EXPR,
14640 && !TREE_OVERFLOW (t1))
14641 return multiple_of_p (type, t1, bottom);
14646 /* Can't handle conversions from non-integral or wider integral type. */
14647 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14648 || (TYPE_PRECISION (type)
14649 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14652 /* .. fall through ... */
14655 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14658 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14659 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14662 if (TREE_CODE (bottom) != INTEGER_CST
14663 || integer_zerop (bottom)
14664 || (TYPE_UNSIGNED (type)
14665 && (tree_int_cst_sgn (top) < 0
14666 || tree_int_cst_sgn (bottom) < 0)))
14668 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14676 /* Return true if CODE or TYPE is known to be non-negative. */
14679 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14681 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14682 && truth_value_p (code))
14683 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14684 have a signed:1 type (where the value is -1 and 0). */
14689 /* Return true if (CODE OP0) is known to be non-negative. If the return
14690 value is based on the assumption that signed overflow is undefined,
14691 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14692 *STRICT_OVERFLOW_P. */
14695 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14696 bool *strict_overflow_p)
14698 if (TYPE_UNSIGNED (type))
14704 /* We can't return 1 if flag_wrapv is set because
14705 ABS_EXPR<INT_MIN> = INT_MIN. */
14706 if (!INTEGRAL_TYPE_P (type))
14708 if (TYPE_OVERFLOW_UNDEFINED (type))
14710 *strict_overflow_p = true;
14715 case NON_LVALUE_EXPR:
14717 case FIX_TRUNC_EXPR:
14718 return tree_expr_nonnegative_warnv_p (op0,
14719 strict_overflow_p);
14723 tree inner_type = TREE_TYPE (op0);
14724 tree outer_type = type;
14726 if (TREE_CODE (outer_type) == REAL_TYPE)
14728 if (TREE_CODE (inner_type) == REAL_TYPE)
14729 return tree_expr_nonnegative_warnv_p (op0,
14730 strict_overflow_p);
14731 if (INTEGRAL_TYPE_P (inner_type))
14733 if (TYPE_UNSIGNED (inner_type))
14735 return tree_expr_nonnegative_warnv_p (op0,
14736 strict_overflow_p);
14739 else if (INTEGRAL_TYPE_P (outer_type))
14741 if (TREE_CODE (inner_type) == REAL_TYPE)
14742 return tree_expr_nonnegative_warnv_p (op0,
14743 strict_overflow_p);
14744 if (INTEGRAL_TYPE_P (inner_type))
14745 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14746 && TYPE_UNSIGNED (inner_type);
14752 return tree_simple_nonnegative_warnv_p (code, type);
14755 /* We don't know sign of `t', so be conservative and return false. */
14759 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14760 value is based on the assumption that signed overflow is undefined,
14761 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14762 *STRICT_OVERFLOW_P. */
14765 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14766 tree op1, bool *strict_overflow_p)
14768 if (TYPE_UNSIGNED (type))
14773 case POINTER_PLUS_EXPR:
14775 if (FLOAT_TYPE_P (type))
14776 return (tree_expr_nonnegative_warnv_p (op0,
14778 && tree_expr_nonnegative_warnv_p (op1,
14779 strict_overflow_p));
14781 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14782 both unsigned and at least 2 bits shorter than the result. */
14783 if (TREE_CODE (type) == INTEGER_TYPE
14784 && TREE_CODE (op0) == NOP_EXPR
14785 && TREE_CODE (op1) == NOP_EXPR)
14787 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14788 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14789 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14790 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14792 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14793 TYPE_PRECISION (inner2)) + 1;
14794 return prec < TYPE_PRECISION (type);
14800 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14802 /* x * x is always non-negative for floating point x
14803 or without overflow. */
14804 if (operand_equal_p (op0, op1, 0)
14805 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14806 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14808 if (ANY_INTEGRAL_TYPE_P (type)
14809 && TYPE_OVERFLOW_UNDEFINED (type))
14810 *strict_overflow_p = true;
14815 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14816 both unsigned and their total bits is shorter than the result. */
14817 if (TREE_CODE (type) == INTEGER_TYPE
14818 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14819 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14821 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14822 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14824 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14825 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14828 bool unsigned0 = TYPE_UNSIGNED (inner0);
14829 bool unsigned1 = TYPE_UNSIGNED (inner1);
14831 if (TREE_CODE (op0) == INTEGER_CST)
14832 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14834 if (TREE_CODE (op1) == INTEGER_CST)
14835 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14837 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14838 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14840 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14841 ? tree_int_cst_min_precision (op0, UNSIGNED)
14842 : TYPE_PRECISION (inner0);
14844 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14845 ? tree_int_cst_min_precision (op1, UNSIGNED)
14846 : TYPE_PRECISION (inner1);
14848 return precision0 + precision1 < TYPE_PRECISION (type);
14855 return (tree_expr_nonnegative_warnv_p (op0,
14857 || tree_expr_nonnegative_warnv_p (op1,
14858 strict_overflow_p));
14864 case TRUNC_DIV_EXPR:
14865 case CEIL_DIV_EXPR:
14866 case FLOOR_DIV_EXPR:
14867 case ROUND_DIV_EXPR:
14868 return (tree_expr_nonnegative_warnv_p (op0,
14870 && tree_expr_nonnegative_warnv_p (op1,
14871 strict_overflow_p));
14873 case TRUNC_MOD_EXPR:
14874 case CEIL_MOD_EXPR:
14875 case FLOOR_MOD_EXPR:
14876 case ROUND_MOD_EXPR:
14877 return tree_expr_nonnegative_warnv_p (op0,
14878 strict_overflow_p);
14880 return tree_simple_nonnegative_warnv_p (code, type);
14883 /* We don't know sign of `t', so be conservative and return false. */
14887 /* Return true if T is known to be non-negative. If the return
14888 value is based on the assumption that signed overflow is undefined,
14889 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14890 *STRICT_OVERFLOW_P. */
14893 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14895 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14898 switch (TREE_CODE (t))
14901 return tree_int_cst_sgn (t) >= 0;
14904 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14907 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14910 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14912 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14913 strict_overflow_p));
14915 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14918 /* We don't know sign of `t', so be conservative and return false. */
14922 /* Return true if T is known to be non-negative. If the return
14923 value is based on the assumption that signed overflow is undefined,
14924 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14925 *STRICT_OVERFLOW_P. */
14928 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14929 tree arg0, tree arg1, bool *strict_overflow_p)
14931 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14932 switch (DECL_FUNCTION_CODE (fndecl))
14934 CASE_FLT_FN (BUILT_IN_ACOS):
14935 CASE_FLT_FN (BUILT_IN_ACOSH):
14936 CASE_FLT_FN (BUILT_IN_CABS):
14937 CASE_FLT_FN (BUILT_IN_COSH):
14938 CASE_FLT_FN (BUILT_IN_ERFC):
14939 CASE_FLT_FN (BUILT_IN_EXP):
14940 CASE_FLT_FN (BUILT_IN_EXP10):
14941 CASE_FLT_FN (BUILT_IN_EXP2):
14942 CASE_FLT_FN (BUILT_IN_FABS):
14943 CASE_FLT_FN (BUILT_IN_FDIM):
14944 CASE_FLT_FN (BUILT_IN_HYPOT):
14945 CASE_FLT_FN (BUILT_IN_POW10):
14946 CASE_INT_FN (BUILT_IN_FFS):
14947 CASE_INT_FN (BUILT_IN_PARITY):
14948 CASE_INT_FN (BUILT_IN_POPCOUNT):
14949 CASE_INT_FN (BUILT_IN_CLZ):
14950 CASE_INT_FN (BUILT_IN_CLRSB):
14951 case BUILT_IN_BSWAP32:
14952 case BUILT_IN_BSWAP64:
14956 CASE_FLT_FN (BUILT_IN_SQRT):
14957 /* sqrt(-0.0) is -0.0. */
14958 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14960 return tree_expr_nonnegative_warnv_p (arg0,
14961 strict_overflow_p);
14963 CASE_FLT_FN (BUILT_IN_ASINH):
14964 CASE_FLT_FN (BUILT_IN_ATAN):
14965 CASE_FLT_FN (BUILT_IN_ATANH):
14966 CASE_FLT_FN (BUILT_IN_CBRT):
14967 CASE_FLT_FN (BUILT_IN_CEIL):
14968 CASE_FLT_FN (BUILT_IN_ERF):
14969 CASE_FLT_FN (BUILT_IN_EXPM1):
14970 CASE_FLT_FN (BUILT_IN_FLOOR):
14971 CASE_FLT_FN (BUILT_IN_FMOD):
14972 CASE_FLT_FN (BUILT_IN_FREXP):
14973 CASE_FLT_FN (BUILT_IN_ICEIL):
14974 CASE_FLT_FN (BUILT_IN_IFLOOR):
14975 CASE_FLT_FN (BUILT_IN_IRINT):
14976 CASE_FLT_FN (BUILT_IN_IROUND):
14977 CASE_FLT_FN (BUILT_IN_LCEIL):
14978 CASE_FLT_FN (BUILT_IN_LDEXP):
14979 CASE_FLT_FN (BUILT_IN_LFLOOR):
14980 CASE_FLT_FN (BUILT_IN_LLCEIL):
14981 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14982 CASE_FLT_FN (BUILT_IN_LLRINT):
14983 CASE_FLT_FN (BUILT_IN_LLROUND):
14984 CASE_FLT_FN (BUILT_IN_LRINT):
14985 CASE_FLT_FN (BUILT_IN_LROUND):
14986 CASE_FLT_FN (BUILT_IN_MODF):
14987 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14988 CASE_FLT_FN (BUILT_IN_RINT):
14989 CASE_FLT_FN (BUILT_IN_ROUND):
14990 CASE_FLT_FN (BUILT_IN_SCALB):
14991 CASE_FLT_FN (BUILT_IN_SCALBLN):
14992 CASE_FLT_FN (BUILT_IN_SCALBN):
14993 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14994 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14995 CASE_FLT_FN (BUILT_IN_SINH):
14996 CASE_FLT_FN (BUILT_IN_TANH):
14997 CASE_FLT_FN (BUILT_IN_TRUNC):
14998 /* True if the 1st argument is nonnegative. */
14999 return tree_expr_nonnegative_warnv_p (arg0,
15000 strict_overflow_p);
15002 CASE_FLT_FN (BUILT_IN_FMAX):
15003 /* True if the 1st OR 2nd arguments are nonnegative. */
15004 return (tree_expr_nonnegative_warnv_p (arg0,
15006 || (tree_expr_nonnegative_warnv_p (arg1,
15007 strict_overflow_p)));
15009 CASE_FLT_FN (BUILT_IN_FMIN):
15010 /* True if the 1st AND 2nd arguments are nonnegative. */
15011 return (tree_expr_nonnegative_warnv_p (arg0,
15013 && (tree_expr_nonnegative_warnv_p (arg1,
15014 strict_overflow_p)));
15016 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15017 /* True if the 2nd argument is nonnegative. */
15018 return tree_expr_nonnegative_warnv_p (arg1,
15019 strict_overflow_p);
15021 CASE_FLT_FN (BUILT_IN_POWI):
15022 /* True if the 1st argument is nonnegative or the second
15023 argument is an even integer. */
15024 if (TREE_CODE (arg1) == INTEGER_CST
15025 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15027 return tree_expr_nonnegative_warnv_p (arg0,
15028 strict_overflow_p);
15030 CASE_FLT_FN (BUILT_IN_POW):
15031 /* True if the 1st argument is nonnegative or the second
15032 argument is an even integer valued real. */
15033 if (TREE_CODE (arg1) == REAL_CST)
15038 c = TREE_REAL_CST (arg1);
15039 n = real_to_integer (&c);
15042 REAL_VALUE_TYPE cint;
15043 real_from_integer (&cint, VOIDmode, n, SIGNED);
15044 if (real_identical (&c, &cint))
15048 return tree_expr_nonnegative_warnv_p (arg0,
15049 strict_overflow_p);
15054 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15058 /* Return true if T is known to be non-negative. If the return
15059 value is based on the assumption that signed overflow is undefined,
15060 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15061 *STRICT_OVERFLOW_P. */
15064 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15066 enum tree_code code = TREE_CODE (t);
15067 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15074 tree temp = TARGET_EXPR_SLOT (t);
15075 t = TARGET_EXPR_INITIAL (t);
15077 /* If the initializer is non-void, then it's a normal expression
15078 that will be assigned to the slot. */
15079 if (!VOID_TYPE_P (t))
15080 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15082 /* Otherwise, the initializer sets the slot in some way. One common
15083 way is an assignment statement at the end of the initializer. */
15086 if (TREE_CODE (t) == BIND_EXPR)
15087 t = expr_last (BIND_EXPR_BODY (t));
15088 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15089 || TREE_CODE (t) == TRY_CATCH_EXPR)
15090 t = expr_last (TREE_OPERAND (t, 0));
15091 else if (TREE_CODE (t) == STATEMENT_LIST)
15096 if (TREE_CODE (t) == MODIFY_EXPR
15097 && TREE_OPERAND (t, 0) == temp)
15098 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15099 strict_overflow_p);
15106 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15107 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15109 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15110 get_callee_fndecl (t),
15113 strict_overflow_p);
15115 case COMPOUND_EXPR:
15117 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15118 strict_overflow_p);
15120 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15121 strict_overflow_p);
15123 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15124 strict_overflow_p);
15127 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15131 /* We don't know sign of `t', so be conservative and return false. */
15135 /* Return true if T is known to be non-negative. If the return
15136 value is based on the assumption that signed overflow is undefined,
15137 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15138 *STRICT_OVERFLOW_P. */
15141 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15143 enum tree_code code;
15144 if (t == error_mark_node)
15147 code = TREE_CODE (t);
15148 switch (TREE_CODE_CLASS (code))
15151 case tcc_comparison:
15152 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15154 TREE_OPERAND (t, 0),
15155 TREE_OPERAND (t, 1),
15156 strict_overflow_p);
15159 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15161 TREE_OPERAND (t, 0),
15162 strict_overflow_p);
15165 case tcc_declaration:
15166 case tcc_reference:
15167 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15175 case TRUTH_AND_EXPR:
15176 case TRUTH_OR_EXPR:
15177 case TRUTH_XOR_EXPR:
15178 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15180 TREE_OPERAND (t, 0),
15181 TREE_OPERAND (t, 1),
15182 strict_overflow_p);
15183 case TRUTH_NOT_EXPR:
15184 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15186 TREE_OPERAND (t, 0),
15187 strict_overflow_p);
15194 case WITH_SIZE_EXPR:
15196 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15199 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15203 /* Return true if `t' is known to be non-negative. Handle warnings
15204 about undefined signed overflow. */
15207 tree_expr_nonnegative_p (tree t)
15209 bool ret, strict_overflow_p;
15211 strict_overflow_p = false;
15212 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15213 if (strict_overflow_p)
15214 fold_overflow_warning (("assuming signed overflow does not occur when "
15215 "determining that expression is always "
15217 WARN_STRICT_OVERFLOW_MISC);
15222 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15223 For floating point we further ensure that T is not denormal.
15224 Similar logic is present in nonzero_address in rtlanal.h.
15226 If the return value is based on the assumption that signed overflow
15227 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15228 change *STRICT_OVERFLOW_P. */
15231 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15232 bool *strict_overflow_p)
15237 return tree_expr_nonzero_warnv_p (op0,
15238 strict_overflow_p);
15242 tree inner_type = TREE_TYPE (op0);
15243 tree outer_type = type;
15245 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15246 && tree_expr_nonzero_warnv_p (op0,
15247 strict_overflow_p));
15251 case NON_LVALUE_EXPR:
15252 return tree_expr_nonzero_warnv_p (op0,
15253 strict_overflow_p);
15262 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15263 For floating point we further ensure that T is not denormal.
15264 Similar logic is present in nonzero_address in rtlanal.h.
15266 If the return value is based on the assumption that signed overflow
15267 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15268 change *STRICT_OVERFLOW_P. */
15271 tree_binary_nonzero_warnv_p (enum tree_code code,
15274 tree op1, bool *strict_overflow_p)
15276 bool sub_strict_overflow_p;
15279 case POINTER_PLUS_EXPR:
15281 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15283 /* With the presence of negative values it is hard
15284 to say something. */
15285 sub_strict_overflow_p = false;
15286 if (!tree_expr_nonnegative_warnv_p (op0,
15287 &sub_strict_overflow_p)
15288 || !tree_expr_nonnegative_warnv_p (op1,
15289 &sub_strict_overflow_p))
15291 /* One of operands must be positive and the other non-negative. */
15292 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15293 overflows, on a twos-complement machine the sum of two
15294 nonnegative numbers can never be zero. */
15295 return (tree_expr_nonzero_warnv_p (op0,
15297 || tree_expr_nonzero_warnv_p (op1,
15298 strict_overflow_p));
15303 if (TYPE_OVERFLOW_UNDEFINED (type))
15305 if (tree_expr_nonzero_warnv_p (op0,
15307 && tree_expr_nonzero_warnv_p (op1,
15308 strict_overflow_p))
15310 *strict_overflow_p = true;
15317 sub_strict_overflow_p = false;
15318 if (tree_expr_nonzero_warnv_p (op0,
15319 &sub_strict_overflow_p)
15320 && tree_expr_nonzero_warnv_p (op1,
15321 &sub_strict_overflow_p))
15323 if (sub_strict_overflow_p)
15324 *strict_overflow_p = true;
15329 sub_strict_overflow_p = false;
15330 if (tree_expr_nonzero_warnv_p (op0,
15331 &sub_strict_overflow_p))
15333 if (sub_strict_overflow_p)
15334 *strict_overflow_p = true;
15336 /* When both operands are nonzero, then MAX must be too. */
15337 if (tree_expr_nonzero_warnv_p (op1,
15338 strict_overflow_p))
15341 /* MAX where operand 0 is positive is positive. */
15342 return tree_expr_nonnegative_warnv_p (op0,
15343 strict_overflow_p);
15345 /* MAX where operand 1 is positive is positive. */
15346 else if (tree_expr_nonzero_warnv_p (op1,
15347 &sub_strict_overflow_p)
15348 && tree_expr_nonnegative_warnv_p (op1,
15349 &sub_strict_overflow_p))
15351 if (sub_strict_overflow_p)
15352 *strict_overflow_p = true;
15358 return (tree_expr_nonzero_warnv_p (op1,
15360 || tree_expr_nonzero_warnv_p (op0,
15361 strict_overflow_p));
15370 /* Return true when T is an address and is known to be nonzero.
15371 For floating point we further ensure that T is not denormal.
15372 Similar logic is present in nonzero_address in rtlanal.h.
15374 If the return value is based on the assumption that signed overflow
15375 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15376 change *STRICT_OVERFLOW_P. */
15379 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15381 bool sub_strict_overflow_p;
15382 switch (TREE_CODE (t))
15385 return !integer_zerop (t);
15389 tree base = TREE_OPERAND (t, 0);
15391 if (!DECL_P (base))
15392 base = get_base_address (base);
15397 /* For objects in symbol table check if we know they are non-zero.
15398 Don't do anything for variables and functions before symtab is built;
15399 it is quite possible that they will be declared weak later. */
15400 if (DECL_P (base) && decl_in_symtab_p (base))
15402 struct symtab_node *symbol;
15404 symbol = symtab_node::get_create (base);
15406 return symbol->nonzero_address ();
15411 /* Function local objects are never NULL. */
15413 && (DECL_CONTEXT (base)
15414 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15415 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15418 /* Constants are never weak. */
15419 if (CONSTANT_CLASS_P (base))
15426 sub_strict_overflow_p = false;
15427 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15428 &sub_strict_overflow_p)
15429 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15430 &sub_strict_overflow_p))
15432 if (sub_strict_overflow_p)
15433 *strict_overflow_p = true;
15444 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15445 attempt to fold the expression to a constant without modifying TYPE,
15448 If the expression could be simplified to a constant, then return
15449 the constant. If the expression would not be simplified to a
15450 constant, then return NULL_TREE. */
15453 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15455 tree tem = fold_binary (code, type, op0, op1);
15456 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15459 /* Given the components of a unary expression CODE, TYPE and OP0,
15460 attempt to fold the expression to a constant without modifying
15463 If the expression could be simplified to a constant, then return
15464 the constant. If the expression would not be simplified to a
15465 constant, then return NULL_TREE. */
15468 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15470 tree tem = fold_unary (code, type, op0);
15471 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15474 /* If EXP represents referencing an element in a constant string
15475 (either via pointer arithmetic or array indexing), return the
15476 tree representing the value accessed, otherwise return NULL. */
15479 fold_read_from_constant_string (tree exp)
15481 if ((TREE_CODE (exp) == INDIRECT_REF
15482 || TREE_CODE (exp) == ARRAY_REF)
15483 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15485 tree exp1 = TREE_OPERAND (exp, 0);
15488 location_t loc = EXPR_LOCATION (exp);
15490 if (TREE_CODE (exp) == INDIRECT_REF)
15491 string = string_constant (exp1, &index);
15494 tree low_bound = array_ref_low_bound (exp);
15495 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15497 /* Optimize the special-case of a zero lower bound.
15499 We convert the low_bound to sizetype to avoid some problems
15500 with constant folding. (E.g. suppose the lower bound is 1,
15501 and its mode is QI. Without the conversion,l (ARRAY
15502 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15503 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15504 if (! integer_zerop (low_bound))
15505 index = size_diffop_loc (loc, index,
15506 fold_convert_loc (loc, sizetype, low_bound));
15512 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15513 && TREE_CODE (string) == STRING_CST
15514 && TREE_CODE (index) == INTEGER_CST
15515 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15516 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15518 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15519 return build_int_cst_type (TREE_TYPE (exp),
15520 (TREE_STRING_POINTER (string)
15521 [TREE_INT_CST_LOW (index)]));
15526 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15527 an integer constant, real, or fixed-point constant.
15529 TYPE is the type of the result. */
15532 fold_negate_const (tree arg0, tree type)
15534 tree t = NULL_TREE;
15536 switch (TREE_CODE (arg0))
15541 wide_int val = wi::neg (arg0, &overflow);
15542 t = force_fit_type (type, val, 1,
15543 (overflow | TREE_OVERFLOW (arg0))
15544 && !TYPE_UNSIGNED (type));
15549 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15554 FIXED_VALUE_TYPE f;
15555 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15556 &(TREE_FIXED_CST (arg0)), NULL,
15557 TYPE_SATURATING (type));
15558 t = build_fixed (type, f);
15559 /* Propagate overflow flags. */
15560 if (overflow_p | TREE_OVERFLOW (arg0))
15561 TREE_OVERFLOW (t) = 1;
15566 gcc_unreachable ();
15572 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15573 an integer constant or real constant.
15575 TYPE is the type of the result. */
15578 fold_abs_const (tree arg0, tree type)
15580 tree t = NULL_TREE;
15582 switch (TREE_CODE (arg0))
15586 /* If the value is unsigned or non-negative, then the absolute value
15587 is the same as the ordinary value. */
15588 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15591 /* If the value is negative, then the absolute value is
15596 wide_int val = wi::neg (arg0, &overflow);
15597 t = force_fit_type (type, val, -1,
15598 overflow | TREE_OVERFLOW (arg0));
15604 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15605 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15611 gcc_unreachable ();
15617 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15618 constant. TYPE is the type of the result. */
15621 fold_not_const (const_tree arg0, tree type)
15623 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15625 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15628 /* Given CODE, a relational operator, the target type, TYPE and two
15629 constant operands OP0 and OP1, return the result of the
15630 relational operation. If the result is not a compile time
15631 constant, then return NULL_TREE. */
15634 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15636 int result, invert;
15638 /* From here on, the only cases we handle are when the result is
15639 known to be a constant. */
15641 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15643 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15644 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15646 /* Handle the cases where either operand is a NaN. */
15647 if (real_isnan (c0) || real_isnan (c1))
15657 case UNORDERED_EXPR:
15671 if (flag_trapping_math)
15677 gcc_unreachable ();
15680 return constant_boolean_node (result, type);
15683 return constant_boolean_node (real_compare (code, c0, c1), type);
15686 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15688 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15689 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15690 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15693 /* Handle equality/inequality of complex constants. */
15694 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15696 tree rcond = fold_relational_const (code, type,
15697 TREE_REALPART (op0),
15698 TREE_REALPART (op1));
15699 tree icond = fold_relational_const (code, type,
15700 TREE_IMAGPART (op0),
15701 TREE_IMAGPART (op1));
15702 if (code == EQ_EXPR)
15703 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15704 else if (code == NE_EXPR)
15705 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15710 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15712 unsigned count = VECTOR_CST_NELTS (op0);
15713 tree *elts = XALLOCAVEC (tree, count);
15714 gcc_assert (VECTOR_CST_NELTS (op1) == count
15715 && TYPE_VECTOR_SUBPARTS (type) == count);
15717 for (unsigned i = 0; i < count; i++)
15719 tree elem_type = TREE_TYPE (type);
15720 tree elem0 = VECTOR_CST_ELT (op0, i);
15721 tree elem1 = VECTOR_CST_ELT (op1, i);
15723 tree tem = fold_relational_const (code, elem_type,
15726 if (tem == NULL_TREE)
15729 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15732 return build_vector (type, elts);
15735 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15737 To compute GT, swap the arguments and do LT.
15738 To compute GE, do LT and invert the result.
15739 To compute LE, swap the arguments, do LT and invert the result.
15740 To compute NE, do EQ and invert the result.
15742 Therefore, the code below must handle only EQ and LT. */
15744 if (code == LE_EXPR || code == GT_EXPR)
15749 code = swap_tree_comparison (code);
15752 /* Note that it is safe to invert for real values here because we
15753 have already handled the one case that it matters. */
15756 if (code == NE_EXPR || code == GE_EXPR)
15759 code = invert_tree_comparison (code, false);
15762 /* Compute a result for LT or EQ if args permit;
15763 Otherwise return T. */
15764 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15766 if (code == EQ_EXPR)
15767 result = tree_int_cst_equal (op0, op1);
15769 result = tree_int_cst_lt (op0, op1);
15776 return constant_boolean_node (result, type);
15779 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15780 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15784 fold_build_cleanup_point_expr (tree type, tree expr)
15786 /* If the expression does not have side effects then we don't have to wrap
15787 it with a cleanup point expression. */
15788 if (!TREE_SIDE_EFFECTS (expr))
15791 /* If the expression is a return, check to see if the expression inside the
15792 return has no side effects or the right hand side of the modify expression
15793 inside the return. If either don't have side effects set we don't need to
15794 wrap the expression in a cleanup point expression. Note we don't check the
15795 left hand side of the modify because it should always be a return decl. */
15796 if (TREE_CODE (expr) == RETURN_EXPR)
15798 tree op = TREE_OPERAND (expr, 0);
15799 if (!op || !TREE_SIDE_EFFECTS (op))
15801 op = TREE_OPERAND (op, 1);
15802 if (!TREE_SIDE_EFFECTS (op))
15806 return build1 (CLEANUP_POINT_EXPR, type, expr);
15809 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15810 of an indirection through OP0, or NULL_TREE if no simplification is
15814 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15820 subtype = TREE_TYPE (sub);
15821 if (!POINTER_TYPE_P (subtype))
15824 if (TREE_CODE (sub) == ADDR_EXPR)
15826 tree op = TREE_OPERAND (sub, 0);
15827 tree optype = TREE_TYPE (op);
15828 /* *&CONST_DECL -> to the value of the const decl. */
15829 if (TREE_CODE (op) == CONST_DECL)
15830 return DECL_INITIAL (op);
15831 /* *&p => p; make sure to handle *&"str"[cst] here. */
15832 if (type == optype)
15834 tree fop = fold_read_from_constant_string (op);
15840 /* *(foo *)&fooarray => fooarray[0] */
15841 else if (TREE_CODE (optype) == ARRAY_TYPE
15842 && type == TREE_TYPE (optype)
15843 && (!in_gimple_form
15844 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15846 tree type_domain = TYPE_DOMAIN (optype);
15847 tree min_val = size_zero_node;
15848 if (type_domain && TYPE_MIN_VALUE (type_domain))
15849 min_val = TYPE_MIN_VALUE (type_domain);
15851 && TREE_CODE (min_val) != INTEGER_CST)
15853 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15854 NULL_TREE, NULL_TREE);
15856 /* *(foo *)&complexfoo => __real__ complexfoo */
15857 else if (TREE_CODE (optype) == COMPLEX_TYPE
15858 && type == TREE_TYPE (optype))
15859 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15860 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15861 else if (TREE_CODE (optype) == VECTOR_TYPE
15862 && type == TREE_TYPE (optype))
15864 tree part_width = TYPE_SIZE (type);
15865 tree index = bitsize_int (0);
15866 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15870 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15871 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15873 tree op00 = TREE_OPERAND (sub, 0);
15874 tree op01 = TREE_OPERAND (sub, 1);
15877 if (TREE_CODE (op00) == ADDR_EXPR)
15880 op00 = TREE_OPERAND (op00, 0);
15881 op00type = TREE_TYPE (op00);
15883 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15884 if (TREE_CODE (op00type) == VECTOR_TYPE
15885 && type == TREE_TYPE (op00type))
15887 HOST_WIDE_INT offset = tree_to_shwi (op01);
15888 tree part_width = TYPE_SIZE (type);
15889 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
15890 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15891 tree index = bitsize_int (indexi);
15893 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15894 return fold_build3_loc (loc,
15895 BIT_FIELD_REF, type, op00,
15896 part_width, index);
15899 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15900 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15901 && type == TREE_TYPE (op00type))
15903 tree size = TYPE_SIZE_UNIT (type);
15904 if (tree_int_cst_equal (size, op01))
15905 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15907 /* ((foo *)&fooarray)[1] => fooarray[1] */
15908 else if (TREE_CODE (op00type) == ARRAY_TYPE
15909 && type == TREE_TYPE (op00type))
15911 tree type_domain = TYPE_DOMAIN (op00type);
15912 tree min_val = size_zero_node;
15913 if (type_domain && TYPE_MIN_VALUE (type_domain))
15914 min_val = TYPE_MIN_VALUE (type_domain);
15915 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15916 TYPE_SIZE_UNIT (type));
15917 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15918 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15919 NULL_TREE, NULL_TREE);
15924 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15925 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15926 && type == TREE_TYPE (TREE_TYPE (subtype))
15927 && (!in_gimple_form
15928 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15931 tree min_val = size_zero_node;
15932 sub = build_fold_indirect_ref_loc (loc, sub);
15933 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15934 if (type_domain && TYPE_MIN_VALUE (type_domain))
15935 min_val = TYPE_MIN_VALUE (type_domain);
15937 && TREE_CODE (min_val) != INTEGER_CST)
15939 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15946 /* Builds an expression for an indirection through T, simplifying some
15950 build_fold_indirect_ref_loc (location_t loc, tree t)
15952 tree type = TREE_TYPE (TREE_TYPE (t));
15953 tree sub = fold_indirect_ref_1 (loc, type, t);
15958 return build1_loc (loc, INDIRECT_REF, type, t);
15961 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15964 fold_indirect_ref_loc (location_t loc, tree t)
15966 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15974 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15975 whose result is ignored. The type of the returned tree need not be
15976 the same as the original expression. */
15979 fold_ignored_result (tree t)
15981 if (!TREE_SIDE_EFFECTS (t))
15982 return integer_zero_node;
15985 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15988 t = TREE_OPERAND (t, 0);
15992 case tcc_comparison:
15993 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15994 t = TREE_OPERAND (t, 0);
15995 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15996 t = TREE_OPERAND (t, 1);
16001 case tcc_expression:
16002 switch (TREE_CODE (t))
16004 case COMPOUND_EXPR:
16005 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16007 t = TREE_OPERAND (t, 0);
16011 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16012 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16014 t = TREE_OPERAND (t, 0);
16027 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16030 round_up_loc (location_t loc, tree value, unsigned int divisor)
16032 tree div = NULL_TREE;
16037 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16038 have to do anything. Only do this when we are not given a const,
16039 because in that case, this check is more expensive than just
16041 if (TREE_CODE (value) != INTEGER_CST)
16043 div = build_int_cst (TREE_TYPE (value), divisor);
16045 if (multiple_of_p (TREE_TYPE (value), value, div))
16049 /* If divisor is a power of two, simplify this to bit manipulation. */
16050 if (divisor == (divisor & -divisor))
16052 if (TREE_CODE (value) == INTEGER_CST)
16054 wide_int val = value;
16057 if ((val & (divisor - 1)) == 0)
16060 overflow_p = TREE_OVERFLOW (value);
16061 val += divisor - 1;
16062 val &= - (int) divisor;
16066 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16072 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16073 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16074 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16075 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16081 div = build_int_cst (TREE_TYPE (value), divisor);
16082 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16083 value = size_binop_loc (loc, MULT_EXPR, value, div);
16089 /* Likewise, but round down. */
16092 round_down_loc (location_t loc, tree value, int divisor)
16094 tree div = NULL_TREE;
16096 gcc_assert (divisor > 0);
16100 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16101 have to do anything. Only do this when we are not given a const,
16102 because in that case, this check is more expensive than just
16104 if (TREE_CODE (value) != INTEGER_CST)
16106 div = build_int_cst (TREE_TYPE (value), divisor);
16108 if (multiple_of_p (TREE_TYPE (value), value, div))
16112 /* If divisor is a power of two, simplify this to bit manipulation. */
16113 if (divisor == (divisor & -divisor))
16117 t = build_int_cst (TREE_TYPE (value), -divisor);
16118 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16123 div = build_int_cst (TREE_TYPE (value), divisor);
16124 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16125 value = size_binop_loc (loc, MULT_EXPR, value, div);
16131 /* Returns the pointer to the base of the object addressed by EXP and
16132 extracts the information about the offset of the access, storing it
16133 to PBITPOS and POFFSET. */
16136 split_address_to_core_and_offset (tree exp,
16137 HOST_WIDE_INT *pbitpos, tree *poffset)
16141 int unsignedp, volatilep;
16142 HOST_WIDE_INT bitsize;
16143 location_t loc = EXPR_LOCATION (exp);
16145 if (TREE_CODE (exp) == ADDR_EXPR)
16147 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16148 poffset, &mode, &unsignedp, &volatilep,
16150 core = build_fold_addr_expr_loc (loc, core);
16156 *poffset = NULL_TREE;
16162 /* Returns true if addresses of E1 and E2 differ by a constant, false
16163 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16166 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16169 HOST_WIDE_INT bitpos1, bitpos2;
16170 tree toffset1, toffset2, tdiff, type;
16172 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16173 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16175 if (bitpos1 % BITS_PER_UNIT != 0
16176 || bitpos2 % BITS_PER_UNIT != 0
16177 || !operand_equal_p (core1, core2, 0))
16180 if (toffset1 && toffset2)
16182 type = TREE_TYPE (toffset1);
16183 if (type != TREE_TYPE (toffset2))
16184 toffset2 = fold_convert (type, toffset2);
16186 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16187 if (!cst_and_fits_in_hwi (tdiff))
16190 *diff = int_cst_value (tdiff);
16192 else if (toffset1 || toffset2)
16194 /* If only one of the offsets is non-constant, the difference cannot
16201 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16205 /* Simplify the floating point expression EXP when the sign of the
16206 result is not significant. Return NULL_TREE if no simplification
16210 fold_strip_sign_ops (tree exp)
16213 location_t loc = EXPR_LOCATION (exp);
16215 switch (TREE_CODE (exp))
16219 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16220 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16224 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
16226 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16227 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16228 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16229 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16230 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16231 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16234 case COMPOUND_EXPR:
16235 arg0 = TREE_OPERAND (exp, 0);
16236 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16238 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16242 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16243 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16245 return fold_build3_loc (loc,
16246 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16247 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16248 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16253 const enum built_in_function fcode = builtin_mathfn_code (exp);
16256 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16257 /* Strip copysign function call, return the 1st argument. */
16258 arg0 = CALL_EXPR_ARG (exp, 0);
16259 arg1 = CALL_EXPR_ARG (exp, 1);
16260 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16263 /* Strip sign ops from the argument of "odd" math functions. */
16264 if (negate_mathfn_p (fcode))
16266 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16268 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16281 /* Return OFF converted to a pointer offset type suitable as offset for
16282 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16284 convert_to_ptrofftype_loc (location_t loc, tree off)
16286 return fold_convert_loc (loc, sizetype, off);
16289 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16291 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16293 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16294 ptr, convert_to_ptrofftype_loc (loc, off));
16297 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16299 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16301 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16302 ptr, size_int (off));