1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3 2010 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
44 CONSTANT -> V_i has been found to hold a constant
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
58 propagation engine to add all the SSA edges coming out the
59 assignment into the worklists, so that statements that use V_i
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
81 a_11 = PHI (a_9, a_10)
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
85 that instead of consider it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
103 Constant propagation in stores and loads (STORE-CCP)
104 ----------------------------------------------------
106 While CCP has all the logic to propagate constants in GIMPLE
107 registers, it is missing the ability to associate constants with
108 stores and loads (i.e., pointer dereferences, structures and
109 global/aliased variables). We don't keep loads and stores in
110 SSA, but we do build a factored use-def web for them (in the
113 For instance, consider the following code fragment:
132 We should be able to deduce that the predicate 'a.a != B' is always
133 false. To achieve this, we associate constant values to the SSA
134 names in the VDEF operands for each store. Additionally,
135 since we also glob partial loads/stores with the base symbol, we
136 also keep track of the memory reference where the constant value
137 was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
145 In the example above, CCP will associate value '2' with 'a_5', but
146 it would be wrong to replace the load from 'a.b' with '2', because
147 '2' had been stored into a.a.
149 Note that the initial value of virtual operands is VARYING, not
150 UNDEFINED. Consider, for instance global variables:
158 # A_5 = PHI (A_4, A_2);
166 The value of A_2 cannot be assumed to be UNDEFINED, as it may have
167 been defined outside of foo. If we were to assume it UNDEFINED, we
168 would erroneously optimize the above into 'return 3;'.
170 Though STORE-CCP is not too expensive, it does have to do more work
171 than regular CCP, so it is only enabled at -O2. Both regular CCP
172 and STORE-CCP use the exact same algorithm. The only distinction
173 is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
174 set to true. This affects the evaluation of statements and PHI
179 Constant propagation with conditional branches,
180 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
182 Building an Optimizing Compiler,
183 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
185 Advanced Compiler Design and Implementation,
186 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
190 #include "coretypes.h"
197 #include "basic-block.h"
200 #include "function.h"
201 #include "diagnostic.h"
203 #include "tree-dump.h"
204 #include "tree-flow.h"
205 #include "tree-pass.h"
206 #include "tree-ssa-propagate.h"
207 #include "value-prof.h"
208 #include "langhooks.h"
213 /* Possible lattice values. */
222 /* Array of propagated constant values. After propagation,
223 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
224 the constant is held in an SSA name representing a memory store
225 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
226 memory reference used to store (i.e., the LHS of the assignment
228 static prop_value_t *const_val;
230 static void canonicalize_float_value (prop_value_t *);
232 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
235 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
237 switch (val.lattice_val)
240 fprintf (outf, "%sUNINITIALIZED", prefix);
243 fprintf (outf, "%sUNDEFINED", prefix);
246 fprintf (outf, "%sVARYING", prefix);
249 fprintf (outf, "%sCONSTANT ", prefix);
250 print_generic_expr (outf, val.value, dump_flags);
258 /* Print lattice value VAL to stderr. */
260 void debug_lattice_value (prop_value_t val);
263 debug_lattice_value (prop_value_t val)
265 dump_lattice_value (stderr, "", val);
266 fprintf (stderr, "\n");
271 /* If SYM is a constant variable with known value, return the value.
272 NULL_TREE is returned otherwise. */
275 get_symbol_constant_value (tree sym)
277 if (TREE_STATIC (sym)
278 && TREE_READONLY (sym)
281 tree val = DECL_INITIAL (sym);
284 STRIP_USELESS_TYPE_CONVERSION (val);
285 if (is_gimple_min_invariant (val))
288 /* Variables declared 'const' without an initializer
289 have zero as the initializer if they may not be
290 overridden at link or run time. */
292 && !DECL_EXTERNAL (sym)
293 && targetm.binds_local_p (sym)
294 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
295 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
296 return fold_convert (TREE_TYPE (sym), integer_zero_node);
302 /* Compute a default value for variable VAR and store it in the
303 CONST_VAL array. The following rules are used to get default
306 1- Global and static variables that are declared constant are
309 2- Any other value is considered UNDEFINED. This is useful when
310 considering PHI nodes. PHI arguments that are undefined do not
311 change the constant value of the PHI node, which allows for more
312 constants to be propagated.
314 3- Variables defined by statements other than assignments and PHI
315 nodes are considered VARYING.
317 4- Initial values of variables that are not GIMPLE registers are
318 considered VARYING. */
321 get_default_value (tree var)
323 tree sym = SSA_NAME_VAR (var);
324 prop_value_t val = { UNINITIALIZED, NULL_TREE };
327 if (!is_gimple_reg (var))
329 /* Short circuit for regular CCP. We are not interested in any
330 non-register when DO_STORE_CCP is false. */
331 val.lattice_val = VARYING;
333 else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
335 /* Globals and static variables declared 'const' take their
337 val.lattice_val = CONSTANT;
342 gimple stmt = SSA_NAME_DEF_STMT (var);
344 if (gimple_nop_p (stmt))
346 /* Variables defined by an empty statement are those used
347 before being initialized. If VAR is a local variable, we
348 can assume initially that it is UNDEFINED, otherwise we must
349 consider it VARYING. */
350 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
351 val.lattice_val = UNDEFINED;
353 val.lattice_val = VARYING;
355 else if (is_gimple_assign (stmt)
356 /* Value-returning GIMPLE_CALL statements assign to
357 a variable, and are treated similarly to GIMPLE_ASSIGN. */
358 || (is_gimple_call (stmt)
359 && gimple_call_lhs (stmt) != NULL_TREE)
360 || gimple_code (stmt) == GIMPLE_PHI)
362 /* Any other variable defined by an assignment or a PHI node
363 is considered UNDEFINED. */
364 val.lattice_val = UNDEFINED;
368 /* Otherwise, VAR will never take on a constant value. */
369 val.lattice_val = VARYING;
377 /* Get the constant value associated with variable VAR. */
379 static inline prop_value_t *
384 if (const_val == NULL)
387 val = &const_val[SSA_NAME_VERSION (var)];
388 if (val->lattice_val == UNINITIALIZED)
389 *val = get_default_value (var);
391 canonicalize_float_value (val);
396 /* Sets the value associated with VAR to VARYING. */
399 set_value_varying (tree var)
401 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
403 val->lattice_val = VARYING;
404 val->value = NULL_TREE;
407 /* For float types, modify the value of VAL to make ccp work correctly
408 for non-standard values (-0, NaN):
410 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
411 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
412 This is to fix the following problem (see PR 29921): Suppose we have
416 and we set value of y to NaN. This causes value of x to be set to NaN.
417 When we later determine that y is in fact VARYING, fold uses the fact
418 that HONOR_NANS is false, and we try to change the value of x to 0,
419 causing an ICE. With HONOR_NANS being false, the real appearance of
420 NaN would cause undefined behavior, though, so claiming that y (and x)
421 are UNDEFINED initially is correct. */
424 canonicalize_float_value (prop_value_t *val)
426 enum machine_mode mode;
430 if (val->lattice_val != CONSTANT
431 || TREE_CODE (val->value) != REAL_CST)
434 d = TREE_REAL_CST (val->value);
435 type = TREE_TYPE (val->value);
436 mode = TYPE_MODE (type);
438 if (!HONOR_SIGNED_ZEROS (mode)
439 && REAL_VALUE_MINUS_ZERO (d))
441 val->value = build_real (type, dconst0);
445 if (!HONOR_NANS (mode)
446 && REAL_VALUE_ISNAN (d))
448 val->lattice_val = UNDEFINED;
454 /* Set the value for variable VAR to NEW_VAL. Return true if the new
455 value is different from VAR's previous value. */
458 set_lattice_value (tree var, prop_value_t new_val)
460 prop_value_t *old_val = get_value (var);
462 canonicalize_float_value (&new_val);
464 /* Lattice transitions must always be monotonically increasing in
465 value. If *OLD_VAL and NEW_VAL are the same, return false to
466 inform the caller that this was a non-transition. */
468 gcc_assert (old_val->lattice_val < new_val.lattice_val
469 || (old_val->lattice_val == new_val.lattice_val
470 && ((!old_val->value && !new_val.value)
471 || operand_equal_p (old_val->value, new_val.value, 0))));
473 if (old_val->lattice_val != new_val.lattice_val)
475 if (dump_file && (dump_flags & TDF_DETAILS))
477 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
478 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
483 gcc_assert (new_val.lattice_val != UNDEFINED);
491 /* Return the likely CCP lattice value for STMT.
493 If STMT has no operands, then return CONSTANT.
495 Else if undefinedness of operands of STMT cause its value to be
496 undefined, then return UNDEFINED.
498 Else if any operands of STMT are constants, then return CONSTANT.
500 Else return VARYING. */
503 likely_value (gimple stmt)
505 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
510 enum gimple_code code = gimple_code (stmt);
512 /* This function appears to be called only for assignments, calls,
513 conditionals, and switches, due to the logic in visit_stmt. */
514 gcc_assert (code == GIMPLE_ASSIGN
515 || code == GIMPLE_CALL
516 || code == GIMPLE_COND
517 || code == GIMPLE_SWITCH);
519 /* If the statement has volatile operands, it won't fold to a
521 if (gimple_has_volatile_ops (stmt))
524 /* If we are not doing store-ccp, statements with loads
525 and/or stores will never fold into a constant. */
526 if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
529 /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
530 is_gimple_min_invariant, so we do not consider calls or
531 other forms of assignment. */
532 if (gimple_assign_single_p (stmt)
533 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
536 if (code == GIMPLE_COND
537 && is_gimple_min_invariant (gimple_cond_lhs (stmt))
538 && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
541 if (code == GIMPLE_SWITCH
542 && is_gimple_min_invariant (gimple_switch_index (stmt)))
545 /* Arrive here for more complex cases. */
547 has_constant_operand = false;
548 has_undefined_operand = false;
549 all_undefined_operands = true;
550 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
552 prop_value_t *val = get_value (use);
554 if (val->lattice_val == UNDEFINED)
555 has_undefined_operand = true;
557 all_undefined_operands = false;
559 if (val->lattice_val == CONSTANT)
560 has_constant_operand = true;
563 /* There may be constants in regular rhs operands. For calls we
564 have to ignore lhs, fndecl and static chain, otherwise only
566 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
567 i < gimple_num_ops (stmt); ++i)
569 tree op = gimple_op (stmt, i);
570 if (!op || TREE_CODE (op) == SSA_NAME)
572 if (is_gimple_min_invariant (op))
573 has_constant_operand = true;
576 if (has_constant_operand)
577 all_undefined_operands = false;
579 /* If the operation combines operands like COMPLEX_EXPR make sure to
580 not mark the result UNDEFINED if only one part of the result is
582 if (has_undefined_operand && all_undefined_operands)
584 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
586 switch (gimple_assign_rhs_code (stmt))
588 /* Unary operators are handled with all_undefined_operands. */
591 case POINTER_PLUS_EXPR:
592 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
593 Not bitwise operators, one VARYING operand may specify the
594 result completely. Not logical operators for the same reason.
595 Not COMPLEX_EXPR as one VARYING operand makes the result partly
596 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
597 the undefined operand may be promoted. */
604 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
605 fall back to VARYING even if there were CONSTANT operands. */
606 if (has_undefined_operand)
609 if (has_constant_operand
610 /* We do not consider virtual operands here -- load from read-only
611 memory may have only VARYING virtual operands, but still be
613 || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
619 /* Returns true if STMT cannot be constant. */
622 surely_varying_stmt_p (gimple stmt)
624 /* If the statement has operands that we cannot handle, it cannot be
626 if (gimple_has_volatile_ops (stmt))
629 if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
632 /* If it is a call and does not return a value or is not a
633 builtin and not an indirect call, it is varying. */
634 if (is_gimple_call (stmt))
637 if (!gimple_call_lhs (stmt)
638 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
639 && !DECL_BUILT_IN (fndecl)))
643 /* Anything other than assignments and conditional jumps are not
644 interesting for CCP. */
645 if (gimple_code (stmt) != GIMPLE_ASSIGN
646 && gimple_code (stmt) != GIMPLE_COND
647 && gimple_code (stmt) != GIMPLE_SWITCH
648 && gimple_code (stmt) != GIMPLE_CALL)
654 /* Initialize local data structures for CCP. */
657 ccp_initialize (void)
661 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
663 /* Initialize simulation flags for PHI nodes and statements. */
666 gimple_stmt_iterator i;
668 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
670 gimple stmt = gsi_stmt (i);
671 bool is_varying = surely_varying_stmt_p (stmt);
678 /* If the statement will not produce a constant, mark
679 all its outputs VARYING. */
680 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
683 set_value_varying (def);
686 prop_set_simulate_again (stmt, !is_varying);
690 /* Now process PHI nodes. We never clear the simulate_again flag on
691 phi nodes, since we do not know which edges are executable yet,
692 except for phi nodes for virtual operands when we do not do store ccp. */
695 gimple_stmt_iterator i;
697 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
699 gimple phi = gsi_stmt (i);
701 if (!is_gimple_reg (gimple_phi_result (phi)))
702 prop_set_simulate_again (phi, false);
704 prop_set_simulate_again (phi, true);
710 /* Do final substitution of propagated values, cleanup the flowgraph and
711 free allocated storage.
713 Return TRUE when something was optimized. */
718 /* Perform substitutions based on the known constant values. */
719 bool something_changed = substitute_and_fold (const_val, false);
723 return something_changed;;
727 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
730 any M UNDEFINED = any
731 any M VARYING = VARYING
732 Ci M Cj = Ci if (i == j)
733 Ci M Cj = VARYING if (i != j)
737 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
739 if (val1->lattice_val == UNDEFINED)
741 /* UNDEFINED M any = any */
744 else if (val2->lattice_val == UNDEFINED)
746 /* any M UNDEFINED = any
747 Nothing to do. VAL1 already contains the value we want. */
750 else if (val1->lattice_val == VARYING
751 || val2->lattice_val == VARYING)
753 /* any M VARYING = VARYING. */
754 val1->lattice_val = VARYING;
755 val1->value = NULL_TREE;
757 else if (val1->lattice_val == CONSTANT
758 && val2->lattice_val == CONSTANT
759 && simple_cst_equal (val1->value, val2->value) == 1)
761 /* Ci M Cj = Ci if (i == j)
762 Ci M Cj = VARYING if (i != j)
764 If these two values come from memory stores, make sure that
765 they come from the same memory reference. */
766 val1->lattice_val = CONSTANT;
767 val1->value = val1->value;
771 /* Any other combination is VARYING. */
772 val1->lattice_val = VARYING;
773 val1->value = NULL_TREE;
778 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
779 lattice values to determine PHI_NODE's lattice value. The value of a
780 PHI node is determined calling ccp_lattice_meet with all the arguments
781 of the PHI node that are incoming via executable edges. */
783 static enum ssa_prop_result
784 ccp_visit_phi_node (gimple phi)
787 prop_value_t *old_val, new_val;
789 if (dump_file && (dump_flags & TDF_DETAILS))
791 fprintf (dump_file, "\nVisiting PHI node: ");
792 print_gimple_stmt (dump_file, phi, 0, dump_flags);
795 old_val = get_value (gimple_phi_result (phi));
796 switch (old_val->lattice_val)
799 return SSA_PROP_VARYING;
806 new_val.lattice_val = UNDEFINED;
807 new_val.value = NULL_TREE;
814 for (i = 0; i < gimple_phi_num_args (phi); i++)
816 /* Compute the meet operator over all the PHI arguments flowing
817 through executable edges. */
818 edge e = gimple_phi_arg_edge (phi, i);
820 if (dump_file && (dump_flags & TDF_DETAILS))
823 "\n Argument #%d (%d -> %d %sexecutable)\n",
824 i, e->src->index, e->dest->index,
825 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
828 /* If the incoming edge is executable, Compute the meet operator for
829 the existing value of the PHI node and the current PHI argument. */
830 if (e->flags & EDGE_EXECUTABLE)
832 tree arg = gimple_phi_arg (phi, i)->def;
833 prop_value_t arg_val;
835 if (is_gimple_min_invariant (arg))
837 arg_val.lattice_val = CONSTANT;
841 arg_val = *(get_value (arg));
843 ccp_lattice_meet (&new_val, &arg_val);
845 if (dump_file && (dump_flags & TDF_DETAILS))
847 fprintf (dump_file, "\t");
848 print_generic_expr (dump_file, arg, dump_flags);
849 dump_lattice_value (dump_file, "\tValue: ", arg_val);
850 fprintf (dump_file, "\n");
853 if (new_val.lattice_val == VARYING)
858 if (dump_file && (dump_flags & TDF_DETAILS))
860 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
861 fprintf (dump_file, "\n\n");
864 /* Make the transition to the new value. */
865 if (set_lattice_value (gimple_phi_result (phi), new_val))
867 if (new_val.lattice_val == VARYING)
868 return SSA_PROP_VARYING;
870 return SSA_PROP_INTERESTING;
873 return SSA_PROP_NOT_INTERESTING;
876 /* Return true if we may propagate the address expression ADDR into the
877 dereference DEREF and cancel them. */
880 may_propagate_address_into_dereference (tree addr, tree deref)
882 gcc_assert (INDIRECT_REF_P (deref)
883 && TREE_CODE (addr) == ADDR_EXPR);
885 /* Don't propagate if ADDR's operand has incomplete type. */
886 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
889 /* If the address is invariant then we do not need to preserve restrict
890 qualifications. But we do need to preserve volatile qualifiers until
891 we can annotate the folded dereference itself properly. */
892 if (is_gimple_min_invariant (addr)
893 && (!TREE_THIS_VOLATILE (deref)
894 || TYPE_VOLATILE (TREE_TYPE (addr))))
895 return useless_type_conversion_p (TREE_TYPE (deref),
896 TREE_TYPE (TREE_OPERAND (addr, 0)));
898 /* Else both the address substitution and the folding must result in
899 a valid useless type conversion sequence. */
900 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
902 && useless_type_conversion_p (TREE_TYPE (deref),
903 TREE_TYPE (TREE_OPERAND (addr, 0))));
906 /* CCP specific front-end to the non-destructive constant folding
909 Attempt to simplify the RHS of STMT knowing that one or more
910 operands are constants.
912 If simplification is possible, return the simplified RHS,
913 otherwise return the original RHS or NULL_TREE. */
916 ccp_fold (gimple stmt)
918 switch (gimple_code (stmt))
922 enum tree_code subcode = gimple_assign_rhs_code (stmt);
924 switch (get_gimple_rhs_class (subcode))
926 case GIMPLE_SINGLE_RHS:
928 tree rhs = gimple_assign_rhs1 (stmt);
929 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
931 if (TREE_CODE (rhs) == SSA_NAME)
933 /* If the RHS is an SSA_NAME, return its known constant value,
935 return get_value (rhs)->value;
937 /* Handle propagating invariant addresses into address operations.
938 The folding we do here matches that in tree-ssa-forwprop.c. */
939 else if (TREE_CODE (rhs) == ADDR_EXPR)
942 base = &TREE_OPERAND (rhs, 0);
943 while (handled_component_p (*base))
944 base = &TREE_OPERAND (*base, 0);
945 if (TREE_CODE (*base) == INDIRECT_REF
946 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
948 prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
949 if (val->lattice_val == CONSTANT
950 && TREE_CODE (val->value) == ADDR_EXPR
951 && may_propagate_address_into_dereference
954 /* We need to return a new tree, not modify the IL
955 or share parts of it. So play some tricks to
956 avoid manually building it. */
957 tree ret, save = *base;
958 *base = TREE_OPERAND (val->value, 0);
959 ret = unshare_expr (rhs);
960 recompute_tree_invariant_for_addr_expr (ret);
967 if (kind == tcc_reference)
969 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
970 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
972 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
973 if (val->lattice_val == CONSTANT)
974 return fold_unary (VIEW_CONVERT_EXPR,
975 TREE_TYPE (rhs), val->value);
977 return fold_const_aggregate_ref (rhs);
979 else if (kind == tcc_declaration)
980 return get_symbol_constant_value (rhs);
984 case GIMPLE_UNARY_RHS:
986 /* Handle unary operators that can appear in GIMPLE form.
987 Note that we know the single operand must be a constant,
988 so this should almost always return a simplified RHS. */
989 tree lhs = gimple_assign_lhs (stmt);
990 tree op0 = gimple_assign_rhs1 (stmt);
992 /* Simplify the operand down to a constant. */
993 if (TREE_CODE (op0) == SSA_NAME)
995 prop_value_t *val = get_value (op0);
996 if (val->lattice_val == CONSTANT)
997 op0 = get_value (op0)->value;
1000 /* Conversions are useless for CCP purposes if they are
1001 value-preserving. Thus the restrictions that
1002 useless_type_conversion_p places for pointer type conversions
1003 do not apply here. Substitution later will only substitute to
1005 if (CONVERT_EXPR_CODE_P (subcode)
1006 && POINTER_TYPE_P (TREE_TYPE (lhs))
1007 && POINTER_TYPE_P (TREE_TYPE (op0))
1008 /* Do not allow differences in volatile qualification
1009 as this might get us confused as to whether a
1010 propagation destination statement is volatile
1011 or not. See PR36988. */
1012 && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
1013 == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
1016 /* Still try to generate a constant of correct type. */
1017 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1019 && ((tem = maybe_fold_offset_to_address
1020 (op0, integer_zero_node, TREE_TYPE (lhs)))
1026 return fold_unary_ignore_overflow (subcode,
1027 gimple_expr_type (stmt), op0);
1030 case GIMPLE_BINARY_RHS:
1032 /* Handle binary operators that can appear in GIMPLE form. */
1033 tree op0 = gimple_assign_rhs1 (stmt);
1034 tree op1 = gimple_assign_rhs2 (stmt);
1036 /* Simplify the operands down to constants when appropriate. */
1037 if (TREE_CODE (op0) == SSA_NAME)
1039 prop_value_t *val = get_value (op0);
1040 if (val->lattice_val == CONSTANT)
1044 if (TREE_CODE (op1) == SSA_NAME)
1046 prop_value_t *val = get_value (op1);
1047 if (val->lattice_val == CONSTANT)
1051 /* Fold &foo + CST into an invariant reference if possible. */
1052 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1053 && TREE_CODE (op0) == ADDR_EXPR
1054 && TREE_CODE (op1) == INTEGER_CST)
1056 tree lhs = gimple_assign_lhs (stmt);
1057 tree tem = maybe_fold_offset_to_address (op0, op1,
1059 if (tem != NULL_TREE)
1063 return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
1074 tree fn = gimple_call_fn (stmt);
1077 if (TREE_CODE (fn) == SSA_NAME)
1079 val = get_value (fn);
1080 if (val->lattice_val == CONSTANT)
1083 if (TREE_CODE (fn) == ADDR_EXPR
1084 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1085 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1087 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1090 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1092 args[i] = gimple_call_arg (stmt, i);
1093 if (TREE_CODE (args[i]) == SSA_NAME)
1095 val = get_value (args[i]);
1096 if (val->lattice_val == CONSTANT)
1097 args[i] = val->value;
1100 call = build_call_array (gimple_call_return_type (stmt),
1101 fn, gimple_call_num_args (stmt), args);
1102 retval = fold_call_expr (call, false);
1104 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1105 STRIP_NOPS (retval);
1113 /* Handle comparison operators that can appear in GIMPLE form. */
1114 tree op0 = gimple_cond_lhs (stmt);
1115 tree op1 = gimple_cond_rhs (stmt);
1116 enum tree_code code = gimple_cond_code (stmt);
1118 /* Simplify the operands down to constants when appropriate. */
1119 if (TREE_CODE (op0) == SSA_NAME)
1121 prop_value_t *val = get_value (op0);
1122 if (val->lattice_val == CONSTANT)
1126 if (TREE_CODE (op1) == SSA_NAME)
1128 prop_value_t *val = get_value (op1);
1129 if (val->lattice_val == CONSTANT)
1133 return fold_binary (code, boolean_type_node, op0, op1);
1138 tree rhs = gimple_switch_index (stmt);
1140 if (TREE_CODE (rhs) == SSA_NAME)
1142 /* If the RHS is an SSA_NAME, return its known constant value,
1144 return get_value (rhs)->value;
1156 /* Return the tree representing the element referenced by T if T is an
1157 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1158 NULL_TREE otherwise. */
1161 fold_const_aggregate_ref (tree t)
1163 prop_value_t *value;
1164 tree base, ctor, idx, field;
1165 unsigned HOST_WIDE_INT cnt;
1168 switch (TREE_CODE (t))
1171 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1172 DECL_INITIAL. If BASE is a nested reference into another
1173 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1174 the inner reference. */
1175 base = TREE_OPERAND (t, 0);
1176 switch (TREE_CODE (base))
1179 if (!TREE_READONLY (base)
1180 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1181 || !targetm.binds_local_p (base))
1184 ctor = DECL_INITIAL (base);
1189 ctor = fold_const_aggregate_ref (base);
1201 if (ctor == NULL_TREE
1202 || (TREE_CODE (ctor) != CONSTRUCTOR
1203 && TREE_CODE (ctor) != STRING_CST)
1204 || !TREE_STATIC (ctor))
1207 /* Get the index. If we have an SSA_NAME, try to resolve it
1208 with the current lattice value for the SSA_NAME. */
1209 idx = TREE_OPERAND (t, 1);
1210 switch (TREE_CODE (idx))
1213 if ((value = get_value (idx))
1214 && value->lattice_val == CONSTANT
1215 && TREE_CODE (value->value) == INTEGER_CST)
1228 /* Fold read from constant string. */
1229 if (TREE_CODE (ctor) == STRING_CST)
1231 if ((TYPE_MODE (TREE_TYPE (t))
1232 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1233 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1235 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1236 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
1237 return build_int_cst_type (TREE_TYPE (t),
1238 (TREE_STRING_POINTER (ctor)
1239 [TREE_INT_CST_LOW (idx)]));
1243 /* Whoo-hoo! I'll fold ya baby. Yeah! */
1244 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1245 if (tree_int_cst_equal (cfield, idx))
1247 STRIP_USELESS_TYPE_CONVERSION (cval);
1253 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1254 DECL_INITIAL. If BASE is a nested reference into another
1255 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1256 the inner reference. */
1257 base = TREE_OPERAND (t, 0);
1258 switch (TREE_CODE (base))
1261 if (!TREE_READONLY (base)
1262 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1263 || !targetm.binds_local_p (base))
1266 ctor = DECL_INITIAL (base);
1271 ctor = fold_const_aggregate_ref (base);
1278 if (ctor == NULL_TREE
1279 || TREE_CODE (ctor) != CONSTRUCTOR
1280 || !TREE_STATIC (ctor))
1283 field = TREE_OPERAND (t, 1);
1285 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1287 /* FIXME: Handle bit-fields. */
1288 && ! DECL_BIT_FIELD (cfield))
1290 STRIP_USELESS_TYPE_CONVERSION (cval);
1298 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1299 if (c && TREE_CODE (c) == COMPLEX_CST)
1300 return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
1306 tree base = TREE_OPERAND (t, 0);
1307 if (TREE_CODE (base) == SSA_NAME
1308 && (value = get_value (base))
1309 && value->lattice_val == CONSTANT
1310 && TREE_CODE (value->value) == ADDR_EXPR
1311 && useless_type_conversion_p (TREE_TYPE (t),
1312 TREE_TYPE (TREE_TYPE (value->value))))
1313 return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
1324 /* Evaluate statement STMT.
1325 Valid only for assignments, calls, conditionals, and switches. */
1328 evaluate_stmt (gimple stmt)
1331 tree simplified = NULL_TREE;
1332 ccp_lattice_t likelyvalue = likely_value (stmt);
1335 fold_defer_overflow_warnings ();
1337 /* If the statement is likely to have a CONSTANT result, then try
1338 to fold the statement to determine the constant value. */
1339 /* FIXME. This is the only place that we call ccp_fold.
1340 Since likely_value never returns CONSTANT for calls, we will
1341 not attempt to fold them, including builtins that may profit. */
1342 if (likelyvalue == CONSTANT)
1343 simplified = ccp_fold (stmt);
1344 /* If the statement is likely to have a VARYING result, then do not
1345 bother folding the statement. */
1346 else if (likelyvalue == VARYING)
1348 enum gimple_code code = gimple_code (stmt);
1349 if (code == GIMPLE_ASSIGN)
1351 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1353 /* Other cases cannot satisfy is_gimple_min_invariant
1355 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1356 simplified = gimple_assign_rhs1 (stmt);
1358 else if (code == GIMPLE_SWITCH)
1359 simplified = gimple_switch_index (stmt);
1361 /* These cannot satisfy is_gimple_min_invariant without folding. */
1362 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1365 is_constant = simplified && is_gimple_min_invariant (simplified);
1367 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1369 if (dump_file && (dump_flags & TDF_DETAILS))
1371 fprintf (dump_file, "which is likely ");
1372 switch (likelyvalue)
1375 fprintf (dump_file, "CONSTANT");
1378 fprintf (dump_file, "UNDEFINED");
1381 fprintf (dump_file, "VARYING");
1385 fprintf (dump_file, "\n");
1390 /* The statement produced a constant value. */
1391 val.lattice_val = CONSTANT;
1392 val.value = simplified;
1396 /* The statement produced a nonconstant value. If the statement
1397 had UNDEFINED operands, then the result of the statement
1398 should be UNDEFINED. Otherwise, the statement is VARYING. */
1399 if (likelyvalue == UNDEFINED)
1400 val.lattice_val = likelyvalue;
1402 val.lattice_val = VARYING;
1404 val.value = NULL_TREE;
1410 /* Visit the assignment statement STMT. Set the value of its LHS to the
1411 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1412 creates virtual definitions, set the value of each new name to that
1413 of the RHS (if we can derive a constant out of the RHS).
1414 Value-returning call statements also perform an assignment, and
1415 are handled here. */
1417 static enum ssa_prop_result
1418 visit_assignment (gimple stmt, tree *output_p)
1421 enum ssa_prop_result retval;
1423 tree lhs = gimple_get_lhs (stmt);
1425 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1426 || gimple_call_lhs (stmt) != NULL_TREE);
1428 if (gimple_assign_copy_p (stmt))
1430 tree rhs = gimple_assign_rhs1 (stmt);
1432 if (TREE_CODE (rhs) == SSA_NAME)
1434 /* For a simple copy operation, we copy the lattice values. */
1435 prop_value_t *nval = get_value (rhs);
1439 val = evaluate_stmt (stmt);
1442 /* Evaluate the statement, which could be
1443 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1444 val = evaluate_stmt (stmt);
1446 retval = SSA_PROP_NOT_INTERESTING;
1448 /* Set the lattice value of the statement's output. */
1449 if (TREE_CODE (lhs) == SSA_NAME)
1451 /* If STMT is an assignment to an SSA_NAME, we only have one
1453 if (set_lattice_value (lhs, val))
1456 if (val.lattice_val == VARYING)
1457 retval = SSA_PROP_VARYING;
1459 retval = SSA_PROP_INTERESTING;
1467 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1468 if it can determine which edge will be taken. Otherwise, return
1469 SSA_PROP_VARYING. */
1471 static enum ssa_prop_result
1472 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
1477 block = gimple_bb (stmt);
1478 val = evaluate_stmt (stmt);
1480 /* Find which edge out of the conditional block will be taken and add it
1481 to the worklist. If no single edge can be determined statically,
1482 return SSA_PROP_VARYING to feed all the outgoing edges to the
1483 propagation engine. */
1484 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
1486 return SSA_PROP_INTERESTING;
1488 return SSA_PROP_VARYING;
1492 /* Evaluate statement STMT. If the statement produces an output value and
1493 its evaluation changes the lattice value of its output, return
1494 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1497 If STMT is a conditional branch and we can determine its truth
1498 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1499 value, return SSA_PROP_VARYING. */
1501 static enum ssa_prop_result
1502 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
1507 if (dump_file && (dump_flags & TDF_DETAILS))
1509 fprintf (dump_file, "\nVisiting statement:\n");
1510 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1513 switch (gimple_code (stmt))
1516 /* If the statement is an assignment that produces a single
1517 output value, evaluate its RHS to see if the lattice value of
1518 its output has changed. */
1519 return visit_assignment (stmt, output_p);
1522 /* A value-returning call also performs an assignment. */
1523 if (gimple_call_lhs (stmt) != NULL_TREE)
1524 return visit_assignment (stmt, output_p);
1529 /* If STMT is a conditional branch, see if we can determine
1530 which branch will be taken. */
1531 /* FIXME. It appears that we should be able to optimize
1532 computed GOTOs here as well. */
1533 return visit_cond_stmt (stmt, taken_edge_p);
1539 /* Any other kind of statement is not interesting for constant
1540 propagation and, therefore, not worth simulating. */
1541 if (dump_file && (dump_flags & TDF_DETAILS))
1542 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
1544 /* Definitions made by statements other than assignments to
1545 SSA_NAMEs represent unknown modifications to their outputs.
1546 Mark them VARYING. */
1547 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1549 prop_value_t v = { VARYING, NULL_TREE };
1550 set_lattice_value (def, v);
1553 return SSA_PROP_VARYING;
1557 /* Main entry point for SSA Conditional Constant Propagation. */
1563 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
1564 if (ccp_finalize ())
1565 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
1574 return flag_tree_ccp != 0;
1578 struct gimple_opt_pass pass_ccp =
1583 gate_ccp, /* gate */
1584 do_ssa_ccp, /* execute */
1587 0, /* static_pass_number */
1588 TV_TREE_CCP, /* tv_id */
1589 PROP_cfg | PROP_ssa, /* properties_required */
1590 0, /* properties_provided */
1591 0, /* properties_destroyed */
1592 0, /* todo_flags_start */
1593 TODO_dump_func | TODO_verify_ssa
1594 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
1599 /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1600 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1601 is the desired result type. */
1604 maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
1605 bool allow_negative_idx)
1607 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
1608 tree array_type, elt_type, elt_size;
1611 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1612 measured in units of the size of elements type) from that ARRAY_REF).
1613 We can't do anything if either is variable.
1615 The case we handle here is *(&A[N]+O). */
1616 if (TREE_CODE (base) == ARRAY_REF)
1618 tree low_bound = array_ref_low_bound (base);
1620 elt_offset = TREE_OPERAND (base, 1);
1621 if (TREE_CODE (low_bound) != INTEGER_CST
1622 || TREE_CODE (elt_offset) != INTEGER_CST)
1625 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1626 base = TREE_OPERAND (base, 0);
1629 /* Ignore stupid user tricks of indexing non-array variables. */
1630 array_type = TREE_TYPE (base);
1631 if (TREE_CODE (array_type) != ARRAY_TYPE)
1633 elt_type = TREE_TYPE (array_type);
1634 if (!useless_type_conversion_p (orig_type, elt_type))
1637 /* Use signed size type for intermediate computation on the index. */
1638 idx_type = signed_type_for (size_type_node);
1640 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1641 element type (so we can use the alignment if it's not constant).
1642 Otherwise, compute the offset as an index by using a division. If the
1643 division isn't exact, then don't do anything. */
1644 elt_size = TYPE_SIZE_UNIT (elt_type);
1647 if (integer_zerop (offset))
1649 if (TREE_CODE (elt_size) != INTEGER_CST)
1650 elt_size = size_int (TYPE_ALIGN (elt_type));
1652 idx = build_int_cst (idx_type, 0);
1656 unsigned HOST_WIDE_INT lquo, lrem;
1657 HOST_WIDE_INT hquo, hrem;
1660 /* The final array offset should be signed, so we need
1661 to sign-extend the (possibly pointer) offset here
1662 and use signed division. */
1663 soffset = double_int_sext (tree_to_double_int (offset),
1664 TYPE_PRECISION (TREE_TYPE (offset)));
1665 if (TREE_CODE (elt_size) != INTEGER_CST
1666 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1667 soffset.low, soffset.high,
1668 TREE_INT_CST_LOW (elt_size),
1669 TREE_INT_CST_HIGH (elt_size),
1670 &lquo, &hquo, &lrem, &hrem)
1674 idx = build_int_cst_wide (idx_type, lquo, hquo);
1677 /* Assume the low bound is zero. If there is a domain type, get the
1678 low bound, if any, convert the index into that type, and add the
1680 min_idx = build_int_cst (idx_type, 0);
1681 domain_type = TYPE_DOMAIN (array_type);
1684 idx_type = domain_type;
1685 if (TYPE_MIN_VALUE (idx_type))
1686 min_idx = TYPE_MIN_VALUE (idx_type);
1688 min_idx = fold_convert (idx_type, min_idx);
1690 if (TREE_CODE (min_idx) != INTEGER_CST)
1693 elt_offset = fold_convert (idx_type, elt_offset);
1696 if (!integer_zerop (min_idx))
1697 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1698 if (!integer_zerop (elt_offset))
1699 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1701 /* Make sure to possibly truncate late after offsetting. */
1702 idx = fold_convert (idx_type, idx);
1704 /* We don't want to construct access past array bounds. For example
1707 should not be simplified into (*c)[14] or tree-vrp will
1708 give false warnings. The same is true for
1709 struct A { long x; char d[0]; } *a;
1711 which should be not folded to &a->d[-8]. */
1713 && TYPE_MAX_VALUE (domain_type)
1714 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1716 tree up_bound = TYPE_MAX_VALUE (domain_type);
1718 if (tree_int_cst_lt (up_bound, idx)
1719 /* Accesses after the end of arrays of size 0 (gcc
1720 extension) and 1 are likely intentional ("struct
1722 && compare_tree_int (up_bound, 1) > 0)
1726 && TYPE_MIN_VALUE (domain_type))
1728 if (!allow_negative_idx
1729 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1730 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1733 else if (!allow_negative_idx
1734 && compare_tree_int (idx, 0) < 0)
1737 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
1741 /* Attempt to fold *(S+O) to S.X.
1742 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1743 is the desired result type. */
1746 maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1749 tree f, t, field_type, tail_array_field, field_offset;
1753 if (TREE_CODE (record_type) != RECORD_TYPE
1754 && TREE_CODE (record_type) != UNION_TYPE
1755 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1758 /* Short-circuit silly cases. */
1759 if (useless_type_conversion_p (record_type, orig_type))
1762 tail_array_field = NULL_TREE;
1763 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1767 if (TREE_CODE (f) != FIELD_DECL)
1769 if (DECL_BIT_FIELD (f))
1772 if (!DECL_FIELD_OFFSET (f))
1774 field_offset = byte_position (f);
1775 if (TREE_CODE (field_offset) != INTEGER_CST)
1778 /* ??? Java creates "interesting" fields for representing base classes.
1779 They have no name, and have no context. With no context, we get into
1780 trouble with nonoverlapping_component_refs_p. Skip them. */
1781 if (!DECL_FIELD_CONTEXT (f))
1784 /* The previous array field isn't at the end. */
1785 tail_array_field = NULL_TREE;
1787 /* Check to see if this offset overlaps with the field. */
1788 cmp = tree_int_cst_compare (field_offset, offset);
1792 field_type = TREE_TYPE (f);
1794 /* Here we exactly match the offset being checked. If the types match,
1795 then we can return that field. */
1797 && useless_type_conversion_p (orig_type, field_type))
1799 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1803 /* Don't care about offsets into the middle of scalars. */
1804 if (!AGGREGATE_TYPE_P (field_type))
1807 /* Check for array at the end of the struct. This is often
1808 used as for flexible array members. We should be able to
1809 turn this into an array access anyway. */
1810 if (TREE_CODE (field_type) == ARRAY_TYPE)
1811 tail_array_field = f;
1813 /* Check the end of the field against the offset. */
1814 if (!DECL_SIZE_UNIT (f)
1815 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1817 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1818 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1821 /* If we matched, then set offset to the displacement into
1823 new_base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1825 /* Recurse to possibly find the match. */
1826 ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
1827 f == TYPE_FIELDS (record_type));
1830 ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
1836 if (!tail_array_field)
1839 f = tail_array_field;
1840 field_type = TREE_TYPE (f);
1841 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
1843 /* If we get here, we've got an aggregate field, and a possibly
1844 nonzero offset into them. Recurse and hope for a valid match. */
1845 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1847 t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
1848 f == TYPE_FIELDS (record_type));
1851 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1855 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
1856 or BASE[index] or by combination of those.
1858 Before attempting the conversion strip off existing ADDR_EXPRs and
1859 handled component refs. */
1862 maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
1868 if (TREE_CODE (base) != ADDR_EXPR)
1871 base = TREE_OPERAND (base, 0);
1873 /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
1874 so it needs to be removed and new COMPONENT_REF constructed.
1875 The wrong COMPONENT_REF are often constructed by folding the
1876 (type *)&object within the expression (type *)&object+offset */
1877 if (handled_component_p (base))
1879 HOST_WIDE_INT sub_offset, size, maxsize;
1881 newbase = get_ref_base_and_extent (base, &sub_offset,
1883 gcc_assert (newbase);
1886 && !(sub_offset & (BITS_PER_UNIT - 1)))
1890 offset = int_const_binop (PLUS_EXPR, offset,
1891 build_int_cst (TREE_TYPE (offset),
1892 sub_offset / BITS_PER_UNIT), 1);
1895 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
1896 && integer_zerop (offset))
1898 type = TREE_TYPE (base);
1900 ret = maybe_fold_offset_to_component_ref (type, base, offset, orig_type);
1902 ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
1907 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
1908 or &BASE[index] or by combination of those.
1910 Before attempting the conversion strip off existing component refs. */
1913 maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
1917 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
1918 && POINTER_TYPE_P (orig_type));
1920 t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
1926 /* For __builtin_object_size to function correctly we need to
1927 make sure not to fold address arithmetic so that we change
1928 reference from one array to another. This would happen for
1931 struct X { char s1[10]; char s2[10] } s;
1932 char *foo (void) { return &s.s2[-4]; }
1934 where we need to avoid generating &s.s1[6]. As the C and
1935 C++ frontends create different initial trees
1936 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
1937 sophisticated comparisons here. Note that checking for the
1938 condition after the fact is easier than trying to avoid doing
1941 if (TREE_CODE (orig) == ADDR_EXPR)
1942 orig = TREE_OPERAND (orig, 0);
1943 if ((TREE_CODE (orig) == ARRAY_REF
1944 || (TREE_CODE (orig) == COMPONENT_REF
1945 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
1946 && (TREE_CODE (t) == ARRAY_REF
1947 || TREE_CODE (t) == COMPONENT_REF)
1948 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
1949 ? TREE_OPERAND (orig, 0) : orig,
1950 TREE_CODE (t) == ARRAY_REF
1951 ? TREE_OPERAND (t, 0) : t, 0))
1954 ptr_type = build_pointer_type (TREE_TYPE (t));
1955 if (!useless_type_conversion_p (orig_type, ptr_type))
1957 return build_fold_addr_expr_with_type (t, ptr_type);
1963 /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1964 Return the simplified expression, or NULL if nothing could be done. */
1967 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
1970 bool volatile_p = TREE_THIS_VOLATILE (expr);
1972 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1973 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1974 are sometimes added. */
1976 STRIP_TYPE_NOPS (base);
1977 TREE_OPERAND (expr, 0) = base;
1979 /* One possibility is that the address reduces to a string constant. */
1980 t = fold_read_from_constant_string (expr);
1984 /* Add in any offset from a POINTER_PLUS_EXPR. */
1985 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
1989 offset2 = TREE_OPERAND (base, 1);
1990 if (TREE_CODE (offset2) != INTEGER_CST)
1992 base = TREE_OPERAND (base, 0);
1994 offset = fold_convert (sizetype,
1995 int_const_binop (PLUS_EXPR, offset, offset2, 1));
1998 if (TREE_CODE (base) == ADDR_EXPR)
2000 tree base_addr = base;
2002 /* Strip the ADDR_EXPR. */
2003 base = TREE_OPERAND (base, 0);
2005 /* Fold away CONST_DECL to its value, if the type is scalar. */
2006 if (TREE_CODE (base) == CONST_DECL
2007 && is_gimple_min_invariant (DECL_INITIAL (base)))
2008 return DECL_INITIAL (base);
2010 /* Try folding *(&B+O) to B.X. */
2011 t = maybe_fold_offset_to_reference (base_addr, offset,
2015 /* Preserve volatileness of the original expression.
2016 We can end up with a plain decl here which is shared
2017 and we shouldn't mess with its flags. */
2019 TREE_THIS_VOLATILE (t) = volatile_p;
2025 /* We can get here for out-of-range string constant accesses,
2026 such as "_"[3]. Bail out of the entire substitution search
2027 and arrange for the entire statement to be replaced by a
2028 call to __builtin_trap. In all likelihood this will all be
2029 constant-folded away, but in the meantime we can't leave with
2030 something that get_expr_operands can't understand. */
2034 if (TREE_CODE (t) == ADDR_EXPR
2035 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2037 /* FIXME: Except that this causes problems elsewhere with dead
2038 code not being deleted, and we die in the rtl expanders
2039 because we failed to remove some ssa_name. In the meantime,
2040 just return zero. */
2041 /* FIXME2: This condition should be signaled by
2042 fold_read_from_constant_string directly, rather than
2043 re-checking for it here. */
2044 return integer_zero_node;
2047 /* Try folding *(B+O) to B->X. Still an improvement. */
2048 if (POINTER_TYPE_P (TREE_TYPE (base)))
2050 t = maybe_fold_offset_to_reference (base, offset,
2057 /* Otherwise we had an offset that we could not simplify. */
2062 /* A quaint feature extant in our address arithmetic is that there
2063 can be hidden type changes here. The type of the result need
2064 not be the same as the type of the input pointer.
2066 What we're after here is an expression of the form
2067 (T *)(&array + const)
2068 where array is OP0, const is OP1, RES_TYPE is T and
2069 the cast doesn't actually exist, but is implicit in the
2070 type of the POINTER_PLUS_EXPR. We'd like to turn this into
2072 which may be able to propagate further. */
2075 maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
2080 /* It had better be a constant. */
2081 if (TREE_CODE (op1) != INTEGER_CST)
2083 /* The first operand should be an ADDR_EXPR. */
2084 if (TREE_CODE (op0) != ADDR_EXPR)
2086 op0 = TREE_OPERAND (op0, 0);
2088 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2089 the offset into it. */
2090 while (TREE_CODE (op0) == ARRAY_REF)
2092 tree array_obj = TREE_OPERAND (op0, 0);
2093 tree array_idx = TREE_OPERAND (op0, 1);
2094 tree elt_type = TREE_TYPE (op0);
2095 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2098 if (TREE_CODE (array_idx) != INTEGER_CST)
2100 if (TREE_CODE (elt_size) != INTEGER_CST)
2103 /* Un-bias the index by the min index of the array type. */
2104 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2107 min_idx = TYPE_MIN_VALUE (min_idx);
2110 if (TREE_CODE (min_idx) != INTEGER_CST)
2113 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
2114 if (!integer_zerop (min_idx))
2115 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2120 /* Convert the index to a byte offset. */
2121 array_idx = fold_convert (sizetype, array_idx);
2122 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2124 /* Update the operands for the next round, or for folding. */
2125 op1 = int_const_binop (PLUS_EXPR,
2130 ptd_type = TREE_TYPE (res_type);
2131 /* If we want a pointer to void, reconstruct the reference from the
2132 array element type. A pointer to that can be trivially converted
2133 to void *. This happens as we fold (void *)(ptr p+ off). */
2134 if (VOID_TYPE_P (ptd_type)
2135 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2136 ptd_type = TREE_TYPE (TREE_TYPE (op0));
2138 /* At which point we can try some of the same things as for indirects. */
2139 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
2141 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
2144 t = build1 (ADDR_EXPR, res_type, t);
2149 /* For passing state through walk_tree into fold_stmt_r and its
2152 struct fold_stmt_r_data
2156 bool *inside_addr_expr_p;
2159 /* Subroutine of fold_stmt called via walk_tree. We perform several
2160 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
2163 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2165 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2166 struct fold_stmt_r_data *fold_stmt_r_data;
2167 bool *inside_addr_expr_p;
2169 tree expr = *expr_p, t;
2170 bool volatile_p = TREE_THIS_VOLATILE (expr);
2172 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
2173 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2174 changed_p = fold_stmt_r_data->changed_p;
2176 /* ??? It'd be nice if walk_tree had a pre-order option. */
2177 switch (TREE_CODE (expr))
2180 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2185 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
2187 /* Avoid folding *"abc" = 5 into 'a' = 5. */
2188 if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
2191 && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2192 /* If we had a good reason for propagating the address here,
2193 make sure we end up with valid gimple. See PR34989. */
2194 t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2198 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2203 if (POINTER_TYPE_P (TREE_TYPE (expr))
2204 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
2205 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2206 && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
2208 TREE_TYPE (TREE_TYPE (expr)))))
2212 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
2213 We'd only want to bother decomposing an existing ARRAY_REF if
2214 the base array is found to have another offset contained within.
2215 Otherwise we'd be wasting time. */
2217 /* If we are not processing expressions found within an
2218 ADDR_EXPR, then we can fold constant array references.
2219 Don't fold on LHS either, to avoid folding "abc"[0] = 5
2221 if (!*inside_addr_expr_p && !wi->is_lhs)
2222 t = fold_read_from_constant_string (expr);
2228 *inside_addr_expr_p = true;
2229 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2230 *inside_addr_expr_p = false;
2235 /* Make sure the value is properly considered constant, and so gets
2236 propagated as expected. */
2238 recompute_tree_invariant_for_addr_expr (expr);
2241 case COMPONENT_REF:
2242 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2243 if (t)
2244 return t;
2245 *walk_subtrees = 0;
2247 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
2248 We've already checked that the records are compatible, so we should
2249 come up with a set of compatible fields. */
2250 {
2251 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
2252 tree expr_field = TREE_OPERAND (expr, 1);
2254 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
2255 {
2256 expr_field = find_compatible_field (expr_record, expr_field);
2257 TREE_OPERAND (expr, 1) = expr_field;
2258 }
2259 }
2260 break;
2262 case TARGET_MEM_REF:
2263 t = maybe_fold_tmr (expr);
2264 break;
2266 case POINTER_PLUS_EXPR:
2267 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2268 if (t)
2269 return t;
2270 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
2271 if (t)
2272 return t;
2273 *walk_subtrees = 0;
2275 t = maybe_fold_stmt_addition (TREE_TYPE (expr),
2276 TREE_OPERAND (expr, 0),
2277 TREE_OPERAND (expr, 1));
2278 break;
2280 case COND_EXPR:
2281 if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
2282 {
2283 tree op0 = TREE_OPERAND (expr, 0);
2284 tree tem;
2285 bool set;
2287 fold_defer_overflow_warnings ();
2288 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2289 TREE_OPERAND (op0, 0),
2290 TREE_OPERAND (op0, 1));
2291 /* This is actually a conditional expression, not a GIMPLE
2292 conditional statement, however, the valid_gimple_rhs_p
2293 test still applies. */
2294 set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
2295 fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
2296 if (set)
2297 {
2298 COND_EXPR_COND (expr) = tem;
2299 t = expr;
2300 break;
2301 }
2302 }
2303 return NULL_TREE;
2305 default:
2306 return NULL_TREE;
2307 }
2309 if (t)
2310 {
2311 /* Preserve volatileness of the original expression.
2312 We can end up with a plain decl here which is shared
2313 and we shouldn't mess with its flags. */
2314 if (!SSA_VAR_P (t))
2315 TREE_THIS_VOLATILE (t) = volatile_p;
2316 *expr_p = t;
2317 *changed_p = true;
2318 }
2320 return NULL_TREE;
2321 }
2323 /* Return the string length, maximum string length or maximum value of
2324 ARG in LENGTH.
2325 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
2326 is not NULL and, for TYPE == 0, its value is not equal to the length
2327 we determine or if we are unable to determine the length or value,
2328 return false. VISITED is a bitmap of visited variables.
2329 TYPE is 0 if string length should be returned, 1 for maximum string
2330 length and 2 for maximum value ARG can have. */
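/* Worked example (hypothetical SSA names): for
       s_1 = PHI <"foo", "bar">
   a TYPE == 0 query sees string length 3 on every PHI argument, so
   *LENGTH becomes 3 and we return true; had the lengths disagreed we
   would return false instead.  */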
2332 static bool
2333 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
2334 {
2335 tree var, val;
2338 if (TREE_CODE (arg) != SSA_NAME)
2339 {
2340 if (TREE_CODE (arg) == COND_EXPR)
2341 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2342 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
2343 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
2344 else if (TREE_CODE (arg) == ADDR_EXPR
2345 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2346 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2347 {
2348 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2349 if (TREE_CODE (aop0) == INDIRECT_REF
2350 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2351 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2352 length, visited, type);
2353 }
2354 if (type == 2)
2355 {
2356 val = arg;
2358 if (TREE_CODE (val) != INTEGER_CST
2359 || tree_int_cst_sgn (val) < 0)
2360 return false;
2361 }
2362 else
2363 val = c_strlen (arg, 1);
2364 if (!val)
2365 return false;
2367 if (*length)
2368 {
2369 if (type > 0)
2370 {
2371 if (TREE_CODE (*length) != INTEGER_CST
2372 || TREE_CODE (val) != INTEGER_CST)
2373 return false;
2375 if (tree_int_cst_lt (*length, val))
2376 *length = val;
2377 }
2379 else if (simple_cst_equal (val, *length) != 1)
2380 return false;
2381 }
2383 *length = val;
2384 return true;
2385 }
2387 /* If we were already here, break the infinite cycle. */
2388 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2389 return true;
2390 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2392 var = arg;
2393 def_stmt = SSA_NAME_DEF_STMT (var);
2395 switch (gimple_code (def_stmt))
2396 {
2397 case GIMPLE_ASSIGN:
2398 /* The RHS of the statement defining VAR must either have a
2399 constant length or come from another SSA_NAME with a constant
2400 length.  */
2401 if (gimple_assign_single_p (def_stmt)
2402 || gimple_assign_unary_nop_p (def_stmt))
2403 {
2404 tree rhs = gimple_assign_rhs1 (def_stmt);
2405 return get_maxval_strlen (rhs, length, visited, type);
2406 }
2407 return false;
2409 case GIMPLE_PHI:
2410 {
2411 /* All the arguments of the PHI node must have the same constant
2412 length.  */
2413 unsigned i;
2415 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2416 {
2417 tree arg = gimple_phi_arg (def_stmt, i)->def;
2419 /* If this PHI has itself as an argument, we cannot
2420 determine the string length of this argument. However,
2421 if we can find a constant string length for the other
2422 PHI args then we can still be sure that this is a
2423 constant string length. So be optimistic and just
2424 continue with the next argument. */
2425 if (arg == gimple_phi_result (def_stmt))
2426 continue;
2428 if (!get_maxval_strlen (arg, length, visited, type))
2429 return false;
2430 }
2431 }
2432 return true;
2434 default:
2435 return false;
2436 }
2437 }
2440 /* Fold builtin call in statement STMT. Returns a simplified tree.
2441 We may return a non-constant expression, including another call
2442 to a different function and with different arguments, e.g.,
2443 substituting memcpy for strcpy when the string length is known.
2444 Note that some builtins expand into inline code that may not
2445 be valid in GIMPLE. Callers must take care. */
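/* Two illustrative folds this performs (sketches, not literal dumps):
   strlen (s) where CCP proved s = "hi" becomes the constant 2, and
   strcpy (d, s) with a known source length N may be rewritten as
   memcpy (d, s, N + 1), a different call with different arguments.  */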
2447 static tree
2448 ccp_fold_builtin (gimple stmt)
2449 {
2450 tree result, val[3];
2451 tree callee, a;
2452 int arg_idx, type;
2453 bitmap visited;
2454 bool ignore;
2455 int nargs;
2457 gcc_assert (is_gimple_call (stmt));
2459 ignore = (gimple_call_lhs (stmt) == NULL);
2461 /* First try the generic builtin folder.  If that succeeds, return the
2462 result directly.  */
2463 result = fold_call_stmt (stmt, ignore);
2464 if (result)
2465 {
2466 if (ignore)
2467 STRIP_NOPS (result);
2468 return result;
2469 }
2471 /* Ignore MD builtins. */
2472 callee = gimple_call_fndecl (stmt);
2473 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2474 return NULL_TREE;
2476 /* If the builtin could not be folded, and it has no argument list,
2477 we can't do anything else.  */
2478 nargs = gimple_call_num_args (stmt);
2479 if (nargs == 0)
2480 return NULL_TREE;
2482 /* Limit the work only for builtins we know how to simplify. */
2483 switch (DECL_FUNCTION_CODE (callee))
2484 {
2485 case BUILT_IN_STRLEN:
2486 case BUILT_IN_FPUTS:
2487 case BUILT_IN_FPUTS_UNLOCKED:
2488 arg_idx = 0;
2489 type = 0;
2490 break;
2491 case BUILT_IN_STRCPY:
2492 case BUILT_IN_STRNCPY:
2493 arg_idx = 1;
2494 type = 0;
2495 break;
2496 case BUILT_IN_MEMCPY_CHK:
2497 case BUILT_IN_MEMPCPY_CHK:
2498 case BUILT_IN_MEMMOVE_CHK:
2499 case BUILT_IN_MEMSET_CHK:
2500 case BUILT_IN_STRNCPY_CHK:
2501 arg_idx = 2;
2502 type = 2;
2503 break;
2504 case BUILT_IN_STRCPY_CHK:
2505 case BUILT_IN_STPCPY_CHK:
2506 arg_idx = 1;
2507 type = 1;
2508 break;
2509 case BUILT_IN_SNPRINTF_CHK:
2510 case BUILT_IN_VSNPRINTF_CHK:
2511 arg_idx = 1;
2512 type = 2;
2513 break;
2514 default:
2515 return NULL_TREE;
2516 }
2518 if (arg_idx >= nargs)
2519 return NULL_TREE;
2521 /* Try to use the dataflow information gathered by the CCP process. */
2522 visited = BITMAP_ALLOC (NULL);
2523 bitmap_clear (visited);
2525 memset (val, 0, sizeof (val));
2526 a = gimple_call_arg (stmt, arg_idx);
2527 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
2528 val[arg_idx] = NULL_TREE;
2530 BITMAP_FREE (visited);
2532 result = NULL_TREE;
2533 switch (DECL_FUNCTION_CODE (callee))
2534 {
2535 case BUILT_IN_STRLEN:
2536 if (val[0] && nargs == 1)
2537 {
2538 tree new_val =
2539 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
2541 /* If the result is not a valid gimple value, or not a cast
2542 of a valid gimple value, then we can not use the result. */
2543 if (is_gimple_val (new_val)
2544 || (is_gimple_cast (new_val)
2545 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2546 result = new_val;
2547 }
2548 break;
2550 case BUILT_IN_STRCPY:
2551 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2552 result = fold_builtin_strcpy (callee,
2553 gimple_call_arg (stmt, 0),
2554 gimple_call_arg (stmt, 1),
2555 val[1]);
2556 break;
2558 case BUILT_IN_STRNCPY:
2559 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2560 result = fold_builtin_strncpy (callee,
2561 gimple_call_arg (stmt, 0),
2562 gimple_call_arg (stmt, 1),
2563 gimple_call_arg (stmt, 2),
2564 val[1]);
2565 break;
2567 case BUILT_IN_FPUTS:
2568 if (nargs == 2)
2569 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2570 gimple_call_arg (stmt, 1),
2571 ignore, false, val[0]);
2572 break;
2574 case BUILT_IN_FPUTS_UNLOCKED:
2575 if (nargs == 2)
2576 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2577 gimple_call_arg (stmt, 1),
2578 ignore, true, val[0]);
2579 break;
2581 case BUILT_IN_MEMCPY_CHK:
2582 case BUILT_IN_MEMPCPY_CHK:
2583 case BUILT_IN_MEMMOVE_CHK:
2584 case BUILT_IN_MEMSET_CHK:
2585 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2586 result = fold_builtin_memory_chk (callee,
2587 gimple_call_arg (stmt, 0),
2588 gimple_call_arg (stmt, 1),
2589 gimple_call_arg (stmt, 2),
2590 gimple_call_arg (stmt, 3),
2591 val[2], ignore,
2592 DECL_FUNCTION_CODE (callee));
2593 break;
2595 case BUILT_IN_STRCPY_CHK:
2596 case BUILT_IN_STPCPY_CHK:
2597 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2598 result = fold_builtin_stxcpy_chk (callee,
2599 gimple_call_arg (stmt, 0),
2600 gimple_call_arg (stmt, 1),
2601 gimple_call_arg (stmt, 2),
2602 val[1], ignore,
2603 DECL_FUNCTION_CODE (callee));
2604 break;
2606 case BUILT_IN_STRNCPY_CHK:
2607 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2608 result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
2609 gimple_call_arg (stmt, 1),
2610 gimple_call_arg (stmt, 2),
2611 gimple_call_arg (stmt, 3),
2612 val[2]);
2613 break;
2615 case BUILT_IN_SNPRINTF_CHK:
2616 case BUILT_IN_VSNPRINTF_CHK:
2617 if (val[1] && is_gimple_val (val[1]))
2618 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2619 DECL_FUNCTION_CODE (callee));
2620 break;
2622 default:
2623 gcc_unreachable ();
2624 }
2626 if (result && ignore)
2627 result = fold_ignored_result (result);
2629 return result;
2630 }
2631 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
2632 replacement rhs for the statement or NULL_TREE if no simplification
2633 could be made.  It is assumed that the operands have been previously
2634 folded.  */
2636 static tree
2637 fold_gimple_assign (gimple_stmt_iterator *si)
2638 {
2639 gimple stmt = gsi_stmt (*si);
2640 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2642 tree result = NULL_TREE;
2644 switch (get_gimple_rhs_class (subcode))
2645 {
2646 case GIMPLE_SINGLE_RHS:
2647 {
2648 tree rhs = gimple_assign_rhs1 (stmt);
2650 /* Try to fold a conditional expression. */
2651 if (TREE_CODE (rhs) == COND_EXPR)
2653 tree temp = fold (COND_EXPR_COND (rhs));
2654 if (temp != COND_EXPR_COND (rhs))
2655 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
2656 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2659 /* If we couldn't fold the RHS, hand over to the generic
2660 fold routines.  */
2661 if (result == NULL_TREE)
2662 result = fold (rhs);
2664 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
2665 that may have been added by fold, and "useless" type
2666 conversions that might now be apparent due to propagation. */
2667 STRIP_USELESS_TYPE_CONVERSION (result);
2669 if (result != rhs && valid_gimple_rhs_p (result))
2670 return result;
2671 else
2672 /* It is possible that fold_stmt_r simplified the RHS.
2673 Make sure that the subcode of this statement still
2674 reflects the principal operator of the rhs operand. */
2675 return rhs;
2676 }
2677 break;
2679 case GIMPLE_UNARY_RHS:
2680 {
2681 tree rhs = gimple_assign_rhs1 (stmt);
2683 result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
2684 if (result)
2685 {
2686 /* If the operation was a conversion do _not_ mark a
2687 resulting constant with TREE_OVERFLOW if the original
2688 constant was not. These conversions have implementation
2689 defined behavior and retaining the TREE_OVERFLOW flag
2690 here would confuse later passes such as VRP. */
2691 if (CONVERT_EXPR_CODE_P (subcode)
2692 && TREE_CODE (result) == INTEGER_CST
2693 && TREE_CODE (rhs) == INTEGER_CST)
2694 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
2696 STRIP_USELESS_TYPE_CONVERSION (result);
2697 if (valid_gimple_rhs_p (result))
2698 return result;
2699 }
2700 else if (CONVERT_EXPR_CODE_P (subcode)
2701 && POINTER_TYPE_P (gimple_expr_type (stmt))
2702 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2703 {
2704 tree type = gimple_expr_type (stmt);
2705 tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
2706 integer_zero_node, type);
2707 if (t)
2708 return t;
2709 }
2710 }
2711 break;
2713 case GIMPLE_BINARY_RHS:
2714 /* Try to fold pointer addition. */
2715 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2716 {
2717 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2718 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
2719 {
2720 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
2721 if (!useless_type_conversion_p
2722 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
2723 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2724 }
2725 result = maybe_fold_stmt_addition (type,
2726 gimple_assign_rhs1 (stmt),
2727 gimple_assign_rhs2 (stmt));
2728 }
2730 if (!result)
2731 result = fold_binary (subcode,
2732 TREE_TYPE (gimple_assign_lhs (stmt)),
2733 gimple_assign_rhs1 (stmt),
2734 gimple_assign_rhs2 (stmt));
2736 if (result)
2737 {
2738 STRIP_USELESS_TYPE_CONVERSION (result);
2739 if (valid_gimple_rhs_p (result))
2740 return result;
2742 /* Fold might have produced non-GIMPLE, so if we trust it blindly
2743 we lose canonicalization opportunities.  Do not go again
2744 through fold here though, or the same non-GIMPLE will be
2745 produced.  */
2746 if (commutative_tree_code (subcode)
2747 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2748 gimple_assign_rhs2 (stmt), false))
2749 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
2750 gimple_assign_rhs2 (stmt),
2751 gimple_assign_rhs1 (stmt));
2752 }
2753 break;
2755 case GIMPLE_INVALID_RHS:
2756 gcc_unreachable ();
2757 }
2759 return NULL_TREE;
2760 }
2762 /* Attempt to fold a conditional statement. Return true if any changes were
2763 made. We only attempt to fold the condition expression, and do not perform
2764 any transformation that would require alteration of the cfg. It is
2765 assumed that the operands have been previously folded. */
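/* Sketch of the effect: once propagation turns the condition into,
   say, "if (5 != 0)", fold_binary reduces it to a constant and the
   GIMPLE_COND becomes the trivial "if (1)"; the unreachable arm is
   removed later by CFG cleanup, not here.  */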
2767 static bool
2768 fold_gimple_cond (gimple stmt)
2769 {
2770 tree result = fold_binary (gimple_cond_code (stmt),
2771 boolean_type_node,
2772 gimple_cond_lhs (stmt),
2773 gimple_cond_rhs (stmt));
2775 if (result)
2776 {
2777 STRIP_USELESS_TYPE_CONVERSION (result);
2778 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2779 {
2780 gimple_cond_set_condition_from_tree (stmt, result);
2781 return true;
2782 }
2783 }
2785 return false;
2786 }
2789 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2790 The statement may be replaced by another statement, e.g., if the call
2791 simplifies to a constant value. Return true if any changes were made.
2792 It is assumed that the operands have been previously folded. */
2794 static bool
2795 fold_gimple_call (gimple_stmt_iterator *gsi)
2796 {
2797 gimple stmt = gsi_stmt (*gsi);
2799 tree callee = gimple_call_fndecl (stmt);
2801 /* Check for builtins that CCP can handle using information not
2802 available in the generic fold routines. */
2803 if (callee && DECL_BUILT_IN (callee))
2804 {
2805 tree result = ccp_fold_builtin (stmt);
2807 if (result)
2808 return update_call_from_tree (gsi, result);
2809 }
2810 else
2811 {
2812 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2813 here are when we've propagated the address of a decl into the
2814 object slot.  */
2815 /* ??? Should perhaps do this in fold proper. However, doing it
2816 there requires that we create a new CALL_EXPR, and that requires
2817 copying EH region info to the new node. Easier to just do it
2818 here where we can just smash the call operand. */
2819 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
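/* Illustrative (C++-flavoured) case: when earlier propagation has
   turned a virtual call "p->f ()" into "(&obj)->f ()" for a concrete
   object obj, the OBJ_TYPE_REF below can be resolved through the
   language hook into a direct call to obj's method.  */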
2820 callee = gimple_call_fn (stmt);
2821 if (TREE_CODE (callee) == OBJ_TYPE_REF
2822 && lang_hooks.fold_obj_type_ref
2823 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2824 && DECL_P (TREE_OPERAND
2825 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2826 {
2827 tree t;
2829 /* ??? Caution: Broken ADDR_EXPR semantics means that
2830 looking at the type of the operand of the addr_expr
2831 can yield an array type. See silly exception in
2832 check_pointer_types_r. */
2833 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2834 t = lang_hooks.fold_obj_type_ref (callee, t);
2835 if (t)
2836 {
2837 gimple_call_set_fn (stmt, t);
2838 return true;
2839 }
2840 }
2841 }
2843 return false;
2844 }
2846 /* Fold the statement pointed to by GSI. In some cases, this function may
2847 replace the whole statement with a new one. Returns true iff folding
2848 makes any changes. */
2850 bool
2851 fold_stmt (gimple_stmt_iterator *gsi)
2852 {
2853 tree res;
2854 struct fold_stmt_r_data fold_stmt_r_data;
2855 struct walk_stmt_info wi;
2857 bool changed = false;
2858 bool inside_addr_expr = false;
2860 gimple stmt = gsi_stmt (*gsi);
2862 fold_stmt_r_data.stmt = stmt;
2863 fold_stmt_r_data.changed_p = &changed;
2864 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2866 memset (&wi, 0, sizeof (wi));
2867 wi.info = &fold_stmt_r_data;
2869 /* Fold the individual operands.
2870 For example, fold instances of *&VAR into VAR, etc. */
2871 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2872 gcc_assert (!res);
2874 /* Fold the main computation performed by the statement. */
2875 switch (gimple_code (stmt))
2876 {
2877 case GIMPLE_ASSIGN:
2878 {
2879 tree new_rhs = fold_gimple_assign (gsi);
2880 if (new_rhs != NULL_TREE)
2881 {
2882 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2883 changed = true;
2884 }
2885 stmt = gsi_stmt (*gsi);
2886 break;
2887 }
2888 case GIMPLE_COND:
2889 changed |= fold_gimple_cond (stmt);
2890 break;
2891 case GIMPLE_CALL:
2892 /* The entire statement may be replaced in this case.  */
2893 changed |= fold_gimple_call (gsi);
2894 break;
2896 default:
2897 break;
2898 }
2900 return changed;
2901 }
2904 /* Perform the minimal folding on statement STMT. Only operations like
2905 *&x created by constant propagation are handled. The statement cannot
2906 be replaced with a new one. Return true if the statement was
2907 changed, false otherwise. */
2909 bool
2910 fold_stmt_inplace (gimple stmt)
2911 {
2912 tree res;
2913 struct fold_stmt_r_data fold_stmt_r_data;
2914 struct walk_stmt_info wi;
2915 gimple_stmt_iterator si;
2917 bool changed = false;
2918 bool inside_addr_expr = false;
2920 fold_stmt_r_data.stmt = stmt;
2921 fold_stmt_r_data.changed_p = &changed;
2922 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2924 memset (&wi, 0, sizeof (wi));
2925 wi.info = &fold_stmt_r_data;
2927 /* Fold the individual operands.
2928 For example, fold instances of *&VAR into VAR, etc.
2930 It appears that, at one time, maybe_fold_stmt_indirect
2931 would cause the walk to return non-null in order to
2932 signal that the entire statement should be replaced with
2933 a call to _builtin_trap. This functionality is currently
2934 disabled, as noted in a FIXME, and cannot be supported here. */
2935 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2936 gcc_assert (!res);
2938 /* Fold the main computation performed by the statement. */
2939 switch (gimple_code (stmt))
2940 {
2941 case GIMPLE_ASSIGN:
2942 {
2943 unsigned old_num_ops;
2944 tree new_rhs;
2945 old_num_ops = gimple_num_ops (stmt);
2946 si = gsi_for_stmt (stmt);
2947 new_rhs = fold_gimple_assign (&si);
2948 if (new_rhs != NULL_TREE
2949 && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
2951 gimple_assign_set_rhs_from_tree (&si, new_rhs);
2952 changed = true;
2953 }
2954 gcc_assert (gsi_stmt (si) == stmt);
2955 break;
2956 }
2957 case GIMPLE_COND:
2958 changed |= fold_gimple_cond (stmt);
2959 break;
2961 default:
2962 break;
2963 }
2965 return changed;
2966 }
2968 /* Try to optimize out __builtin_stack_restore. Optimize it out
2969 if there is another __builtin_stack_restore in the same basic
2970 block and no calls or ASM_EXPRs are in between, or if this block's
2971 only outgoing edge is to EXIT_BLOCK and there are no calls or
2972 ASM_EXPRs after this __builtin_stack_restore. */
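/* The pattern being removed, roughly (hypothetical GIMPLE):
       p_1 = __builtin_stack_save ();
       ... no calls, asms or allocas ...
       __builtin_stack_restore (p_1);
   Here the restore is useless; it is folded away and the matching
   stack_save is rewritten into a dead assignment.  */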
2974 static tree
2975 optimize_stack_restore (gimple_stmt_iterator i)
2976 {
2977 tree callee, rhs;
2978 gimple stmt, stack_save;
2979 gimple_stmt_iterator stack_save_gsi;
2981 basic_block bb = gsi_bb (i);
2982 gimple call = gsi_stmt (i);
2984 if (gimple_code (call) != GIMPLE_CALL
2985 || gimple_call_num_args (call) != 1
2986 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2987 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
2988 return NULL_TREE;
2990 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
2992 stmt = gsi_stmt (i);
2993 if (gimple_code (stmt) == GIMPLE_ASM)
2994 return NULL_TREE;
2995 if (gimple_code (stmt) != GIMPLE_CALL)
2996 continue;
2998 callee = gimple_call_fndecl (stmt);
2999 if (!callee
3000 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3001 /* All regular builtins are ok, just obviously not alloca.  */
3002 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
3003 return NULL_TREE;
3005 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
3006 break;
3007 }
3009 if (gsi_end_p (i)
3010 && (! single_succ_p (bb)
3011 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
3012 return NULL_TREE;
3014 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3015 if (gimple_code (stack_save) != GIMPLE_CALL
3016 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
3017 || stmt_could_throw_p (stack_save)
3018 || !has_single_use (gimple_call_arg (call, 0)))
3019 return NULL_TREE;
3021 callee = gimple_call_fndecl (stack_save);
3022 if (!callee
3023 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3024 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
3025 || gimple_call_num_args (stack_save) != 0)
3026 return NULL_TREE;
3028 stack_save_gsi = gsi_for_stmt (stack_save);
3029 push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3030 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3031 if (!update_call_from_tree (&stack_save_gsi, rhs))
3032 {
3033 discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3034 return NULL_TREE;
3035 }
3036 pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3038 /* No effect, so the statement will be deleted. */
3039 return integer_zero_node;
3040 }
3042 /* If va_list type is a simple pointer and nothing special is needed,
3043 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
3044 __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
3045 pointer assignment. */
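/* Schematically, for such targets (source-level sketches, not
   literal GIMPLE dumps):
       __builtin_va_start (&ap, 0)  ->  ap = __builtin_next_arg (0)
       __builtin_va_copy (&d, s)    ->  d = s
       __builtin_va_end (&ap)       ->  (deleted)  */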
3047 static tree
3048 optimize_stdarg_builtin (gimple call)
3049 {
3050 tree callee, lhs, rhs, cfun_va_list;
3051 bool va_list_simple_ptr;
3053 if (gimple_code (call) != GIMPLE_CALL)
3054 return NULL_TREE;
3056 callee = gimple_call_fndecl (call);
3058 cfun_va_list = targetm.fn_abi_va_list (callee);
3059 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3060 && (TREE_TYPE (cfun_va_list) == void_type_node
3061 || TREE_TYPE (cfun_va_list) == char_type_node);
3063 switch (DECL_FUNCTION_CODE (callee))
3064 {
3065 case BUILT_IN_VA_START:
3066 if (!va_list_simple_ptr
3067 || targetm.expand_builtin_va_start != NULL
3068 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
3069 return NULL_TREE;
3071 if (gimple_call_num_args (call) != 2)
3072 return NULL_TREE;
3074 lhs = gimple_call_arg (call, 0);
3075 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3076 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3077 != TYPE_MAIN_VARIANT (cfun_va_list))
3078 return NULL_TREE;
3080 lhs = build_fold_indirect_ref (lhs);
3081 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
3082 1, integer_zero_node);
3083 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3084 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3086 case BUILT_IN_VA_COPY:
3087 if (!va_list_simple_ptr)
3088 return NULL_TREE;
3090 if (gimple_call_num_args (call) != 2)
3091 return NULL_TREE;
3093 lhs = gimple_call_arg (call, 0);
3094 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3095 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3096 != TYPE_MAIN_VARIANT (cfun_va_list))
3097 return NULL_TREE;
3099 lhs = build_fold_indirect_ref (lhs);
3100 rhs = gimple_call_arg (call, 1);
3101 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
3102 != TYPE_MAIN_VARIANT (cfun_va_list))
3103 return NULL_TREE;
3105 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3106 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3108 case BUILT_IN_VA_END:
3109 /* No effect, so the statement will be deleted. */
3110 return integer_zero_node;
3112 default:
3113 gcc_unreachable ();
3114 }
3115 }
3117 /* Convert EXPR into a GIMPLE value suitable for substitution on the
3118 RHS of an assignment. Insert the necessary statements before
3119 iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
3120 is replaced.  If the call is expected to produce a result, then it
3121 is replaced by an assignment of the new RHS to the result variable.
3122 If the result is to be ignored, then the call is replaced by a
3123 GIMPLE_NOP.  */
3125 static void
3126 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
3127 {
3128 tree lhs;
3129 tree tmp = NULL_TREE; /* Silence warning. */
3130 gimple stmt, new_stmt;
3131 gimple_stmt_iterator i;
3132 gimple_seq stmts = gimple_seq_alloc ();
3133 struct gimplify_ctx gctx;
3135 stmt = gsi_stmt (*si_p);
3137 gcc_assert (is_gimple_call (stmt));
3139 lhs = gimple_call_lhs (stmt);
3141 push_gimplify_context (&gctx);
3143 if (lhs == NULL_TREE)
3144 gimplify_and_add (expr, &stmts);
3145 else
3146 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
3148 pop_gimplify_context (NULL);
3150 if (gimple_has_location (stmt))
3151 annotate_all_with_location (stmts, gimple_location (stmt));
3153 /* The replacement can expose previously unreferenced variables. */
3154 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3155 {
3156 new_stmt = gsi_stmt (i);
3157 find_new_referenced_vars (new_stmt);
3158 gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
3159 mark_symbols_for_renaming (new_stmt);
3160 gsi_next (si_p);
3161 }
3163 if (lhs == NULL_TREE)
3164 new_stmt = gimple_build_nop ();
3165 else
3166 {
3167 new_stmt = gimple_build_assign (lhs, tmp);
3168 copy_virtual_operands (new_stmt, stmt);
3169 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
3170 }
3172 gimple_set_location (new_stmt, gimple_location (stmt));
3173 gsi_replace (si_p, new_stmt, false);
3174 }
3176 /* A simple pass that attempts to fold all builtin functions. This pass
3177 is run after we've propagated as many constants as we can. */
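/* E.g. (hypothetical): once CCP has shown the argument constant, a
   statement "n_2 = strlen ("abcd")" is replaced by "n_2 = 4"; the
   statement is then retried in case the fold exposed yet another
   foldable builtin, and blocks made unreachable by resolved
   conditions are cleaned up at the end of the pass.  */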
3179 static unsigned int
3180 execute_fold_all_builtins (void)
3181 {
3182 bool cfg_changed = false;
3183 basic_block bb;
3184 unsigned int todoflags = 0;
3186 FOR_EACH_BB (bb)
3187 {
3188 gimple_stmt_iterator i;
3189 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3190 {
3191 gimple stmt, old_stmt;
3192 tree callee, result;
3193 enum built_in_function fcode;
3195 stmt = gsi_stmt (i);
3197 if (gimple_code (stmt) != GIMPLE_CALL)
3198 {
3199 gsi_next (&i);
3200 continue;
3201 }
3202 callee = gimple_call_fndecl (stmt);
3203 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3204 {
3205 gsi_next (&i);
3206 continue;
3207 }
3208 fcode = DECL_FUNCTION_CODE (callee);
3210 result = ccp_fold_builtin (stmt);
3212 if (result)
3213 gimple_remove_stmt_histograms (cfun, stmt);
3215 if (!result)
3216 switch (DECL_FUNCTION_CODE (callee))
3217 {
3218 case BUILT_IN_CONSTANT_P:
3219 /* Resolve __builtin_constant_p. If it hasn't been
3220 folded to integer_one_node by now, it's fairly
3221 certain that the value simply isn't constant. */
3222 result = integer_zero_node;
3223 break;
3225 case BUILT_IN_STACK_RESTORE:
3226 result = optimize_stack_restore (i);
3227 if (result)
3228 break;
3229 gsi_next (&i);
3230 continue;
3232 case BUILT_IN_VA_START:
3233 case BUILT_IN_VA_END:
3234 case BUILT_IN_VA_COPY:
3235 /* These shouldn't be folded before pass_stdarg. */
3236 result = optimize_stdarg_builtin (stmt);
3237 if (result)
3238 break;
3239 /* FALLTHRU */
3241 default:
3242 gsi_next (&i);
3243 continue;
3244 }
3246 if (dump_file && (dump_flags & TDF_DETAILS))
3247 {
3248 fprintf (dump_file, "Simplified\n ");
3249 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3250 }
3252 old_stmt = stmt;
3253 push_stmt_changes (gsi_stmt_ptr (&i));
3255 if (!update_call_from_tree (&i, result))
3256 {
3257 gimplify_and_update_call_from_tree (&i, result);
3258 todoflags |= TODO_rebuild_alias;
3259 }
3261 stmt = gsi_stmt (i);
3262 pop_stmt_changes (gsi_stmt_ptr (&i));
3264 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3265 && gimple_purge_dead_eh_edges (bb))
3266 cfg_changed = true;
3268 if (dump_file && (dump_flags & TDF_DETAILS))
3269 {
3270 fprintf (dump_file, "to\n ");
3271 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3272 fprintf (dump_file, "\n");
3273 }
3275 /* Retry the same statement if it changed into another
3276 builtin, there might be new opportunities now. */
3277 if (gimple_code (stmt) != GIMPLE_CALL)
3278 {
3279 gsi_next (&i);
3280 break;
3281 }
3282 callee = gimple_call_fndecl (stmt);
3283 if (!callee
3284 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3285 || DECL_FUNCTION_CODE (callee) == fcode)
3286 gsi_next (&i);
3287 }
3288 }
3290 /* Delete unreachable blocks.  */
3291 if (cfg_changed)
3292 todoflags |= TODO_cleanup_cfg;
3294 return todoflags;
3295 }
3298 struct gimple_opt_pass pass_fold_builtins =
3299 {
3300 {
3301 GIMPLE_PASS,
3302 "fab", /* name */
3303 NULL, /* gate */
3304 execute_fold_all_builtins, /* execute */
3305 NULL, /* sub */
3306 NULL, /* next */
3307 0, /* static_pass_number */
3308 0, /* tv_id */
3309 PROP_cfg | PROP_ssa, /* properties_required */
3310 0, /* properties_provided */
3311 0, /* properties_destroyed */
3312 0, /* todo_flags_start */
3313 TODO_dump_func
3314 | TODO_verify_ssa
3315 | TODO_update_ssa /* todo_flags_finish */
3316 }
3317 };