/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

		if (PRED)
		  a_9 = 3;
		else
		  a_10 = 100;
		a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
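
   For example, given

	x_1 = 4;
	y_2 = x_1 + 1;
	if (y_2 > 10)
	  ...

   visiting the first assignment sets x_1 to CONSTANT 4; visiting the
   second then yields CONSTANT 5 for y_2; and the predicate folds to
   false, so only the outgoing edge for the else arm is marked
   executable.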

   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	# a_5 = VDEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);

	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"

/* Possible lattice values.  */
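typedef enum
{
  /* Order matters: set_lattice_value asserts that transitions are
     monotonically increasing, and the XCNEWVEC zero-fill in
     ccp_initialize relies on UNINITIALIZED being 0.  */
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;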

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}

/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

static tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_USELESS_TYPE_CONVERSION (val);
	  if (is_gimple_min_invariant (val))
	    return val;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && !DECL_EXTERNAL (sym)
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  tree cst_val;

  if (!is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
	 non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
	 initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (var);

      if (gimple_nop_p (stmt))
	{
	  /* Variables defined by an empty statement are those used
	     before being initialized.  If VAR is a local variable, we
	     can assume initially that it is UNDEFINED, otherwise we must
	     consider it VARYING.  */
	  if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	    val.lattice_val = UNDEFINED;
	  else
	    val.lattice_val = VARYING;
	}
      else if (is_gimple_assign (stmt)
	       /* Value-returning GIMPLE_CALL statements assign to
		  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	       || (is_gimple_call (stmt)
		   && gimple_call_lhs (stmt) != NULL_TREE)
	       || gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Any other variable defined by an assignment or a PHI node
	     is considered UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
      else
	{
	  /* Otherwise, VAR will never take on a constant value.  */
	  val.lattice_val = VARYING;
	}
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have
   an expression such as

	x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL_TREE;
      return;
    }
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
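
/* For instance, 'z_4 = x_2 + y_3' is likely UNDEFINED if either operand
   is UNDEFINED, CONSTANT if at least one operand is CONSTANT and none
   is UNDEFINED, and VARYING otherwise.  For 'z_4 = x_2 / y_3' an
   UNDEFINED operand yields VARYING instead, since division is not among
   the codes below whose result stays undefined.  */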

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
     is_gimple_min_invariant, so we do not consider calls or
     other forms of assignment.  */
  if (gimple_assign_single_p (stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
    return CONSTANT;

  if (code == GIMPLE_COND
      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
    return CONSTANT;

  if (code == GIMPLE_SWITCH
      && is_gimple_min_invariant (gimple_switch_index (stmt)))
    return CONSTANT;

  /* Arrive here for more complex cases.  */

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
	 memory may have only VARYING virtual operands, but still be
	 constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;

      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;

  return something_changed;
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any.  */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any.
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci	    if (i == j)
	 Ci M Cj = VARYING  if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  VAL1 already holds
	 the common value.  */
      val1->lattice_val = CONSTANT;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
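
/* A minimal usage sketch (the values here are illustrative, not part
   of the pass):

     prop_value_t a = { CONSTANT, build_int_cst (integer_type_node, 4) };
     prop_value_t b = { UNDEFINED, NULL_TREE };
     ccp_lattice_meet (&b, &a);   // UNDEFINED M C4: b becomes CONSTANT 4
     prop_value_t c = { CONSTANT, build_int_cst (integer_type_node, 5) };
     ccp_lattice_meet (&b, &c);   // C4 M C5, 4 != 5: b becomes VARYING  */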

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */
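
/* For example, with 'int x;' and ADDR = '&x', a dereference '*p_1'
   whose pointer has been found to equal &x can be replaced by plain
   'x', provided the checks below on type completeness, volatility and
   type compatibility succeed.  */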

static bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant
		     value, if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;

		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && may_propagate_address_into_dereference
			       (val->value, *base))
			{
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  tree ret, save = *base;
			  *base = TREE_OPERAND (val->value, 0);
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}

	      if (kind == tcc_reference)
		{
		  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT)
			return fold_unary (VIEW_CONVERT_EXPR,
					   TREE_TYPE (rhs), val->value);
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);

	      /* Simplify the operand down to a constant.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  /* Do not allow differences in volatile qualification
		     as this might get us confused as to whether a
		     propagation destination statement is volatile
		     or not.  See PR36988.  */
		  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
		      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
		{
		  tree tem;

		  /* Still try to generate a constant of correct type.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
				   (op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return fold_unary_ignore_overflow (subcode,
						 gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree op1 = gimple_assign_rhs2 (stmt);

	      /* Simplify the operands down to constants when appropriate.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      if (TREE_CODE (op1) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op1);
		  if (val->lattice_val == CONSTANT)
		    op1 = val->value;
		}

	      /* Fold &foo + CST into an invariant reference if possible.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree tem = maybe_fold_offset_to_address (op0, op1,
							   TREE_TYPE (lhs));
		  if (tem != NULL_TREE)
		    return tem;
		}

	      return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = gimple_call_fn (stmt);
	prop_value_t *val;

	if (TREE_CODE (fn) == SSA_NAME)
	  {
	    val = get_value (fn);
	    if (val->lattice_val == CONSTANT)
	      fn = val->value;
	  }
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;

	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      {
		args[i] = gimple_call_arg (stmt, i);
		if (TREE_CODE (args[i]) == SSA_NAME)
		  {
		    val = get_value (args[i]);
		    if (val->lattice_val == CONSTANT)
		      args[i] = val->value;
		  }
	      }
	    call = build_call_array (gimple_call_return_type (stmt),
				     fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant
	       value, if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR
	    && useless_type_conversion_p (TREE_TYPE (t),
					  TREE_TYPE (TREE_TYPE (value->value))))
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:
	  ;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
 }
};

/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
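
/* For example, given 'static int a[10];' (and assuming 4-byte int),
   folding the dereference *(&a + 8) with ORIG_TYPE 'int' yields the
   array reference a[2]: the byte offset 8 divides exactly by the
   element size.  */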

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);

  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should be not folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     hack"-style flexible array members).  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}

/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret, new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used as for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}

/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

static tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;

  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
	 so it needs to be removed and new COMPONENT_REF constructed.
	 The wrong COMPONENT_REF are often constructed by folding the
	 (type *)&object within the expression (type *)&object+offset  */
      if (handled_component_p (base))
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;

	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  if (size == maxsize
	      && size != -1
	      && !(sub_offset & (BITS_PER_UNIT - 1)))
	    {
	      base = newbase;
	      offset = int_const_binop (PLUS_EXPR, offset,
					build_int_cst (TREE_TYPE (offset),
						       sub_offset / BITS_PER_UNIT), 1);
	    }
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }

  ret = maybe_fold_offset_to_component_ref (type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}

/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   Before attempting the conversion strip off existing component refs.  */

tree
maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
	      && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
	 make sure not to fold address arithmetic so that we change
	 reference from one array to another.  This would happen for
	 example for

	   struct X { char s1[10]; char s2[10] } s;
	   char *foo (void) { return &s.s2[-4]; }

	 where we need to avoid generating &s.s1[6].  As the C and
	 C++ frontends create different initial trees
	 (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
	 sophisticated comparisons here.  Note that checking for the
	 condition after the fact is easier than trying to avoid doing
	 the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
	orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
	   || (TREE_CODE (orig) == COMPONENT_REF
	       && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
	  && (TREE_CODE (t) == ARRAY_REF
	      || TREE_CODE (t) == COMPONENT_REF)
	  && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
			       ? TREE_OPERAND (orig, 0) : orig,
			       TREE_CODE (t) == ARRAY_REF
			       ? TREE_OPERAND (t, 0) : t, 0))
	return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
	return NULL_TREE;
      return build_fold_addr_expr_with_type (t, ptr_type);
    }

  return NULL_TREE;
}

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && is_gimple_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
					  TREE_TYPE (expr));
      if (t)
	{
	  /* Preserve volatileness of the original expression.
	     We can end up with a plain decl here which is shared
	     and we shouldn't mess with its flags.  */
	  if (!SSA_VAR_P (t))
	    TREE_THIS_VOLATILE (t) = volatile_p;
	  return t;
	}
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_reference (base, offset,
					      TREE_TYPE (expr));
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}

/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
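
/* For example, with 'int a[10];' (and assuming 4-byte int), the
   expression '(int *) &a + 4' arrives here as a POINTER_PLUS_EXPR
   with OP0 = &a and OP1 = 4, and is rewritten to '&a[1]'.  */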

static tree
maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, res_type, t);

  return t;
}

/* For passing state through walk_tree into fold_stmt_r and its
   callbacks.  */

struct fold_stmt_r_data
{
  gimple stmt;
  bool *changed_p;
  bool *inside_addr_expr_p;
};
2163 /* Subroutine of fold_stmt called via walk_tree. We perform several
2164 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
2167 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2169 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2170 struct fold_stmt_r_data *fold_stmt_r_data;
2171 bool *inside_addr_expr_p;
2173 tree expr = *expr_p, t;
2174 bool volatile_p = TREE_THIS_VOLATILE (expr);
2176 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
2177 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2178 changed_p = fold_stmt_r_data->changed_p;
  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
				    integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
	t = NULL_TREE;
      if (!t
	  && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	/* If we had a good reason for propagating the address here,
	   make sure we end up with valid gimple.  See PR34989.  */
	t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
      break;

    case NOP_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      if (POINTER_TYPE_P (TREE_TYPE (expr))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
						integer_zero_node,
						TREE_TYPE (TREE_TYPE (expr)))))
	return t;
      break;

      /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
	 We'd only want to bother decomposing an existing ARRAY_REF if
	 the base array is found to have another offset contained within.
	 Otherwise we'd be wasting time.  */
    case ARRAY_REF:
      /* If we are not processing expressions found within an
	 ADDR_EXPR, then we can fold constant array references.
	 Don't fold on LHS either, to avoid folding "abc"[0] = 5
	 to 'a' = 5.  */
      if (!*inside_addr_expr_p && !wi->is_lhs)
	t = fold_read_from_constant_string (expr);
      else
	t = NULL;
      break;

    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the value is properly considered constant, and so gets
	 propagated as expected.  */
      if (*changed_p)
	recompute_tree_invariant_for_addr_expr (expr);
      return NULL_TREE;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
	 We've already checked that the records are compatible, so we should
	 come up with a set of compatible fields.  */
      {
	tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
	tree expr_field = TREE_OPERAND (expr, 1);

	if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
	  {
	    expr_field = find_compatible_field (expr_record, expr_field);
	    TREE_OPERAND (expr, 1) = expr_field;
	  }
      }
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
				    TREE_OPERAND (expr, 0),
				    TREE_OPERAND (expr, 1));
      break;

    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
	{
	  tree op0 = TREE_OPERAND (expr, 0);
	  tree tem;
	  bool set;

	  fold_defer_overflow_warnings ();
	  tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
			     TREE_OPERAND (op0, 0),
			     TREE_OPERAND (op0, 1));
	  /* This is actually a conditional expression, not a GIMPLE
	     conditional statement, however, the valid_gimple_rhs_p
	     test still applies.  */
	  set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
	  fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
	  if (set)
	    COND_EXPR_COND (expr) = tem;
	  t = expr;
	  break;
	}
      /* FALLTHRU */

    default:
      t = NULL_TREE;
      break;
    }

  if (t)
    {
      /* Preserve volatileness of the original expression.
	 We can end up with a plain decl here which is shared
	 and we shouldn't mess with its flags.  */
      if (!SSA_VAR_P (t))
	TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
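
/* An illustrative (hypothetical) case: on the RHS, and outside of an
   ADDR_EXPR, a constant array reference such as

     c_4 = "abc"[1];

   is folded by the walker above into

     c_4 = 98;	i.e. 'b'

   whereas the same reference on the LHS is deliberately left alone.  */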
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  /* If we already have a length, it must match the new one.  */
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
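
/* An illustrative (hypothetical) GIMPLE fragment: given

     s_3 = PHI <"foo"(2), "bar!"(3)>

   get_maxval_strlen with TYPE == 1 walks both PHI arguments and
   yields a maximum string length of 4, while with TYPE == 0 it fails,
   because the two lengths 3 and 4 differ.  */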
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we can't do anything.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);
  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (is_gimple_cast (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
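
/* An illustrative (hypothetical) case: when CCP has discovered that the
   source operand holds the constant string "hi", a call

     strcpy (dst_2, src_5);

   can be rewritten by the folder above into

     memcpy (dst_2, src_5, 3);

   copying the terminating nul as well.  */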
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL_TREE;
  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree temp = fold (COND_EXPR_COND (rhs));
	    if (temp != COND_EXPR_COND (rhs))
	      result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
				    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
	  }

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;
	else
	  /* It is possible that fold_stmt_r simplified the RHS.
	     Make sure that the subcode of this statement still
	     reflects the principal operator of the rhs operand.  */
	  return gimple_assign_rhs1 (stmt);
      }
      break;
    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;
    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }

	  result = maybe_fold_stmt_addition (type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}

      if (!result)
	result = fold_binary (subcode,
			      TREE_TYPE (gimple_assign_lhs (stmt)),
			      gimple_assign_rhs1 (stmt),
			      gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt));
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
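
/* An illustrative (hypothetical) case of the operand canonicalization
   above: for a commutative subcode, an assignment

     x_3 = 5 + y_2;

   may be returned with its operands in canonical order,

     x_3 = y_2 + 5;

   without going back through fold, which could reintroduce the same
   non-GIMPLE tree.  */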
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
			     boolean_type_node,
			     gimple_cond_lhs (stmt),
			     gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
	return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
	 here are when we've propagated the address of a decl into the
	 object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
	 there requires that we create a new CALL_EXPR, and that requires
	 copying EH region info to the new node.  Easier to just do it
	 here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
	  && lang_hooks.fold_obj_type_ref
	  && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND
		     (OBJ_TYPE_REF_OBJECT (callee), 0)))
	{
	  tree t;

	  /* ??? Caution: Broken ADDR_EXPR semantics means that
	     looking at the type of the operand of the addr_expr
	     can yield an array type.  See silly exception in
	     check_pointer_types_r.  */
	  t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
	  t = lang_hooks.fold_obj_type_ref (callee, t);
	  if (t)
	    {
	      gimple_call_set_fn (stmt, t);
	      return true;
	    }
	}
    }

  return false;
}
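
/* An illustrative (hypothetical) C++ case: once earlier propagation has
   turned the object slot of a virtual call into the address of an object
   of known type, e.g.

     OBJ_TYPE_REF (fn_2; &d -> 0) (&d);

   the language hook can resolve the reference, leaving a direct call
   to the actual method of that type.  */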
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;

  bool changed = false;
  bool inside_addr_expr = false;

  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs != NULL_TREE)
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	stmt = gsi_stmt (*gsi);
	break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;
    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;

    default:
      break;
    }

  return changed;
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */

bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;

  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to __builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops;
	tree new_rhs;
	old_num_ops = gimple_num_ops (stmt);
	si = gsi_for_stmt (stmt);
	new_rhs = fold_gimple_assign (&si);
	if (new_rhs != NULL_TREE
	    && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
	  {
	    gimple_assign_set_rhs_from_tree (&si, new_rhs);
	    changed = true;
	  }
	gcc_assert (gsi_stmt (si) == stmt);
	break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    default:
      break;
    }

  return changed;
}
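
/* An illustrative (hypothetical) case: after propagating the address
   &b into a dereference, fold_stmt_inplace rewrites

     a_5 = *&b;	  into	 a_5 = b;

   touching only the operands, never replacing the statement itself.  */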
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
	  || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
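
/* An illustrative (hypothetical) block, simplified GIMPLE:

     saved_1 = __builtin_stack_save ();
     ...no calls or asm statements...
     __builtin_stack_restore (saved_1);
     __builtin_stack_restore (saved_0);

   The first restore has no observable effect before the second one,
   so it is deleted and its paired save is replaced by saved_1 = 0.  */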
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
			     1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
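
/* For illustration, on a hypothetical target whose va_list is a plain
   char * and which defines no expand_builtin_va_start hook:

     __builtin_va_start (&ap, 0);    becomes   ap = __builtin_next_arg (0);
     __builtin_va_copy (&dst, src);  becomes   dst = src;
     __builtin_va_end (&ap);         is deleted.  */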
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    new_stmt = gimple_build_nop ();
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      copy_virtual_operands (new_stmt, stmt);
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
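
/* An illustrative (hypothetical) case: if a call

     x_1 = strcpy (dst_2, src_3);

   has been folded to a non-trivial tree that is not a valid GIMPLE rhs,
   the sequence produced by the gimplifier is inserted before the call,
   and the call itself is replaced by an assignment

     x_1 = tmp_7;

   where tmp_7 holds the gimplified value.  */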
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = ccp_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  push_stmt_changes (gsi_stmt_ptr (&i));

	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_rebuild_alias;
	    }

	  stmt = gsi_stmt (i);
	  pop_stmt_changes (gsi_stmt_ptr (&i));

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};