2 Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
24 #include "coretypes.h"
28 #include "tree-inline.h"
34 #include "insn-config.h"
35 #include "integrate.h"
38 #include "pointer-set.h"
39 #include "splay-tree.h"
40 #include "langhooks.h"
43 #include "tree-mudflap.h"
44 #include "tree-flow.h"
46 #include "diagnostic.h"
49 /* I'm not real happy about this, but we need to handle gimple and
51 #include "tree-iterator.h"
52 #include "tree-gimple.h"
/* NOTE(review): lossy extraction -- the embedded original line numbers skip
   (56 -> 59), so the tail of this comment (incl. its closing marker) is
   missing from this view; verify against the upstream file.  */
54 /* 0 if we should not perform inlining.
55 1 if we should expand functions calls inline at the tree level.
56 2 if we should consider *all* functions to be inline
/* Global inlining mode switch read by inlinable_function_p below.  */
59 int flag_inline_trees = 0;
63 o In order to make inlining-on-trees work, we pessimized
64 function-local static constants. In particular, they are now
65 always output, even when not addressed. Fix this by treating
66 function-local static constants just like global static
67 constants; the back-end already knows not to output them if they
70 o Provide heuristics to clamp inlining of recursive template
73 /* Data required for function inlining. */
/* NOTE(review): several struct members are absent from this fragment (the
   embedded line numbers skip, e.g. 82 -> 84, 104 -> 106): at minimum the
   function stack itself, the return label/value decls, the decl map, and
   the cloning/saving flags referenced by the comments below.  Do not edit
   this struct without the complete upstream definition.  */
75 typedef struct inline_data
77 /* A stack of the functions we are inlining. For example, if we are
78 compiling `f', which calls `g', which calls `h', and we are
79 inlining the body of `h', the stack will contain, `h', followed
80 by `g', followed by `f'. The first few elements of the stack may
81 contain other functions that we know we should not recurse into,
82 even though they are not directly being inlined. */
84 /* The index of the first element of FNS that really represents an
86 unsigned first_inlined_fn;
87 /* The label to jump to when a return statement is encountered. If
88 this value is NULL, then return statements will simply be
89 remapped as return statements, rather than as jumps. */
91 /* The VAR_DECL for the return value. */
93 /* The map from local declarations in the inlined function to
94 equivalents in the function into which it is being inlined. */
96 /* Nonzero if we are currently within the cleanup for a
98 int in_target_cleanup_p;
99 /* We use the same mechanism to build clones that we do to perform
100 inlining. However, there are a few places where we need to
101 distinguish between those two situations. This flag is true if
102 we are cloning, rather than inlining. */
104 /* Similarly for saving function body. */
106 /* Hash table used to prevent walk_tree from visiting the same node
107 umpteen million times. */
109 /* Callgraph node of function we are inlining into. */
110 struct cgraph_node *node;
111 /* Callgraph node of currently inlined function. */
112 struct cgraph_node *current_node;
113 /* Statement iterator. We need this so we can keep the tree in
114 gimple form when we insert the inlined function. It is not
115 used when we are not dealing with gimple trees. */
116 tree_stmt_iterator tsi;
121 /* The approximate number of instructions per statement. This number
122 need not be particularly accurate; it is used only to make
123 decisions about when a function is too big to inline. */
/* Heuristic scale factor only -- not a measured value.  */
124 #define INSNS_PER_STMT (10)
/* Forward declarations for the file-local helpers defined below.
   NOTE(review): this list is incomplete in this fragment -- the embedded
   line numbers skip (133 -> 135, 142 -> 144), so at least the trailing
   parameter line of initialize_inlined_parameters and one or more other
   prototypes are missing from view.  */
126 static tree copy_body_r (tree *, int *, void *);
127 static tree copy_body (inline_data *);
128 static tree expand_call_inline (tree *, int *, void *);
129 static void expand_calls_inline (tree *, inline_data *);
130 static bool inlinable_function_p (tree);
131 static tree remap_decl (tree, inline_data *);
132 static tree remap_type (tree, inline_data *);
133 static tree initialize_inlined_parameters (inline_data *, tree,
135 static void remap_block (tree *, inline_data *);
136 static tree remap_decls (tree, inline_data *);
137 static void copy_bind_expr (tree *, int *, inline_data *);
138 static tree mark_local_for_remap_r (tree *, int *, void *);
139 static void unsave_expr_1 (tree);
140 static tree unsave_r (tree *, int *, void *);
141 static void declare_inline_vars (tree bind_expr, tree vars);
142 static void remap_save_expr (tree *, void *, int *);
144 /* Insert a tree->tree mapping for ID. Despite the name suggests
145 that the trees should be variables, it is used for more than that. */
/* NOTE(review): the return-type line and braces of this function fall in
   the gaps of this extraction (148 -> 150); presumably `static void'.  */
148 insert_decl_map (inline_data *id, tree key, tree value)
150 splay_tree_insert (id->decl_map, (splay_tree_key) key,
151 (splay_tree_value) value);
153 /* Always insert an identity map as well. If we see this same new
154 node again, we won't want to duplicate it a second time. */
/* The identity entry makes later lookups of VALUE return VALUE itself.  */
156 splay_tree_insert (id->decl_map, (splay_tree_key) value,
157 (splay_tree_value) value);
160 /* Remap DECL during the copying of the BLOCK tree for the function.
161 We are only called to remap local variables in the current function. */
/* NOTE(review): interior lines are missing throughout this function (the
   embedded numbers skip, e.g. 167 -> 169, 219 -> 226), including the `if
   (n == NULL)' guard implied by the comment below and several closing
   braces.  The visible flow: look DECL up in id->decl_map; if absent,
   copy it with copy_decl_for_inlining, record the mapping, remap its
   type/size trees, handle FIELD_DECL offsets and anonymous aggregates;
   finally return the (unshared) mapped value.  */
164 remap_decl (tree decl, inline_data *id)
166 splay_tree_node n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
167 tree fn = VARRAY_TOP_TREE (id->fns);
169 /* See if we have remapped this declaration. If we didn't already have an
170 equivalent for this declaration, create one now. */
173 /* Make a copy of the variable or label. */
174 tree t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));
176 /* Remember it, so that if we encounter this local entity again
177 we can reuse this copy. Do this early because remap_type may
178 need this decl for TYPE_STUB_DECL. */
179 insert_decl_map (id, decl, t);
181 /* Remap types, if necessary. */
182 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
183 if (TREE_CODE (t) == TYPE_DECL)
184 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
185 else if (TREE_CODE (t) == PARM_DECL)
186 DECL_ARG_TYPE_AS_WRITTEN (t)
187 = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
189 /* Remap sizes as necessary. */
190 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
191 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
193 /* If fields, do likewise for offset and qualifier. */
194 if (TREE_CODE (t) == FIELD_DECL)
196 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
197 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
198 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
202 /* FIXME handle anon aggrs. */
203 if (! DECL_NAME (t) && TREE_TYPE (t)
204 && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
206 /* For a VAR_DECL of anonymous type, we must also copy the
207 member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS. */
211 for (src = DECL_ANON_UNION_ELEMS (t); src;
212 src = TREE_CHAIN (src))
214 tree member = remap_decl (TREE_VALUE (src), id);
216 gcc_assert (!TREE_PURPOSE (src));
217 members = tree_cons (NULL, member, members);
219 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
/* Unshare so callers can splice the mapped tree without sharing issues.  */
226 return unshare_expr ((tree) n->value);
/* Remap TYPE for the inlined/cloned body described by ID.  Only types that
   are variably modified by a variable of the source function need a real
   copy; everything else is mapped to itself.
   NOTE(review): fragmentary -- the return-type line, several `return'
   statements, case labels and closing braces are missing from this view
   (the embedded numbers skip, e.g. 232 -> 238, 298 -> 305).  */
230 remap_type (tree type, inline_data *id)
232 splay_tree_node node;
238 /* See if we have remapped this type. */
239 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
241 return (tree) node->value;
243 /* The type only needs remapping if it's variably modified by a variable
244 in the function we are inlining. */
245 if (! variably_modified_type_p (type, VARRAY_TOP_TREE (id->fns)))
/* Identity mapping: subsequent lookups short-circuit to TYPE itself.  */
247 insert_decl_map (id, type, type);
251 /* We do need a copy. build and register it now. If this is a pointer or
252 reference type, remap the designated type and make a new pointer or
254 if (TREE_CODE (type) == POINTER_TYPE)
256 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
258 TYPE_REF_CAN_ALIAS_ALL (type));
259 insert_decl_map (id, type, new);
262 else if (TREE_CODE (type) == REFERENCE_TYPE)
264 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
266 TYPE_REF_CAN_ALIAS_ALL (type));
267 insert_decl_map (id, type, new);
271 new = copy_node (type);
273 insert_decl_map (id, type, new);
275 /* This is a new type, not a copy of an old type. Need to reassociate
276 variants. We can handle everything except the main variant lazily. */
277 t = TYPE_MAIN_VARIANT (type);
280 t = remap_type (t, id);
281 TYPE_MAIN_VARIANT (new) = t;
282 TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
283 TYPE_NEXT_VARIANT (t) = new;
287 TYPE_MAIN_VARIANT (new) = new;
288 TYPE_NEXT_VARIANT (new) = NULL;
291 if (TYPE_STUB_DECL (type))
292 TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);
294 /* Lazily create pointer and reference types. */
295 TYPE_POINTER_TO (new) = NULL;
296 TYPE_REFERENCE_TO (new) = NULL;
/* Per-code fixups; several case labels are lost in the extraction gaps.  */
298 switch (TREE_CODE (new))
305 t = TYPE_MIN_VALUE (new);
306 if (t && TREE_CODE (t) != INTEGER_CST)
307 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
309 t = TYPE_MAX_VALUE (new);
310 if (t && TREE_CODE (t) != INTEGER_CST)
311 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
315 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
316 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
320 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
321 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
326 case QUAL_UNION_TYPE:
327 walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
333 /* Shouldn't have been thought variable sized. */
337 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
338 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
/* Remap a TREE_CHAIN of local declarations DECLS via remap_decl, building
   and returning a new chain in the original order (built reversed, then
   nreverse'd).  Decls that map to nothing or to id->retvar are skipped.
   NOTE(review): braces/`continue' lines are missing in this extraction
   (numbers skip 360 -> 364).  */
344 remap_decls (tree decls, inline_data *id)
347 tree new_decls = NULL_TREE;
349 /* Remap its variables. */
350 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
354 /* Remap the variable. */
355 new_var = remap_decl (old_var, id);
357 /* If we didn't remap this variable, so we can't mess with its
358 TREE_CHAIN. If we remapped this variable to the return slot, it's
359 already declared somewhere else, so don't declare it here. */
360 if (!new_var || new_var == id->retvar)
364 gcc_assert (DECL_P (new_var));
365 TREE_CHAIN (new_var) = new_decls;
370 return nreverse (new_decls);
373 /* Copy the BLOCK to contain remapped versions of the variables
374 therein. And hook the new block into the block-tree. */
/* NOTE(review): interior lines missing (e.g. the old_block/new_block
   declarations around 383-385 and the cloning_p test implied by the
   comments); grounded flow: make a new BLOCK, remap its vars, splice it
   after DECL_INITIAL of the outermost function, record the mapping.  */
377 remap_block (tree *block, inline_data *id)
383 /* Make the new block. */
385 new_block = make_node (BLOCK);
386 TREE_USED (new_block) = TREE_USED (old_block);
387 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
390 /* Remap its variables. */
391 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
393 fn = VARRAY_TREE (id->fns, 0);
395 /* FIXME! It shouldn't be so hard to manage blocks. Rebuilding them in
396 rest_of_compilation is a good start. */
398 /* We're building a clone; DECL_INITIAL is still
399 error_mark_node, and current_binding_level is the parm
401 lang_hooks.decls.insert_block (new_block);
404 /* Attach this new block after the DECL_INITIAL block for the
405 function into which this block is being inlined. In
406 rest_of_compilation we will straighten out the BLOCK tree. */
408 if (DECL_INITIAL (fn))
409 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
411 first_block = &DECL_INITIAL (fn);
412 BLOCK_CHAIN (new_block) = *first_block;
413 *first_block = new_block;
416 /* Remember the remapped block. */
417 insert_decl_map (id, old_block, new_block);
/* Replace *TP with a fresh STATEMENT_LIST containing the same statements.
   NOTE(review): the return-type line and the `*tp = new;' store implied
   between lines 428 and 431 are missing from this extraction.  */
421 copy_statement_list (tree *tp)
423 tree_stmt_iterator oi, ni;
426 new = alloc_stmt_list ();
427 ni = tsi_start (new);
428 oi = tsi_start (*tp);
431 for (; !tsi_end_p (oi); tsi_next (&oi))
432 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
/* Copy a BIND_EXPR at *TP, remapping its BLOCK and its local variable
   chain.  NOTE(review): guard conditions between lines 440/443 and
   448/450 are lost in the extraction gaps.  */
436 copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
438 tree block = BIND_EXPR_BLOCK (*tp);
439 /* Copy (and replace) the statement. */
440 copy_tree_r (tp, walk_subtrees, NULL);
443 remap_block (&block, id);
444 BIND_EXPR_BLOCK (*tp) = block;
447 if (BIND_EXPR_VARS (*tp))
448 /* This will remap a lot of the same decls again, but this should be
450 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
453 /* Called from copy_body via walk_tree. DATA is really an `inline_data *'. */
/* NOTE(review): this is the central walk_tree callback; it is heavily
   fragmented here (dozens of interior lines missing, incl. most braces,
   `else' arms and `return' statements).  Visible responsibilities:
   RETURN_EXPR -> assignment + goto to id->ret_label; local decls ->
   remap_decl; STATEMENT_LIST/SAVE_EXPR/BIND_EXPR -> dedicated copiers;
   types/constants -> remap_type; VAR = VAR and *&x simplifications;
   CALL_EXPR -> callgraph edge bookkeeping; TARGET_EXPR and ADDR_EXPR
   fixups.  Treat the ordering as load-bearing; do not reorder.  */
456 copy_body_r (tree *tp, int *walk_subtrees, void *data)
458 inline_data *id = (inline_data *) data;
459 tree fn = VARRAY_TOP_TREE (id->fns);
462 /* All automatic variables should have a DECL_CONTEXT indicating
463 what function they come from. */
464 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
465 && DECL_NAMESPACE_SCOPE_P (*tp))
466 gcc_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp));
469 /* If this is a RETURN_EXPR, change it into a MODIFY_EXPR and a
470 GOTO_EXPR with the RET_LABEL as its target. */
471 if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
473 tree return_stmt = *tp;
476 /* Build the GOTO_EXPR. */
477 tree assignment = TREE_OPERAND (return_stmt, 0);
478 goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
479 TREE_USED (id->ret_label) = 1;
481 /* If we're returning something, just turn that into an
482 assignment into the equivalent of the original
486 /* Do not create a statement containing a naked RESULT_DECL. */
487 if (TREE_CODE (assignment) == RESULT_DECL)
488 gimplify_stmt (&assignment);
490 *tp = build (BIND_EXPR, void_type_node, NULL, NULL, NULL);
491 append_to_statement_list (assignment, &BIND_EXPR_BODY (*tp));
492 append_to_statement_list (goto_stmt, &BIND_EXPR_BODY (*tp));
494 /* If we're not returning anything just do the jump. */
498 /* Local variables and labels need to be replaced by equivalent
499 variables. We don't want to copy static variables; there's only
500 one of those, no matter how many times we inline the containing
501 function. Similarly for globals from an outer function. */
502 else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
506 /* Remap the declaration. */
507 new_decl = remap_decl (*tp, id);
508 gcc_assert (new_decl);
509 /* Replace this variable with the copy. */
510 STRIP_TYPE_NOPS (new_decl);
514 else if (TREE_CODE (*tp) == STATEMENT_LIST)
515 copy_statement_list (tp);
516 else if (TREE_CODE (*tp) == SAVE_EXPR)
517 remap_save_expr (tp, id->decl_map, walk_subtrees);
518 else if (TREE_CODE (*tp) == BIND_EXPR)
519 copy_bind_expr (tp, walk_subtrees, id);
520 /* Types may need remapping as well. */
521 else if (TYPE_P (*tp))
522 *tp = remap_type (*tp, id);
524 /* If this is a constant, we have to copy the node iff the type will be
525 remapped. copy_tree_r will not copy a constant. */
526 else if (TREE_CODE_CLASS (TREE_CODE (*tp)) == tcc_constant)
528 tree new_type = remap_type (TREE_TYPE (*tp), id);
530 if (new_type == TREE_TYPE (*tp))
533 else if (TREE_CODE (*tp) == INTEGER_CST)
534 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
535 TREE_INT_CST_HIGH (*tp));
538 *tp = copy_node (*tp);
539 TREE_TYPE (*tp) = new_type;
543 /* Otherwise, just copy the node. Note that copy_tree_r already
544 knows not to copy VAR_DECLs, etc., so this is safe. */
549 if (TREE_CODE (*tp) == MODIFY_EXPR
550 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
551 && (lang_hooks.tree_inlining.auto_var_in_fn_p
552 (TREE_OPERAND (*tp, 0), fn)))
554 /* Some assignments VAR = VAR; don't generate any rtl code
555 and thus don't count as variable modification. Avoid
556 keeping bogosities like 0 = 0. */
557 tree decl = TREE_OPERAND (*tp, 0), value;
560 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
563 value = (tree) n->value;
564 STRIP_TYPE_NOPS (value);
565 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
567 *tp = build_empty_stmt ();
568 return copy_body_r (tp, walk_subtrees, data);
572 else if (TREE_CODE (*tp) == INDIRECT_REF)
574 /* Get rid of *& from inline substitutions that can happen when a
575 pointer argument is an ADDR_EXPR. */
576 tree decl = TREE_OPERAND (*tp, 0), value;
579 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
582 value = (tree) n->value;
584 if (TREE_CODE (value) == ADDR_EXPR
585 && (lang_hooks.types_compatible_p
586 (TREE_TYPE (*tp), TREE_TYPE (TREE_OPERAND (value, 0)))))
588 *tp = TREE_OPERAND (value, 0);
589 return copy_body_r (tp, walk_subtrees, data);
594 copy_tree_r (tp, walk_subtrees, NULL);
/* Keep callgraph edges of clones in sync with the copied CALL_EXPR.  */
596 if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
600 struct cgraph_node *node;
601 struct cgraph_edge *edge;
603 for (node = id->node->next_clone; node; node = node->next_clone)
605 edge = cgraph_edge (node, old_node);
607 edge->call_expr = *tp;
612 struct cgraph_edge *edge
613 = cgraph_edge (id->current_node, old_node);
616 cgraph_clone_edge (edge, id->node, *tp);
620 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
622 /* The copied TARGET_EXPR has never been expanded, even if the
623 original node was expanded already. */
624 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
626 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
627 TREE_OPERAND (*tp, 3) = NULL_TREE;
630 /* Variable substitution need not be simple. In particular, the
631 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
632 and friends are up-to-date. */
633 else if (TREE_CODE (*tp) == ADDR_EXPR)
635 walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
636 recompute_tree_invarant_for_addr_expr (*tp);
641 /* Keep iterating. */
645 /* Make a copy of the body of FN so that it can be inserted inline in
/* NOTE(review): the rest of this comment, the return-type line, the
   condition on line ~655 and the trailing `return body;' are absent
   from this extraction.  Visible logic: pick cfun->saved_tree for the
   current function (presumably when it has a saved body -- confirm
   upstream), otherwise DECL_SAVED_TREE, then deep-copy via walk_tree
   with copy_body_r.  */
649 copy_body (inline_data *id)
652 tree fndecl = VARRAY_TOP_TREE (id->fns);
654 if (fndecl == current_function_decl
656 body = cfun->saved_tree;
658 body = DECL_SAVED_TREE (fndecl);
659 walk_tree (&body, copy_body_r, id, NULL);
664 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
665 defined in function FN, or of a data member thereof. */
/* NOTE(review): return-type line and an early `return false;' (between
   672 and 675) are missing in this view.  */
668 self_inlining_addr_expr (tree value, tree fn)
672 if (TREE_CODE (value) != ADDR_EXPR)
675 var = get_base_address (TREE_OPERAND (value, 0));
677 return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
/* Set up one inlined parameter P with actual argument VALUE for callee FN:
   either record a direct P -> VALUE substitution (read-only, invariant,
   non-self-referential case) or create a local VAR_DECL copy, map P to it,
   chain it onto *VARS and append an initializing MODIFY_EXPR to
   *INIT_STMTS, flagging *GIMPLIFY_INIT_STMTS_P when the RHS is not a
   gimple value.  NOTE(review): fragmentary -- early `return's, several
   braces and the `if (value)' guard around the initializer are missing
   from this extraction.  */
681 setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
682 tree *init_stmts, tree *vars, bool *gimplify_init_stmts_p)
687 /* If the parameter is never assigned to, we may not need to
688 create a new variable here at all. Instead, we may be able
689 to just use the argument value. */
690 if (TREE_READONLY (p)
691 && !TREE_ADDRESSABLE (p)
692 && value && !TREE_SIDE_EFFECTS (value))
694 /* We can't risk substituting complex expressions. They
695 might contain variables that will be assigned to later.
696 Theoretically, we could check the expression to see if
697 all of the variables that determine its value are
698 read-only, but we don't bother. */
699 /* We may produce non-gimple trees by adding NOPs or introduce
700 invalid sharing when operand is not really constant.
701 It is not big deal to prohibit constant propagation here as
702 we will constant propagate in DOM1 pass anyway. */
703 if (is_gimple_min_invariant (value)
704 && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
705 /* We have to be very careful about ADDR_EXPR. Make sure
706 the base variable isn't a local variable of the inlined
707 function, e.g., when doing recursive inlining, direct or
708 mutually-recursive or whatever, which is why we don't
709 just test whether fn == current_function_decl. */
710 && ! self_inlining_addr_expr (value, fn))
712 insert_decl_map (id, p, value);
717 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
718 here since the type of this decl must be visible to the calling
720 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
722 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
723 that way, when the PARM_DECL is encountered, it will be
724 automatically replaced by the VAR_DECL. */
725 insert_decl_map (id, p, var);
727 /* Declare this new variable. */
728 TREE_CHAIN (var) = *vars;
731 /* Make gimplifier happy about this variable. */
732 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
734 /* Even if P was TREE_READONLY, the new VAR should not be.
735 In the original code, we would have constructed a
736 temporary, and then the function body would have never
737 changed the value of P. However, now, we will be
738 constructing VAR directly. The constructor body may
739 change its value multiple times as it is being
740 constructed. Therefore, it must not be TREE_READONLY;
741 the back-end assumes that TREE_READONLY variable is
742 assigned to only once. */
743 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
744 TREE_READONLY (var) = 0;
746 /* Initialize this VAR_DECL from the equivalent argument. Convert
747 the argument to the proper type in case it was promoted. */
750 tree rhs = fold_convert (TREE_TYPE (var), value);
752 if (rhs == error_mark_node)
755 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
756 keep our trees in gimple form. */
757 init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
758 append_to_statement_list (init_stmt, init_stmts);
760 /* If we did not create a gimple value and we did not create a gimple
761 cast of a gimple value, then we will need to gimplify INIT_STMTS
762 at the end. Note that is_gimple_cast only checks the outer
763 tree code, not its operand. Thus the explicit check that it's
764 operand is a gimple value. */
765 if (!is_gimple_val (rhs)
766 && (!is_gimple_cast (rhs)
767 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
768 *gimplify_init_stmts_p = true;
772 /* Generate code to initialize the parameters of the function at the
773 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
/* NOTE(review): fragmentary -- the return-type line, local declarations,
   the argnum counter, several braces and the final return of INIT_STMTS
   are missing from this extraction.  */
776 initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
777 tree fn, tree bind_expr)
779 tree init_stmts = NULL_TREE;
783 tree vars = NULL_TREE;
784 bool gimplify_init_stmts_p = false;
787 /* Figure out what the parameters are. */
788 parms = DECL_ARGUMENTS (fn);
789 if (fn == current_function_decl)
790 parms = cfun->saved_args;
792 /* Loop through the parameter declarations, replacing each with an
793 equivalent VAR_DECL, appropriately initialized. */
794 for (p = parms, a = args; p;
795 a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
801 /* Find the initializer. */
802 value = lang_hooks.tree_inlining.convert_parm_for_inlining
803 (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
805 setup_one_parameter (id, p, value, fn, &init_stmts, &vars,
806 &gimplify_init_stmts_p);
809 /* Evaluate trailing arguments. */
810 for (; a; a = TREE_CHAIN (a))
812 tree value = TREE_VALUE (a);
813 append_to_statement_list (value, &init_stmts);
816 /* Initialize the static chain. */
817 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
818 if (fn == current_function_decl)
819 p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
822 /* No static chain? Seems like a bug in tree-nested.c. */
823 gcc_assert (static_chain);
825 setup_one_parameter (id, p, static_chain, fn, &init_stmts, &vars,
826 &gimplify_init_stmts_p);
829 if (gimplify_init_stmts_p)
830 gimplify_body (&init_stmts, current_function_decl, false);
832 declare_inline_vars (bind_expr, vars);
836 /* Declare a return variable to replace the RESULT_DECL for the function we
837 are calling. RETURN_SLOT_ADDR, if non-null, was a fake parameter that
838 took the address of the result. MODIFY_DEST, if non-null, was the LHS of
839 the MODIFY_EXPR to which this call is the RHS.
841 The return value is a (possibly null) value that is the result of the
842 function as seen by the callee. *USE_P is a (possibly null) value that
843 holds the result as seen by the caller. */
/* NOTE(review): fragmentary -- the return-type line, several `goto
   done'/`return' paths, the `use = var;' default and the final labelled
   return are lost in the extraction gaps (e.g. 902 -> 913, 935 -> 937).  */
846 declare_return_variable (inline_data *id, tree return_slot_addr,
847 tree modify_dest, tree *use_p)
849 tree callee = VARRAY_TOP_TREE (id->fns);
850 tree caller = VARRAY_TREE (id->fns, 0);
851 tree result = DECL_RESULT (callee);
852 tree callee_type = TREE_TYPE (result);
853 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
856 /* We don't need to do anything for functions that don't return
858 if (!result || VOID_TYPE_P (callee_type))
864 /* If there was a return slot, then the return value is the
865 dereferenced address of that object. */
866 if (return_slot_addr)
868 /* The front end shouldn't have used both return_slot_addr and
869 a modify expression. */
870 gcc_assert (!modify_dest);
871 if (DECL_BY_REFERENCE (result))
872 var = return_slot_addr;
874 var = build_fold_indirect_ref (return_slot_addr);
879 /* All types requiring non-trivial constructors should have been handled. */
880 gcc_assert (!TREE_ADDRESSABLE (callee_type));
882 /* Attempt to avoid creating a new temporary variable. */
887 /* We can't use MODIFY_DEST if there's type promotion involved. */
888 if (!lang_hooks.types_compatible_p (caller_type, callee_type))
891 /* ??? If we're assigning to a variable sized type, then we must
892 reuse the destination variable, because we've no good way to
893 create variable sized temporaries at this point. */
894 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
897 /* If the callee cannot possibly modify MODIFY_DEST, then we can
898 reuse it as the result of the call directly. Don't do this if
899 it would promote MODIFY_DEST to addressable. */
900 else if (!TREE_STATIC (modify_dest)
901 && !TREE_ADDRESSABLE (modify_dest)
902 && !TREE_ADDRESSABLE (result))
913 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
915 var = copy_decl_for_inlining (result, callee, caller);
916 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
917 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
918 = tree_cons (NULL_TREE, var,
919 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
921 /* Do not have the rest of GCC warn about this variable as it should
922 not be visible to the user. */
923 TREE_NO_WARNING (var) = 1;
925 /* Build the use expr. If the return type of the function was
926 promoted, convert it back to the expected type. */
928 if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
929 use = fold_convert (caller_type, var);
932 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
933 way, when the RESULT_DECL is encountered, it will be
934 automatically replaced by the VAR_DECL. */
935 insert_decl_map (id, result, var);
937 /* Remember this so we can ignore it in remap_decls. */
944 /* Returns nonzero if a function can be inlined as a tree. */
/* Public wrapper over the static predicate below; return-type line and
   braces are missing from this extraction.  */
947 tree_inlinable_function_p (tree fn)
949 return inlinable_function_p (fn);
/* Set by inline_forbidden_p_1 to an i18n'd message explaining why a
   function cannot be inlined; consumed by inlinable_function_p.  */
952 static const char *inline_forbidden_reason;
/* walk_tree callback: return NODE (nonzero) and set
   inline_forbidden_reason if *NODEP makes function FNP un-inlinable
   (alloca, setjmp, varargs builtins, __builtin_longjmp, non-local goto,
   __builtin_return/apply_args, computed goto, non-local labels, variably
   modified record fields).  NOTE(review): fragmentary -- the return-type
   line, case labels (e.g. CALL_EXPR, GOTO_EXPR, LABEL_DECL), `return
   node;' statements, `break's and the trailing NULL_TREE return are all
   lost in the extraction gaps; the switch arms below are only partially
   visible.  */
955 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
959 tree fn = (tree) fnp;
962 switch (TREE_CODE (node))
965 /* Refuse to inline alloca call unless user explicitly forced so as
966 this may change program's memory overhead drastically when the
967 function using alloca is called in loop. In GCC present in
968 SPEC2000 inlining into schedule_block cause it to require 2GB of
969 RAM instead of 256MB. */
970 if (alloca_call_p (node)
971 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
973 inline_forbidden_reason
974 = N_("%Jfunction %qF can never be inlined because it uses "
975 "alloca (override using the always_inline attribute)");
978 t = get_callee_fndecl (node);
982 /* We cannot inline functions that call setjmp. */
983 if (setjmp_call_p (t))
985 inline_forbidden_reason
986 = N_("%Jfunction %qF can never be inlined because it uses setjmp");
990 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
991 switch (DECL_FUNCTION_CODE (t))
993 /* We cannot inline functions that take a variable number of
995 case BUILT_IN_VA_START:
996 case BUILT_IN_STDARG_START:
997 case BUILT_IN_NEXT_ARG:
998 case BUILT_IN_VA_END:
999 inline_forbidden_reason
1000 = N_("%Jfunction %qF can never be inlined because it "
1001 "uses variable argument lists");
1004 case BUILT_IN_LONGJMP:
1005 /* We can't inline functions that call __builtin_longjmp at
1006 all. The non-local goto machinery really requires the
1007 destination be in a different function. If we allow the
1008 function calling __builtin_longjmp to be inlined into the
1009 function calling __builtin_setjmp, Things will Go Awry. */
1010 inline_forbidden_reason
1011 = N_("%Jfunction %qF can never be inlined because "
1012 "it uses setjmp-longjmp exception handling");
1015 case BUILT_IN_NONLOCAL_GOTO:
1017 inline_forbidden_reason
1018 = N_("%Jfunction %qF can never be inlined because "
1019 "it uses non-local goto");
1022 case BUILT_IN_RETURN:
1023 case BUILT_IN_APPLY_ARGS:
1024 /* If a __builtin_apply_args caller would be inlined,
1025 it would be saving arguments of the function it has
1026 been inlined into. Similarly __builtin_return would
1027 return from the function the inline has been inlined into. */
1028 inline_forbidden_reason
1029 = N_("%Jfunction %qF can never be inlined because "
1030 "it uses __builtin_return or __builtin_apply_args");
1039 t = TREE_OPERAND (node, 0);
1041 /* We will not inline a function which uses computed goto. The
1042 addresses of its local labels, which may be tucked into
1043 global storage, are of course not constant across
1044 instantiations, which causes unexpected behavior. */
1045 if (TREE_CODE (t) != LABEL_DECL)
1047 inline_forbidden_reason
1048 = N_("%Jfunction %qF can never be inlined "
1049 "because it contains a computed goto");
1055 t = TREE_OPERAND (node, 0);
1056 if (DECL_NONLOCAL (t))
1058 /* We cannot inline a function that receives a non-local goto
1059 because we cannot remap the destination label used in the
1060 function that is performing the non-local goto. */
1061 inline_forbidden_reason
1062 = N_("%Jfunction %qF can never be inlined "
1063 "because it receives a non-local goto");
1070 /* We cannot inline a function of the form
1072 void F (int i) { struct S { int ar[i]; } s; }
1074 Attempting to do so produces a catch-22.
1075 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1076 UNION_TYPE nodes, then it goes into infinite recursion on a
1077 structure containing a pointer to its own type. If it doesn't,
1078 then the type node for S doesn't get adjusted properly when
1079 F is inlined, and we abort in find_function_data.
1081 ??? This is likely no longer true, but it's too late in the 4.0
1082 cycle to try to find out. This should be checked for 4.1. */
1083 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1084 if (variably_modified_type_p (TREE_TYPE (t), NULL))
1086 inline_forbidden_reason
1087 = N_("%Jfunction %qF can never be inlined "
1088 "because it uses variable sized variables");
1099 /* Return subexpression representing possible alloca call, if any. */
/* Walks FNDECL's saved body with inline_forbidden_p_1, preserving
   input_location across the walk.  NOTE(review): the return-type line
   and the `return ret;' (after 1107) are missing from this view.  */
1101 inline_forbidden_p (tree fndecl)
1103 location_t saved_loc = input_location;
1104 tree ret = walk_tree_without_duplicates (&DECL_SAVED_TREE (fndecl),
1105 inline_forbidden_p_1, fndecl);
1107 input_location = saved_loc;
1111 /* Returns nonzero if FN is a function that does not have any
1112 fundamental inline blocking properties. */
/* NOTE(review): fragmentary -- the return-type line, several
   `return'/`inlinable = false' statements between the else-if arms, and
   the final `return inlinable;' are lost in the extraction gaps.  */
1115 inlinable_function_p (tree fn)
1117 bool inlinable = true;
1119 /* If we've already decided this function shouldn't be inlined,
1120 there's no need to check again. */
1121 if (DECL_UNINLINABLE (fn))
1124 /* See if there is any language-specific reason it cannot be
1125 inlined. (It is important that this hook be called early because
1126 in C++ it may result in template instantiation.)
1127 If the function is not inlinable for language-specific reasons,
1128 it is left up to the langhook to explain why. */
1129 inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
1131 /* If we don't have the function body available, we can't inline it.
1132 However, this should not be recorded since we also get here for
1133 forward declared inline functions. Therefore, return at once. */
1134 if (!DECL_SAVED_TREE (fn))
1137 /* If we're not inlining at all, then we cannot inline this function. */
1138 else if (!flag_inline_trees)
1141 /* Only try to inline functions if DECL_INLINE is set. This should be
1142 true for all functions declared `inline', and for all other functions
1143 as well with -finline-functions.
1145 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1146 it's the front-end that must set DECL_INLINE in this case, because
1147 dwarf2out loses if a function that does not have DECL_INLINE set is
1148 inlined anyway. That is why we have both DECL_INLINE and
1149 DECL_DECLARED_INLINE_P. */
1150 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1151 here should be redundant. */
1152 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1155 else if (inline_forbidden_p (fn))
1157 /* See if we should warn about uninlinable functions. Previously,
1158 some of these warnings would be issued while trying to expand
1159 the function inline, but that would cause multiple warnings
1160 about functions that would for example call alloca. But since
1161 this a property of the function, just one warning is enough.
1162 As a bonus we can now give more details about the reason why a
1163 function is not inlinable.
1164 We only warn for functions declared `inline' by the user. */
1165 bool do_warning = (warn_inline
1167 && DECL_DECLARED_INLINE_P (fn)
1168 && !DECL_IN_SYSTEM_HEADER (fn));
1170 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1171 sorry (inline_forbidden_reason, fn, fn);
1172 else if (do_warning)
1173 warning (inline_forbidden_reason, fn, fn);
1178 /* Squirrel away the result so that we don't have to check again. */
1179 DECL_UNINLINABLE (fn) = !inlinable;
1184 /* Estimate the cost of a memory move. Use machine dependent
1185 word size and take possible memcpy call into account. */
/* Returns an instruction-count estimate for copying a value of TYPE:
   roughly the number of MOVE_MAX_PIECES-sized chunks, or a fixed memcpy
   cost for very large or variable-sized types (size < 0).  */
1188 estimate_move_cost (tree type)
1192 size = int_size_in_bytes (type);
/* size < 0 means the size is not a compile-time constant.  */
1194 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1195 /* Cost of a memcpy call, 3 arguments and the call. */
/* Otherwise, round up to whole machine-word-sized pieces.  */
1198 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
1201 /* Used by estimate_num_insns. Estimate number of instructions seen
1202 by given statement. */
/* walk_tree callback: accumulates an instruction-count estimate for *TP
   into the int pointed to by DATA.  Cheap/free nodes add nothing; most
   operations add 1; divisions and calls add more.  */
1205 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
/* Types and declarations generate no code.  */
1210 if (IS_TYPE_OR_DECL_P (x))
1215 /* Assume that constants and references count nothing. These should
1216 be majorized by amount of operations among them we count later
1217 and are common target of CSE and similar optimizations. */
1218 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
1221 switch (TREE_CODE (x))
1223 /* Containers have no cost. */
1230 case ALIGN_INDIRECT_REF:
1231 case MISALIGNED_INDIRECT_REF:
1233 case ARRAY_RANGE_REF:
1235 case EXC_PTR_EXPR: /* ??? */
1236 case FILTER_EXPR: /* ??? */
1239 case WITH_CLEANUP_EXPR:
1241 case VIEW_CONVERT_EXPR:
1246 case CASE_LABEL_EXPR:
1249 case EH_FILTER_EXPR:
1250 case STATEMENT_LIST:
1252 case NON_LVALUE_EXPR:
1255 case TRY_CATCH_EXPR:
1256 case TRY_FINALLY_EXPR:
1263 case WITH_SIZE_EXPR:
1266 /* We don't account constants for now. Assume that the cost is amortized
1267 by operations that do use them. We may re-consider this decision once
1268 we are able to optimize the tree before estimating its size and break
1269 out static initializers. */
1270 case IDENTIFIER_NODE:
1279 /* Try to estimate the cost of assignments. We have three cases to
1281 1) Simple assignments to registers;
1282 2) Stores to things that must live in memory. This includes
1283 "normal" stores to scalars, but also assignments of large
1284 structures, or constructors of big arrays;
1287 Let us look at the first two cases, assuming we have "a = b + C":
1288 <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
1289 If "a" is a GIMPLE register, the assignment to it is free on almost
1290 any target, because "a" usually ends up in a real register. Hence
1291 the only cost of this expression comes from the PLUS_EXPR, and we
1292 can ignore the MODIFY_EXPR.
1293 If "a" is not a GIMPLE register, the assignment to "a" will most
1294 likely be a real store, so the cost of the MODIFY_EXPR is the cost
1295 of moving something into "a", which we compute using the function
1298 The third case deals with TARGET_EXPRs, for which the semantics are
1299 that a temporary is assigned, unless the TARGET_EXPR itself is being
1300 assigned to something else. In the latter case we do not need the
1301 temporary. E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
1302 MODIFY_EXPR is free. */
1305 /* Is the right hand side a TARGET_EXPR? */
1306 if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
1308 /* ... fall through ... */
1311 x = TREE_OPERAND (x, 0);
1312 /* Is this an assignment to a register? */
1313 if (is_gimple_reg (x))
1315 /* Otherwise it's a store, so fall through to compute the move cost. */
1318 *count += estimate_move_cost (TREE_TYPE (x));
1321 /* Assign cost of 1 to usual operations.
1322 ??? We may consider mapping RTL costs to this. */
1329 case FIX_TRUNC_EXPR:
1331 case FIX_FLOOR_EXPR:
1332 case FIX_ROUND_EXPR:
1350 case TRUTH_ANDIF_EXPR:
1351 case TRUTH_ORIF_EXPR:
1352 case TRUTH_AND_EXPR:
1354 case TRUTH_XOR_EXPR:
1355 case TRUTH_NOT_EXPR:
1364 case UNORDERED_EXPR:
1377 case PREDECREMENT_EXPR:
1378 case PREINCREMENT_EXPR:
1379 case POSTDECREMENT_EXPR:
1380 case POSTINCREMENT_EXPR:
1386 case REALIGN_LOAD_EXPR:
1392 /* Few special cases of expensive operations. This is useful
1393 to avoid inlining on functions having too many of these. */
1394 case TRUNC_DIV_EXPR:
1396 case FLOOR_DIV_EXPR:
1397 case ROUND_DIV_EXPR:
1398 case EXACT_DIV_EXPR:
1399 case TRUNC_MOD_EXPR:
1401 case FLOOR_MOD_EXPR:
1402 case ROUND_MOD_EXPR:
/* CALL_EXPR: some builtins (constant_p, expect) are folded away and
   cost nothing; real calls pay per-argument move costs plus a fixed
   call cost.  */
1408 tree decl = get_callee_fndecl (x);
1411 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
1412 switch (DECL_FUNCTION_CODE (decl))
1414 case BUILT_IN_CONSTANT_P:
1417 case BUILT_IN_EXPECT:
1423 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining
1424 that does use function declaration to figure out the arguments. */
1427 for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
1428 *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
/* Fallback when actual argument list is unavailable: use the formal
   parameters from the declaration instead.  */
1432 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1433 *count += estimate_move_cost (TREE_TYPE (arg));
1436 *count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
1440 /* Abort here so we know we don't miss any nodes. */
1446 /* Estimate number of instructions that will be created by expanding EXPR. */
/* Thin wrapper: runs estimate_num_insns_1 over every distinct node of
   EXPR, accumulating into a local counter which is then returned.  */
1449 estimate_num_insns (tree expr)
1452 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1456 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
/* walk_tree callback (DATA is the inline_data context).  Identifies the
   callee, checks that inlining is allowed via the callgraph, then splices
   a BIND_EXPR containing parameter initializations, a copy of the callee
   body, and a return label in place of the call.  Recurses into the
   inlined body afterwards.
   NOTE(review): this excerpt omits many intermediate lines (declarations,
   braces, returns) -- read alongside the complete file.  */
1459 expand_call_inline (tree *tp, int *walk_subtrees, void *data)
1472 tree return_slot_addr;
1474 location_t saved_location;
1475 struct cgraph_edge *edge;
1478 /* See what we've got. */
1479 id = (inline_data *) data;
1482 /* Set input_location here so we get the right instantiation context
1483 if we call instantiate_decl from inlinable_function_p. */
1484 saved_location = input_location;
1485 if (EXPR_HAS_LOCATION (t))
1486 input_location = EXPR_LOCATION (t);
1488 /* Recurse, but letting recursive invocations know that we are
1489 inside the body of a TARGET_EXPR. */
1490 if (TREE_CODE (*tp) == TARGET_EXPR)
1493 int i, len = TREE_CODE_LENGTH (TARGET_EXPR);
1495 /* We're walking our own subtrees. */
1498 /* Actually walk over them. This loop is the body of
1499 walk_trees, omitting the case where the TARGET_EXPR
1500 itself is handled. */
1501 for (i = 0; i < len; ++i)
/* in_target_cleanup_p is a counter, not a flag: TARGET_EXPRs nest.  */
1504 ++id->in_target_cleanup_p;
1505 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
1508 --id->in_target_cleanup_p;
1516 /* Because types were not copied in copy_body, CALL_EXPRs beneath
1517 them should not be expanded. This can happen if the type is a
1518 dynamic array type, for example. */
1521 /* From here on, we're only interested in CALL_EXPRs. */
1522 if (TREE_CODE (t) != CALL_EXPR)
1525 /* First, see if we can figure out what function is being called.
1526 If we cannot, then there is no hope of inlining the function. */
1527 fn = get_callee_fndecl (t);
1531 /* Turn forward declarations into real ones. */
1532 fn = cgraph_node (fn)->decl;
1534 /* If fn is a declaration of a function in a nested scope that was
1535 globally declared inline, we don't set its DECL_INITIAL.
1536 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1537 C++ front-end uses it for cdtors to refer to their internal
1538 declarations, that are not real functions. Fortunately those
1539 don't have trees to be saved, so we can tell by checking their
1541 if (! DECL_INITIAL (fn)
1542 && DECL_ABSTRACT_ORIGIN (fn)
1543 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1544 fn = DECL_ABSTRACT_ORIGIN (fn);
1546 /* Objective C and Fortran still call tree_rest_of_compilation directly.
1547 Kill this check once this is fixed. */
1548 if (!id->current_node->analyzed)
1551 edge = cgraph_edge (id->current_node, t);
1553 /* Constant propagation on argument done during previous inlining
1554 may create new direct call. Produce an edge for it. */
1557 struct cgraph_node *dest = cgraph_node (fn);
1559 /* We have missing edge in the callgraph. This can happen in one case
1560 where previous inlining turned indirect call into direct call by
1561 constant propagating arguments. In all other cases we hit a bug
1562 (incorrect node sharing is most common reason for missing edges). */
1563 gcc_assert (dest->needed || !flag_unit_at_a_time);
1564 cgraph_create_edge (id->node, dest, t)->inline_failed
1565 = N_("originally indirect function call not considered for inlining");
1569 /* Don't try to inline functions that are not well-suited to
1571 if (!cgraph_inline_p (edge, &reason))
/* As in inlinable_function_p: an ignored always_inline is a hard
   "sorry", an ignored plain `inline' only a warning.  */
1573 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1575 sorry ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
1576 sorry ("called from here");
1578 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
1579 && !DECL_IN_SYSTEM_HEADER (fn)
1581 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)))
1583 warning ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
1584 warning ("called from here");
1589 #ifdef ENABLE_CHECKING
1590 if (edge->callee->decl != id->node->decl)
1591 verify_cgraph_node (edge->callee);
/* Give the front end a chance to veto the inlining.  */
1594 if (! lang_hooks.tree_inlining.start_inlining (fn))
1597 /* Build a block containing code to initialize the arguments, the
1598 actual inline expansion of the body, and a label for the return
1599 statements within the function to jump to. The type of the
1600 statement expression is the return type of the function call. */
1602 expr = build (BIND_EXPR, void_type_node, NULL_TREE,
1603 stmt, make_node (BLOCK));
1604 BLOCK_ABSTRACT_ORIGIN (BIND_EXPR_BLOCK (expr)) = fn;
1606 /* Local declarations will be replaced by their equivalents in this
1609 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
1612 /* Initialize the parameters. */
1613 args = TREE_OPERAND (t, 1);
1614 return_slot_addr = NULL_TREE;
/* A return-slot call passes the result address as a hidden first
   argument; peel it off before mapping formals to actuals.  */
1615 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
1617 return_slot_addr = TREE_VALUE (args);
1618 args = TREE_CHAIN (args);
1619 TREE_TYPE (expr) = void_type_node;
1622 arg_inits = initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2),
1626 /* Expand any inlined calls in the initializers. Do this before we
1627 push FN on the stack of functions we are inlining; we want to
1628 inline calls to FN that appear in the initializers for the
1631 Note we need to save and restore the saved tree statement iterator
1632 to avoid having it clobbered by expand_calls_inline. */
1633 tree_stmt_iterator save_tsi;
1636 expand_calls_inline (&arg_inits, id);
1639 /* And add them to the tree. */
1640 append_to_statement_list (arg_inits, &BIND_EXPR_BODY (expr));
1643 /* Record the function we are about to inline so that we can avoid
1644 recursing into it. */
1645 VARRAY_PUSH_TREE (id->fns, fn);
1647 /* Return statements in the function body will be replaced by jumps
1648 to the RET_LABEL. */
1649 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1650 DECL_ARTIFICIAL (id->ret_label) = 1;
1651 DECL_IGNORED_P (id->ret_label) = 1;
1652 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
1653 insert_decl_map (id, id->ret_label, id->ret_label);
1655 gcc_assert (DECL_INITIAL (fn));
1656 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
1658 /* Find the lhs to which the result of this call is assigned. */
1659 modify_dest = tsi_stmt (id->tsi);
1660 if (TREE_CODE (modify_dest) == MODIFY_EXPR)
1662 modify_dest = TREE_OPERAND (modify_dest, 0);
1664 /* The function which we are inlining might not return a value,
1665 in which case we should issue a warning that the function
1666 does not return a value. In that case the optimizers will
1667 see that the variable to which the value is assigned was not
1668 initialized. We do not want to issue a warning about that
1669 uninitialized variable. */
1670 if (DECL_P (modify_dest))
1671 TREE_NO_WARNING (modify_dest) = 1;
1676 /* Declare the return variable for the function. */
1677 decl = declare_return_variable (id, return_slot_addr,
1678 modify_dest, &use_retvar);
1680 /* After we've initialized the parameters, we insert the body of the
/* Temporarily point current_node at the callee so recursive walks and
   edge lookups are attributed correctly; restored below.  */
1683 struct cgraph_node *old_node = id->current_node;
1686 id->current_node = edge->callee;
1687 copy = copy_body (id);
1689 /* If the function uses a return slot, then it may legitimately
1690 fall through while still returning a value, so we have to skip
1691 the warning here. */
1692 if (warn_return_type
1693 && !TREE_NO_WARNING (fn)
1694 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
1695 && return_slot_addr == NULL_TREE
1696 && block_may_fallthru (copy))
1698 warning ("control may reach end of non-void function %qD being inlined",
/* Warn only once per function.  */
1700 TREE_NO_WARNING (fn) = 1;
1703 append_to_statement_list (copy, &BIND_EXPR_BODY (expr));
1704 id->current_node = old_node;
1706 inlined_body = &BIND_EXPR_BODY (expr);
1708 /* After the body of the function comes the RET_LABEL. This must come
1709 before we evaluate the returned value below, because that evaluation
1710 may cause RTL to be generated. */
1711 if (TREE_USED (id->ret_label))
1713 tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
1714 append_to_statement_list (label, &BIND_EXPR_BODY (expr));
/* The per-inline decl map is no longer needed once the body is copied.  */
1718 splay_tree_delete (id->decl_map);
1721 /* Although, from the semantic viewpoint, the new expression has
1722 side-effects only if the old one did, it is not possible, from
1723 the technical viewpoint, to evaluate the body of a function
1724 multiple times without serious havoc. */
1725 TREE_SIDE_EFFECTS (expr) = 1;
1727 tsi_link_before (&id->tsi, expr, TSI_SAME_STMT);
1729 /* If the inlined function returns a result that we care about,
1730 then we're going to need to splice in a MODIFY_EXPR. Otherwise
1731 the call was a standalone statement and we can just replace it
1732 with the BIND_EXPR inline representation of the called function. */
1733 if (!use_retvar || !modify_dest)
1734 *tsi_stmt_ptr (id->tsi) = build_empty_stmt ();
1738 /* When we gimplify a function call, we may clear TREE_SIDE_EFFECTS on
1739 the call if it is to a "const" function. Thus the copy of
1740 TREE_SIDE_EFFECTS from the CALL_EXPR to the BIND_EXPR above will
1741 result in TREE_SIDE_EFFECTS not being set for the inlined copy of a
1744 Unfortunately, that is wrong as inlining the function can create/expose
1745 interesting side effects (such as setting of a return value).
1747 The easiest solution is to simply recalculate TREE_SIDE_EFFECTS for
1748 the toplevel expression. */
1749 recalculate_side_effects (expr);
1751 /* Output the inlining info for this abstract function, since it has been
1752 inlined. If we don't do this now, we can lose the information about the
1753 variables in the function when the blocks get blown away as soon as we
1754 remove the cgraph node. */
1755 (*debug_hooks->outlining_inline_function) (edge->callee->decl);
1757 /* Update callgraph if needed. */
1758 cgraph_remove_node (edge->callee);
1760 /* Recurse into the body of the just inlined function. */
1761 expand_calls_inline (inlined_body, id);
/* Pop FN from the in-progress stack pushed above.  */
1762 VARRAY_POP (id->fns);
1764 /* Don't walk into subtrees. We've already handled them above. */
1767 lang_hooks.tree_inlining.end_inlining (fn);
1769 /* Keep iterating. */
1771 input_location = saved_location;
/* Dispatch over statement codes, recursing into compound statements and
   finally handing any CALL_EXPR (possibly on the RHS of a MODIFY_EXPR,
   possibly wrapped in WITH_SIZE_EXPR) to expand_call_inline.  */
1776 expand_calls_inline (tree *stmt_p, inline_data *id)
1778 tree stmt = *stmt_p;
1779 enum tree_code code = TREE_CODE (stmt);
1784 case STATEMENT_LIST:
1786 tree_stmt_iterator i;
/* No tsi_next in the loop header: iteration is advanced inside the
   body because inlining may replace the current statement.  */
1789 for (i = tsi_start (stmt); !tsi_end_p (i); )
1792 expand_calls_inline (tsi_stmt_ptr (i), id);
/* If inlining produced a nested STATEMENT_LIST, splice its
   statements in-line rather than leaving it nested.  */
1795 if (TREE_CODE (new) == STATEMENT_LIST)
1797 tsi_link_before (&i, new, TSI_SAME_STMT);
1807 expand_calls_inline (&COND_EXPR_THEN (stmt), id);
1808 expand_calls_inline (&COND_EXPR_ELSE (stmt), id);
1812 expand_calls_inline (&CATCH_BODY (stmt), id);
1815 case EH_FILTER_EXPR:
1816 expand_calls_inline (&EH_FILTER_FAILURE (stmt), id);
1819 case TRY_CATCH_EXPR:
1820 case TRY_FINALLY_EXPR:
1821 expand_calls_inline (&TREE_OPERAND (stmt, 0), id);
1822 expand_calls_inline (&TREE_OPERAND (stmt, 1), id);
1826 expand_calls_inline (&BIND_EXPR_BODY (stmt), id);
1830 /* We're gimple. We should have gotten rid of all these. */
1834 stmt_p = &TREE_OPERAND (stmt, 0);
1836 if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
/* For "lhs = call(...)", descend to the RHS.  */
1842 stmt_p = &TREE_OPERAND (stmt, 1);
/* A WITH_SIZE_EXPR may wrap the call; look through it.  */
1844 if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
1846 stmt_p = &TREE_OPERAND (stmt, 0);
1849 if (TREE_CODE (stmt) != CALL_EXPR)
1855 expand_call_inline (stmt_p, &dummy, id);
1863 /* Expand calls to inline functions in the body of FN. */
/* Top-level driver: sets up the inline_data context (callgraph node,
   recursion-guard stack, pruner hash table) and walks FN's saved tree,
   replacing inlinable calls with their bodies.  */
1866 optimize_inline_calls (tree fn)
1871 /* There is no point in performing inlining if errors have already
1872 occurred -- and we might crash if we try to inline invalid
1874 if (errorcount || sorrycount)
1878 memset (&id, 0, sizeof (id));
1880 id.current_node = id.node = cgraph_node (fn);
1881 /* Don't allow recursion into FN. */
1882 VARRAY_TREE_INIT (id.fns, 32, "fns");
1883 VARRAY_PUSH_TREE (id.fns, fn);
1884 /* Or any functions that aren't finished yet. */
1885 prev_fn = NULL_TREE;
1886 if (current_function_decl)
1888 VARRAY_PUSH_TREE (id.fns, current_function_decl);
1889 prev_fn = current_function_decl;
/* Let the front end add any of its own in-progress functions.  */
1892 prev_fn = lang_hooks.tree_inlining.add_pending_fn_decls (&id.fns, prev_fn);
1894 /* Keep track of the low-water mark, i.e., the point where the first
1895 real inlining is represented in ID.FNS. */
1896 id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
1898 /* Replace all calls to inline functions with the bodies of those
1900 id.tree_pruner = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
1901 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
1904 htab_delete (id.tree_pruner);
1906 #ifdef ENABLE_CHECKING
1908 struct cgraph_edge *e;
1910 verify_cgraph_node (id.node);
1912 /* Double check that we inlined everything we are supposed to inline. */
1913 for (e = id.node->callees; e; e = e->next_callee)
1914 gcc_assert (e->inline_failed);
1919 /* FN is a function that has a complete body, and CLONE is a function whose
1920 body is to be set to a copy of FN, mapping argument declarations according
1921 to the ARG_MAP splay_tree. */
/* Used by the C++ front end to materialize constructor/destructor clones:
   copies FN's body into CLONE with the caller-supplied decl map.  */
1924 clone_body (tree clone, tree fn, void *arg_map)
1928 /* Clone the body, as if we were making an inline call. But, remap the
1929 parameters in the callee to the parameters of caller. If there's an
1930 in-charge parameter, map it to an appropriate constant. */
1931 memset (&id, 0, sizeof (id));
/* Stack is [clone, fn]: fn on top is the body being copied; slot 0
   (clone) becomes the context for the copied decls.  */
1932 VARRAY_TREE_INIT (id.fns, 2, "fns");
1933 VARRAY_PUSH_TREE (id.fns, clone);
1934 VARRAY_PUSH_TREE (id.fns, fn);
/* The map is owned by the caller here, unlike inlining which builds
   and frees its own.  */
1935 id.decl_map = (splay_tree)arg_map;
1937 /* Cloning is treated slightly differently from inlining. Set
1938 CLONING_P so that it's clear which operation we're performing. */
1939 id.cloning_p = true;
1941 /* Actually copy the body. */
1942 append_to_statement_list_force (copy_body (&id), &DECL_SAVED_TREE (clone));
1945 /* Make and return duplicate of body in FN. Put copies of DECL_ARGUMENTS
1946 in *arg_copy and of the static chain, if any, in *sc_copy. */
/* Snapshots FN's body before it is mutated by inlining/optimization, so
   a pristine copy is available (e.g. for later re-inlining).  Parameter
   and static-chain decls are duplicated and mapped; DECL_RESULT is
   deliberately mapped to itself.  */
1949 save_body (tree fn, tree *arg_copy, tree *sc_copy)
1954 memset (&id, 0, sizeof (id));
1955 VARRAY_TREE_INIT (id.fns, 1, "fns");
1956 VARRAY_PUSH_TREE (id.fns, fn);
1957 id.node = cgraph_node (fn);
1959 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
1960 *arg_copy = DECL_ARGUMENTS (fn);
/* Duplicate each PARM_DECL; the copy keeps the original as its
   abstract origin for debug info.  */
1962 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
1964 tree new = copy_node (*parg);
1966 lang_hooks.dup_lang_specific_decl (new);
1967 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
1968 insert_decl_map (&id, *parg, new);
1969 TREE_CHAIN (new) = TREE_CHAIN (*parg);
/* Same treatment for the static chain decl, if the function has one.  */
1973 *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1976 tree new = copy_node (*sc_copy);
1978 lang_hooks.dup_lang_specific_decl (new);
1979 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
1980 insert_decl_map (&id, *sc_copy, new);
1981 TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
1985 insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
1987 /* Actually copy the body. */
1988 body = copy_body (&id);
1991 splay_tree_delete (id.decl_map);
/* Helper for walk_tree/walk_type_fields: recurse into NODE and, if the
   callback returned a value, propagate it (the early-return part of the
   expansion is elided in this excerpt).  */
1995 #define WALK_SUBTREE(NODE) \
1998 result = walk_tree (&(NODE), func, data, pset); \
2004 /* This is a subroutine of walk_tree that walks fields of TYPE that are to
2005 be walked whenever a type is seen in the tree. Rest of operands and return
2006 value are as for walk_tree. */
2009 walk_type_fields (tree type, walk_tree_fn func, void *data,
2010 struct pointer_set_t *pset)
2012 tree result = NULL_TREE;
2014 switch (TREE_CODE (type))
2017 case REFERENCE_TYPE:
2018 /* We have to worry about mutually recursive pointers. These can't
2019 be written in C. They can in Ada. It's pathological, but
2020 there's an ACATS test (c38102a) that checks it. Deal with this
2021 by checking if we're pointing to another pointer, that one
2022 points to another pointer, that one does too, and we have no htab.
2023 If so, get a hash table. We check three levels deep to avoid
2024 the cost of the hash table if we don't need one. */
2025 if (POINTER_TYPE_P (TREE_TYPE (type))
2026 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
2027 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
/* Restart the walk with duplicate suppression so pointer cycles
   terminate.  */
2030 result = walk_tree_without_duplicates (&TREE_TYPE (type),
2038 /* ... fall through ... */
2041 WALK_SUBTREE (TREE_TYPE (type));
2045 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
/* FUNCTION_TYPE/METHOD_TYPE: walk return type, then argument types.  */
2050 WALK_SUBTREE (TREE_TYPE (type));
2054 /* We never want to walk into default arguments. */
2055 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
2056 WALK_SUBTREE (TREE_VALUE (arg));
2061 /* Don't follow this node's type if it is a pointer, for fear that we'll
2062 have infinite recursion. Those types are uninteresting anyway. */
2063 if (!POINTER_TYPE_P (TREE_TYPE (type))
2064 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE)
2065 WALK_SUBTREE (TREE_TYPE (type));
2066 WALK_SUBTREE (TYPE_DOMAIN (type));
/* Scalar/enumeral types: walk the bounds.  */
2074 WALK_SUBTREE (TYPE_MIN_VALUE (type));
2075 WALK_SUBTREE (TYPE_MAX_VALUE (type));
/* OFFSET_TYPE: walk the target type and the base type.  */
2079 WALK_SUBTREE (TREE_TYPE (type));
2080 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
2090 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
2091 called with the DATA and the address of each sub-tree. If FUNC returns a
2092 non-NULL value, the traversal is aborted, and the value returned by FUNC
2093 is returned. If PSET is non-NULL it is used to record the nodes visited,
2094 and to avoid visiting a node more than once. */
2097 walk_tree (tree *tp, walk_tree_fn func, void *data, struct pointer_set_t *pset)
2099 enum tree_code code;
/* Local macro: like WALK_SUBTREE but loops via goto instead of a real
   recursive call, to bound stack depth on long chains.  #undef'd below.  */
2103 #define WALK_SUBTREE_TAIL(NODE) \
2107 goto tail_recurse; \
2112 /* Skip empty subtrees. */
2116 /* Don't walk the same tree twice, if the user has requested
2117 that we avoid doing so. */
2118 if (pset && pointer_set_insert (pset, *tp))
2121 /* Call the function. */
2123 result = (*func) (tp, &walk_subtrees, data);
2125 /* If we found something, return it. */
2129 code = TREE_CODE (*tp);
2131 /* Even if we didn't, FUNC may have decided that there was nothing
2132 interesting below this point in the tree. */
2135 if (code == TREE_LIST)
2136 /* But we still need to check our siblings. */
2137 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
/* Give the front end a chance to handle language-specific nodes.  */
2142 result = lang_hooks.tree_inlining.walk_subtrees (tp, &walk_subtrees, func,
2144 if (result || ! walk_subtrees)
2147 /* If this is a DECL_EXPR, walk into various fields of the type that it's
2148 defining. We only want to walk into these fields of a type in this
2149 case. Note that decls get walked as part of the processing of a
2152 ??? Precisely which fields of types that we are supposed to walk in
2153 this case vs. the normal case aren't well defined. */
2154 if (code == DECL_EXPR
2155 && TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL
2156 && TREE_CODE (TREE_TYPE (DECL_EXPR_DECL (*tp))) != ERROR_MARK)
2158 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
2160 /* Call the function for the type. See if it returns anything or
2161 doesn't want us to continue. If we are to continue, walk both
2162 the normal fields and those for the declaration case. */
2163 result = (*func) (type_p, &walk_subtrees, data);
2164 if (result || !walk_subtrees)
2167 result = walk_type_fields (*type_p, func, data, pset);
2171 WALK_SUBTREE (TYPE_SIZE (*type_p));
2172 WALK_SUBTREE (TYPE_SIZE_UNIT (*type_p));
2174 /* If this is a record type, also walk the fields. */
2175 if (TREE_CODE (*type_p) == RECORD_TYPE
2176 || TREE_CODE (*type_p) == UNION_TYPE
2177 || TREE_CODE (*type_p) == QUAL_UNION_TYPE)
2181 for (field = TYPE_FIELDS (*type_p); field;
2182 field = TREE_CHAIN (field))
2184 /* We'd like to look at the type of the field, but we can easily
2185 get infinite recursion. So assume it's pointed to elsewhere
2186 in the tree. Also, ignore things that aren't fields. */
2187 if (TREE_CODE (field) != FIELD_DECL)
2190 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
2191 WALK_SUBTREE (DECL_SIZE (field));
2192 WALK_SUBTREE (DECL_SIZE_UNIT (field));
2193 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
2194 WALK_SUBTREE (DECL_QUALIFIER (field));
/* Generic expression case: walk all operands; SAVE_EXPR and
   BIND_EXPR are excluded because they need special handling below.  */
2199 else if (code != SAVE_EXPR
2200 && code != BIND_EXPR
2201 && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2205 /* Walk over all the sub-trees of this operand. */
2206 len = TREE_CODE_LENGTH (code);
2207 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
2208 But, we only want to walk once. */
2209 if (code == TARGET_EXPR
2210 && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
2213 /* Go through the subtrees. We need to do this in forward order so
2214 that the scope of a FOR_EXPR is handled properly. */
2215 #ifdef DEBUG_WALK_TREE
2216 for (i = 0; i < len; ++i)
2217 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2219 for (i = 0; i < len - 1; ++i)
2220 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2224 /* The common case is that we may tail recurse here. */
2225 if (code != BIND_EXPR
2226 && !TREE_CHAIN (*tp))
2227 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
2229 WALK_SUBTREE (TREE_OPERAND (*tp, len - 1));
2234 /* If this is a type, walk the needed fields in the type. */
2235 else if (TYPE_P (*tp))
2237 result = walk_type_fields (*tp, func, data, pset);
2243 /* Not one of the easy cases. We must explicitly go through the
2248 case IDENTIFIER_NODE:
2254 case PLACEHOLDER_EXPR:
2258 /* None of these have subtrees other than those already walked
/* TREE_LIST: walk the value, then tail-recurse down the chain.  */
2263 WALK_SUBTREE (TREE_VALUE (*tp));
2264 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2269 int len = TREE_VEC_LENGTH (*tp);
2274 /* Walk all elements but the first. */
2276 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
2278 /* Now walk the first one as a tail call. */
2279 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
2283 WALK_SUBTREE (TREE_REALPART (*tp));
2284 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
2287 WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
2290 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
/* BIND_EXPR: walk selected fields of each bound declaration, then the
   body as a tail call.  */
2295 for (decl = BIND_EXPR_VARS (*tp); decl; decl = TREE_CHAIN (decl))
2297 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
2298 into declarations that are just mentioned, rather than
2299 declared; they don't really belong to this part of the tree.
2300 And, we can see cycles: the initializer for a declaration
2301 can refer to the declaration itself. */
2302 WALK_SUBTREE (DECL_INITIAL (decl));
2303 WALK_SUBTREE (DECL_SIZE (decl));
2304 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
2306 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
2309 case STATEMENT_LIST:
2311 tree_stmt_iterator i;
2312 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
2313 WALK_SUBTREE (*tsi_stmt_ptr (i));
2318 /* ??? This could be a language-defined node. We really should make
2319 a hook for it, but right now just ignore it. */
2324 /* We didn't find what we were looking for. */
2328 #undef WALK_SUBTREE_TAIL
2331 /* Like walk_tree, but does not walk duplicate nodes more than once. */
/* Convenience wrapper: creates a fresh pointer set, runs walk_tree with
   it so shared subtrees are visited once, then frees the set.  */
2334 walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
2337 struct pointer_set_t *pset;
2339 pset = pointer_set_create ();
2340 result = walk_tree (tp, func, data, pset);
2341 pointer_set_destroy (pset);
2345 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
/* walk_tree callback for deep-copying an expression tree in place:
   expressions and TREE_LISTs are duplicated; types, non-PARM/TYPE decls
   and constants are shared rather than copied.  */
2348 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2350 enum tree_code code = TREE_CODE (*tp);
2352 /* We make copies of most nodes. */
2353 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
2354 || code == TREE_LIST
2356 || code == TYPE_DECL)
2358 /* Because the chain gets clobbered when we make a copy, we save it
2360 tree chain = TREE_CHAIN (*tp);
2363 /* Copy the node. */
2364 new = copy_node (*tp);
2366 /* Propagate mudflap marked-ness. */
2367 if (flag_mudflap && mf_marked_p (*tp))
2372 /* Now, restore the chain, if appropriate. That will cause
2373 walk_tree to walk into the chain as well. */
2374 if (code == PARM_DECL || code == TREE_LIST)
2375 TREE_CHAIN (*tp) = chain;
2377 /* For now, we don't update BLOCKs when we make copies. So, we
2378 have to nullify all BIND_EXPRs. */
2379 if (TREE_CODE (*tp) == BIND_EXPR)
2380 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
/* Shared (uncopied) node classes follow; *tp is left untouched.  */
2383 else if (TREE_CODE_CLASS (code) == tcc_type)
2385 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2387 else if (TREE_CODE_CLASS (code) == tcc_constant)
/* STATEMENT_LISTs must be handled by the caller (copy_statement_list);
   reaching one here is a bug.  */
2390 gcc_assert (code != STATEMENT_LIST);
2394 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2395 information indicating to what new SAVE_EXPR this one should be mapped,
2396 use that one. Otherwise, create a new node and enter it in ST. */
/* Ensures every occurrence of a SAVE_EXPR in the copied tree maps to the
   same single copy, preserving evaluate-once semantics.  */
2399 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2401 splay_tree st = (splay_tree) st_;
2405 /* See if we already encountered this SAVE_EXPR. */
2406 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2408 /* If we didn't already remap this SAVE_EXPR, do so now. */
2411 t = copy_node (*tp);
2413 /* Remember this SAVE_EXPR. */
2414 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2415 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
/* The copy maps to itself, so a second pass over it is a no-op.  */
2416 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2420 /* We've already walked into this SAVE_EXPR; don't do it again. */
2422 t = (tree) n->value;
2425 /* Replace this SAVE_EXPR with the copy. */
2429 /* Called via walk_tree. If *TP points to a DECL_STMT for a local label,
2430 copies the declaration and enters it in the splay_tree in DATA (which is
2431 really an `inline_data *'). */
/* Pre-pass for unsave_expr_now: seeds the decl map with fresh copies of
   locally declared labels so later remapping finds them.  */
2434 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2437 inline_data *id = (inline_data *) data;
2439 /* Don't walk into types. */
2443 else if (TREE_CODE (*tp) == LABEL_EXPR)
2445 tree decl = TREE_OPERAND (*tp, 0);
2447 /* Copy the decl and remember the copy. */
/* The label stays in its own context (same decl context for from/to).  */
2448 insert_decl_map (id, decl,
2449 copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
2450 DECL_CONTEXT (decl)));
2456 /* Perform any modifications to EXPR required when it is unsaved. Does
2457 not recurse into EXPR's subtrees. */
/* Node-local fixups for the unsave walk; currently only TARGET_EXPRs
   need adjustment (resetting the initializer from the saved slot).  */
2460 unsave_expr_1 (tree expr)
2462 switch (TREE_CODE (expr))
2465 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
2466 It's OK for this to happen if it was part of a subtree that
2467 isn't immediately expanded, such as operand 2 of another
2469 if (TREE_OPERAND (expr, 1))
/* Move the saved initializer (operand 3) back into the active slot
   (operand 1) and clear the saved copy.  */
2472 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
2473 TREE_OPERAND (expr, 3) = NULL_TREE;
2481 /* Called via walk_tree when an expression is unsaved. Using the
2482 splay_tree pointed to by ST (which is really a `splay_tree'),
2483 remaps all local declarations to appropriate replacements. */
/* walk_tree callback (second pass of unsave_expr_now): copy the tree,
   remapping local declarations, labels and SAVE_EXPRs through the
   splay tree built by mark_local_for_remap_r, then apply the per-node
   fixups of unsave_expr_1.  DATA is really an `inline_data *'.
   NOTE(review): excerpt is incomplete — the return type, braces, the
   `splay_tree_node n;' declaration, the final `else' before the
   copy_tree_r fallback, and the NULL_TREE return are elided.  */
2486 unsave_r (tree *tp, int *walk_subtrees, void *data)
2488 inline_data *id = (inline_data *) data;
2489 splay_tree st = id->decl_map;
2492 /* Only a local declaration (variable or label). */
2493 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2494 || TREE_CODE (*tp) == LABEL_DECL)
2496 /* Lookup the declaration. */
2497 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2499 /* If it's there, remap it; a decl with no entry is left alone
   (e.g. a local from an enclosing scope that was not marked).  */
2501 *tp = (tree) n->value;
2504 else if (TREE_CODE (*tp) == STATEMENT_LIST)
2505 copy_statement_list (tp);
2506 else if (TREE_CODE (*tp) == BIND_EXPR)
2507 copy_bind_expr (tp, walk_subtrees, id);
2508 else if (TREE_CODE (*tp) == SAVE_EXPR)
2509 remap_save_expr (tp, st, walk_subtrees);
2512 copy_tree_r (tp, walk_subtrees, NULL);
2514 /* Do whatever unsaving is required. */
2515 unsave_expr_1 (*tp);
2518 /* Keep iterating. */
2522 /* Copies everything in EXPR and replaces variables, labels
2523 and SAVE_EXPRs local to EXPR. */
/* Entry point: return a deep copy of EXPR in which all local variables,
   labels and SAVE_EXPRs have been replaced by fresh copies, making the
   tree safe to expand again.  Two walks: one to collect local labels
   into a decl map, one to copy/remap/unsave.
   NOTE(review): excerpt is incomplete — the return type (presumably
   `tree'), braces, the NULL check's `return expr;', and the trailing
   `return expr;' are elided.  */
2526 unsave_expr_now (tree expr)
2530 /* There's nothing to do for NULL_TREE. */
2535 memset (&id, 0, sizeof (id));
2536 VARRAY_TREE_INIT (id.fns, 1, "fns");
2537 VARRAY_PUSH_TREE (id.fns, current_function_decl);
2538 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2540 /* Walk the tree once to find local labels. */
2541 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2543 /* Walk the tree again, copying, remapping, and unsaving. */
2544 walk_tree (&expr, unsave_r, &id, NULL);
2547 splay_tree_delete (id.decl_map);
2552 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
/* Helper for debug_find_tree: walk_tree callback that compares *TP
   against the tree passed in DATA.
   NOTE(review): the callback's body (the comparison and return) is
   entirely elided from this excerpt.  */
2555 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
/* Return nonzero if SEARCH occurs anywhere within TOP; intended to be
   called by hand from gdb while debugging.
   NOTE(review): return type (presumably `bool') is elided here.  */
2564 debug_find_tree (tree top, tree search)
2566 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
2569 /* Declare the variables created by the inliner. Add all the variables in
2570 VARS to BIND_EXPR. */
/* Attach the inliner-created variables in the chain VARS to BIND_EXPR,
   marking each as already seen in a BIND_EXPR so later gimplification
   does not re-declare it.
   NOTE(review): excerpt is incomplete — the return type, braces and
   the `tree t;' declaration are elided.  */
2573 declare_inline_vars (tree bind_expr, tree vars)
2576 for (t = vars; t; t = TREE_CHAIN (t))
2577 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
2579 add_var_to_bind_expr (bind_expr, vars);