GCC47: Add local modifications
[dragonfly.git] / contrib / gcc-4.7 / gcc / tree-inline.c
1/* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
3 2012 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 3, or (at your option)
11any later version.
12
13GCC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "diagnostic-core.h"
27#include "tree.h"
28#include "tree-inline.h"
29#include "flags.h"
30#include "params.h"
31#include "input.h"
32#include "insn-config.h"
33#include "hashtab.h"
34#include "langhooks.h"
35#include "basic-block.h"
36#include "tree-iterator.h"
37#include "cgraph.h"
38#include "intl.h"
39#include "tree-mudflap.h"
40#include "tree-flow.h"
41#include "function.h"
43#include "tree-pretty-print.h"
44#include "except.h"
45#include "debug.h"
46#include "pointer-set.h"
47#include "ipa-prop.h"
48#include "value-prof.h"
49#include "tree-pass.h"
50#include "target.h"
51#include "integrate.h"
52
53#include "rtl.h" /* FIXME: For asm_str_count. */
54
55/* I'm not real happy about this, but we need to handle gimple and
56 non-gimple trees. */
57#include "gimple.h"
58
59/* Inlining, Cloning, Versioning, Parallelization
60
61 Inlining: a function body is duplicated, but the PARM_DECLs are
62 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
63 MODIFY_EXPRs that store to a dedicated returned-value variable.
64 The duplicated eh_region info of the copy will later be appended
65 to the info for the caller; the eh_region info in copied throwing
66 statements and RESX statements are adjusted accordingly.
67
68 Cloning: (only in C++) We have one body for a con/de/structor, and
69 multiple function decls, each with a unique parameter list.
70 Duplicate the body, using the given splay tree; some parameters
71 will become constants (like 0 or 1).
72
 73   Versioning: a function body is duplicated and the result is a new
 74   function, rather than being inserted into the blocks of an existing
 75   function as with inlining.  Some parameters will become constants.
76
77 Parallelization: a region of a function is duplicated resulting in
78 a new function. Variables may be replaced with complex expressions
79 to enable shared variable semantics.
80
 81   All of these will simultaneously look up any callgraph edges.  If
 82   we're going to inline the duplicated function body, and the given
 83   function has some cloned callgraph nodes (one for each place this
 84   function will be inlined), those callgraph edges will be duplicated.
85 If we're cloning the body, those callgraph edges will be
86 updated to point into the new body. (Note that the original
87 callgraph node and edge list will not be altered.)
88
89 See the CALL_EXPR handling case in copy_tree_body_r (). */
90
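/* Editorial illustration (not part of the original source; the GIMPLE and
   the names below are hypothetical and simplified).  For a caller statement

       y = square (v);

   where the callee body is "return x * x;", inlining remaps the PARM_DECL
   x to a fresh local and turns the RETURN_EXPR into an assignment to the
   declared return variable, roughly:

       x.1 = v;
       retval.2 = x.1 * x.1;
       y = retval.2;

   The callee's eh_region info and callgraph edges are duplicated and
   rewired as described above.  */
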
91/* To Do:
92
93 o In order to make inlining-on-trees work, we pessimized
94 function-local static constants. In particular, they are now
95 always output, even when not addressed. Fix this by treating
96 function-local static constants just like global static
97 constants; the back-end already knows not to output them if they
98 are not needed.
99
100 o Provide heuristics to clamp inlining of recursive template
101 calls? */
102
103
104/* Weights that estimate_num_insns uses to estimate the size of the
105 produced code. */
106
107eni_weights eni_size_weights;
108
109/* Weights that estimate_num_insns uses to estimate the time necessary
110 to execute the produced code. */
111
112eni_weights eni_time_weights;
113
114/* Prototypes. */
115
116static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
117static void remap_block (tree *, copy_body_data *);
118static void copy_bind_expr (tree *, int *, copy_body_data *);
119static tree mark_local_for_remap_r (tree *, int *, void *);
120static void unsave_expr_1 (tree);
121static tree unsave_r (tree *, int *, void *);
122static void declare_inline_vars (tree, tree);
123static void remap_save_expr (tree *, void *, int *);
124static void prepend_lexical_block (tree current_block, tree new_block);
125static tree copy_decl_to_var (tree, copy_body_data *);
126static tree copy_result_decl_to_var (tree, copy_body_data *);
127static tree copy_decl_maybe_to_var (tree, copy_body_data *);
128static gimple remap_gimple_stmt (gimple, copy_body_data *);
129static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
130
131/* Insert a tree->tree mapping for ID. Despite the name suggests
132 that the trees should be variables, it is used for more than that. */
133
134void
135insert_decl_map (copy_body_data *id, tree key, tree value)
136{
137 *pointer_map_insert (id->decl_map, key) = value;
138
139 /* Always insert an identity map as well. If we see this same new
140 node again, we won't want to duplicate it a second time. */
141 if (key != value)
142 *pointer_map_insert (id->decl_map, value) = value;
143}
144
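/* Editorial sketch (hypothetical decls, not in the original source) of the
   identity mapping above: after insert_decl_map (id, old_decl, new_decl),
   both

       pointer_map_contains (id->decl_map, old_decl)
       pointer_map_contains (id->decl_map, new_decl)

   yield a slot holding new_decl, so if the copied body is walked again the
   already-remapped node maps to itself instead of being duplicated a
   second time.  */
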
145/* Insert a tree->tree mapping for ID. This is only used for
146 variables. */
147
148static void
149insert_debug_decl_map (copy_body_data *id, tree key, tree value)
150{
151 if (!gimple_in_ssa_p (id->src_cfun))
152 return;
153
154 if (!MAY_HAVE_DEBUG_STMTS)
155 return;
156
157 if (!target_for_debug_bind (key))
158 return;
159
160 gcc_assert (TREE_CODE (key) == PARM_DECL);
161 gcc_assert (TREE_CODE (value) == VAR_DECL);
162
163 if (!id->debug_map)
164 id->debug_map = pointer_map_create ();
165
166 *pointer_map_insert (id->debug_map, key) = value;
167}
168
169/* If nonzero, we're remapping the contents of inlined debug
170 statements. If negative, an error has occurred, such as a
171 reference to a variable that isn't available in the inlined
172 context. */
173static int processing_debug_stmt = 0;
174
175/* Construct new SSA name for old NAME. ID is the inline context. */
176
177static tree
178remap_ssa_name (tree name, copy_body_data *id)
179{
180 tree new_tree;
181 tree *n;
182
183 gcc_assert (TREE_CODE (name) == SSA_NAME);
184
185 n = (tree *) pointer_map_contains (id->decl_map, name);
186 if (n)
187 return unshare_expr (*n);
188
189 if (processing_debug_stmt)
190 {
191 if (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
192 && SSA_NAME_IS_DEFAULT_DEF (name)
193 && id->entry_bb == NULL
194 && single_succ_p (ENTRY_BLOCK_PTR))
195 {
196 tree vexpr = make_node (DEBUG_EXPR_DECL);
197 gimple def_temp;
198 gimple_stmt_iterator gsi;
199 tree val = SSA_NAME_VAR (name);
200
201 n = (tree *) pointer_map_contains (id->decl_map, val);
202 if (n != NULL)
203 val = *n;
204 if (TREE_CODE (val) != PARM_DECL)
205 {
206 processing_debug_stmt = -1;
207 return name;
208 }
209 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
210 DECL_ARTIFICIAL (vexpr) = 1;
211 TREE_TYPE (vexpr) = TREE_TYPE (name);
212 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
213 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
214 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
215 return vexpr;
216 }
217
218 processing_debug_stmt = -1;
219 return name;
220 }
221
222 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
223 in copy_bb. */
224 new_tree = remap_decl (SSA_NAME_VAR (name), id);
225
 226  /* We might've substituted a constant or another SSA_NAME for
 227     the variable.
 228
 229     Replace the SSA name representing the RESULT_DECL by a variable during
 230     inlining: this saves us from needing to introduce a PHI node in case
 231     the return value is only partly initialized.  */
232 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
233 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
234 || !id->transform_return_to_modify))
235 {
236 struct ptr_info_def *pi;
237 new_tree = make_ssa_name (new_tree, NULL);
238 insert_decl_map (id, name, new_tree);
239 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
240 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
241 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
242 /* At least IPA points-to info can be directly transferred. */
243 if (id->src_cfun->gimple_df
244 && id->src_cfun->gimple_df->ipa_pta
245 && (pi = SSA_NAME_PTR_INFO (name))
246 && !pi->pt.anything)
247 {
248 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
249 new_pi->pt = pi->pt;
250 }
251 if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
252 {
 253	  /* By inlining a function having an uninitialized variable, we might
 254	     extend its lifetime (the variable might get reused).  This causes an
 255	     ICE in the case we end up extending the lifetime of an SSA name across
 256	     an abnormal edge, but it also increases register pressure.
 257
 258	     We simply initialize all uninitialized vars to 0, except
 259	     for the case we are inlining to the very first BB.  We can avoid
 260	     this for all BBs that are not inside strongly connected
 261	     regions of the CFG, but this is expensive to test.  */
262 if (id->entry_bb
263 && is_gimple_reg (SSA_NAME_VAR (name))
264 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
265 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
266 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
267 || EDGE_COUNT (id->entry_bb->preds) != 1))
268 {
269 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
270 gimple init_stmt;
271 tree zero = build_zero_cst (TREE_TYPE (new_tree));
272
273 init_stmt = gimple_build_assign (new_tree, zero);
274 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
275 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
276 }
277 else
278 {
279 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
280 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
281 == name)
282 set_default_def (SSA_NAME_VAR (new_tree), new_tree);
283 }
284 }
285 }
286 else
287 insert_decl_map (id, name, new_tree);
288 return new_tree;
289}
290
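/* Editorial note (hypothetical SSA names, not in the original source): if
   the inlined body reads an uninitialized local, say "int x;" whose default
   definition x_3(D) would occur in an abnormal PHI, the copy does not keep
   a default definition (except when inlining into the very first BB).
   Instead, remap_ssa_name above emits

       x_7 = 0;

   at the end of id->entry_bb, so the copied name has a real defining
   statement and its undefined value is not live across the abnormal
   edge.  */
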
291/* Remap DECL during the copying of the BLOCK tree for the function. */
292
293tree
294remap_decl (tree decl, copy_body_data *id)
295{
296 tree *n;
297
298 /* We only remap local variables in the current function. */
299
300 /* See if we have remapped this declaration. */
301
302 n = (tree *) pointer_map_contains (id->decl_map, decl);
303
304 if (!n && processing_debug_stmt)
305 {
306 processing_debug_stmt = -1;
307 return decl;
308 }
309
310 /* If we didn't already have an equivalent for this declaration,
311 create one now. */
312 if (!n)
313 {
314 /* Make a copy of the variable or label. */
315 tree t = id->copy_decl (decl, id);
316
317 /* Remember it, so that if we encounter this local entity again
318 we can reuse this copy. Do this early because remap_type may
319 need this decl for TYPE_STUB_DECL. */
320 insert_decl_map (id, decl, t);
321
322 if (!DECL_P (t))
323 return t;
324
325 /* Remap types, if necessary. */
326 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
327 if (TREE_CODE (t) == TYPE_DECL)
328 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
329
330 /* Remap sizes as necessary. */
331 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
332 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
333
334 /* If fields, do likewise for offset and qualifier. */
335 if (TREE_CODE (t) == FIELD_DECL)
336 {
337 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
338 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
339 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
340 }
341
342 if ((TREE_CODE (t) == VAR_DECL
343 || TREE_CODE (t) == RESULT_DECL
344 || TREE_CODE (t) == PARM_DECL)
345 && id->src_fn && DECL_STRUCT_FUNCTION (id->src_fn)
346 && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
347 /* We don't want to mark as referenced VAR_DECLs that were
348 not marked as such in the src function. */
349 && (TREE_CODE (decl) != VAR_DECL
350 || referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
351 DECL_UID (decl))))
352 add_referenced_var (t);
353 return t;
354 }
355
356 if (id->do_not_unshare)
357 return *n;
358 else
359 return unshare_expr (*n);
360}
361
362static tree
363remap_type_1 (tree type, copy_body_data *id)
364{
365 tree new_tree, t;
366
367 /* We do need a copy. build and register it now. If this is a pointer or
368 reference type, remap the designated type and make a new pointer or
369 reference type. */
370 if (TREE_CODE (type) == POINTER_TYPE)
371 {
372 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
373 TYPE_MODE (type),
374 TYPE_REF_CAN_ALIAS_ALL (type));
375 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
376 new_tree = build_type_attribute_qual_variant (new_tree,
377 TYPE_ATTRIBUTES (type),
378 TYPE_QUALS (type));
379 insert_decl_map (id, type, new_tree);
380 return new_tree;
381 }
382 else if (TREE_CODE (type) == REFERENCE_TYPE)
383 {
384 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
385 TYPE_MODE (type),
386 TYPE_REF_CAN_ALIAS_ALL (type));
387 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
388 new_tree = build_type_attribute_qual_variant (new_tree,
389 TYPE_ATTRIBUTES (type),
390 TYPE_QUALS (type));
391 insert_decl_map (id, type, new_tree);
392 return new_tree;
393 }
394 else
395 new_tree = copy_node (type);
396
397 insert_decl_map (id, type, new_tree);
398
399 /* This is a new type, not a copy of an old type. Need to reassociate
400 variants. We can handle everything except the main variant lazily. */
401 t = TYPE_MAIN_VARIANT (type);
402 if (type != t)
403 {
404 t = remap_type (t, id);
405 TYPE_MAIN_VARIANT (new_tree) = t;
406 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
407 TYPE_NEXT_VARIANT (t) = new_tree;
408 }
409 else
410 {
411 TYPE_MAIN_VARIANT (new_tree) = new_tree;
412 TYPE_NEXT_VARIANT (new_tree) = NULL;
413 }
414
415 if (TYPE_STUB_DECL (type))
416 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
417
418 /* Lazily create pointer and reference types. */
419 TYPE_POINTER_TO (new_tree) = NULL;
420 TYPE_REFERENCE_TO (new_tree) = NULL;
421
422 switch (TREE_CODE (new_tree))
423 {
424 case INTEGER_TYPE:
425 case REAL_TYPE:
426 case FIXED_POINT_TYPE:
427 case ENUMERAL_TYPE:
428 case BOOLEAN_TYPE:
429 t = TYPE_MIN_VALUE (new_tree);
430 if (t && TREE_CODE (t) != INTEGER_CST)
431 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
432
433 t = TYPE_MAX_VALUE (new_tree);
434 if (t && TREE_CODE (t) != INTEGER_CST)
435 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
436 return new_tree;
437
438 case FUNCTION_TYPE:
439 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
440 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
441 return new_tree;
442
443 case ARRAY_TYPE:
444 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
445 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
446 break;
447
448 case RECORD_TYPE:
449 case UNION_TYPE:
450 case QUAL_UNION_TYPE:
451 {
452 tree f, nf = NULL;
453
454 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
455 {
456 t = remap_decl (f, id);
457 DECL_CONTEXT (t) = new_tree;
458 DECL_CHAIN (t) = nf;
459 nf = t;
460 }
461 TYPE_FIELDS (new_tree) = nreverse (nf);
462 }
463 break;
464
465 case OFFSET_TYPE:
466 default:
467 /* Shouldn't have been thought variable sized. */
468 gcc_unreachable ();
469 }
470
471 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
472 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
473
474 return new_tree;
475}
476
477tree
478remap_type (tree type, copy_body_data *id)
479{
480 tree *node;
481 tree tmp;
482
483 if (type == NULL)
484 return type;
485
486 /* See if we have remapped this type. */
487 node = (tree *) pointer_map_contains (id->decl_map, type);
488 if (node)
489 return *node;
490
491 /* The type only needs remapping if it's variably modified. */
492 if (! variably_modified_type_p (type, id->src_fn))
493 {
494 insert_decl_map (id, type, type);
495 return type;
496 }
497
498 id->remapping_type_depth++;
499 tmp = remap_type_1 (type, id);
500 id->remapping_type_depth--;
501
502 return tmp;
503}
504
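/* Editorial example (hypothetical source, not from GCC itself) of a
   variably modified type that remap_type must copy: in

       void f (int n) { int a[n]; ... }

   the domain of a's array type depends on the PARM_DECL n.  When f is
   inlined, n is remapped to a new local, so TYPE_DOMAIN, TYPE_SIZE and
   TYPE_SIZE_UNIT of the array type must be walked and remapped as well
   (see remap_type_1 above); an ordinary type such as plain "int" is
   simply mapped to itself.  */
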
505/* Return previously remapped type of TYPE in ID. Return NULL if TYPE
506 is NULL or TYPE has not been remapped before. */
507
508static tree
509remapped_type (tree type, copy_body_data *id)
510{
511 tree *node;
512
513 if (type == NULL)
514 return type;
515
516 /* See if we have remapped this type. */
517 node = (tree *) pointer_map_contains (id->decl_map, type);
518 if (node)
519 return *node;
520 else
521 return NULL;
522}
523
525/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
526
527static bool
528can_be_nonlocal (tree decl, copy_body_data *id)
529{
530 /* We can not duplicate function decls. */
531 if (TREE_CODE (decl) == FUNCTION_DECL)
532 return true;
533
534 /* Local static vars must be non-local or we get multiple declaration
535 problems. */
536 if (TREE_CODE (decl) == VAR_DECL
537 && !auto_var_in_fn_p (decl, id->src_fn))
538 return true;
539
540 /* At the moment dwarf2out can handle only these types of nodes. We
541 can support more later. */
542 if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
543 return false;
544
545 /* We must use global type. We call remapped_type instead of
546 remap_type since we don't want to remap this type here if it
547 hasn't been remapped before. */
548 if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
549 return false;
550
 551   /* Without SSA we can't tell if the variable is used.  */
552 if (!gimple_in_ssa_p (cfun))
553 return false;
554
555 /* Live variables must be copied so we can attach DECL_RTL. */
556 if (var_ann (decl))
557 return false;
558
559 return true;
560}
561
562static tree
563remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
564{
565 tree old_var;
566 tree new_decls = NULL_TREE;
567
568 /* Remap its variables. */
569 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
570 {
571 tree new_var;
572
573 if (can_be_nonlocal (old_var, id))
574 {
575 if (TREE_CODE (old_var) == VAR_DECL
576 && ! DECL_EXTERNAL (old_var)
577 && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
578 add_local_decl (cfun, old_var);
579 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
580 && !DECL_IGNORED_P (old_var)
581 && nonlocalized_list)
582 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
583 continue;
584 }
585
586 /* Remap the variable. */
587 new_var = remap_decl (old_var, id);
588
589 /* If we didn't remap this variable, we can't mess with its
590 TREE_CHAIN. If we remapped this variable to the return slot, it's
591 already declared somewhere else, so don't declare it here. */
592
593 if (new_var == id->retvar)
594 ;
595 else if (!new_var)
596 {
597 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
598 && !DECL_IGNORED_P (old_var)
599 && nonlocalized_list)
600 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
601 }
602 else
603 {
604 gcc_assert (DECL_P (new_var));
605 DECL_CHAIN (new_var) = new_decls;
606 new_decls = new_var;
607
608 /* Also copy value-expressions. */
609 if (TREE_CODE (new_var) == VAR_DECL
610 && DECL_HAS_VALUE_EXPR_P (new_var))
611 {
612 tree tem = DECL_VALUE_EXPR (new_var);
613 bool old_regimplify = id->regimplify;
614 id->remapping_type_depth++;
615 walk_tree (&tem, copy_tree_body_r, id, NULL);
616 id->remapping_type_depth--;
617 id->regimplify = old_regimplify;
618 SET_DECL_VALUE_EXPR (new_var, tem);
619 }
620 }
621 }
622
623 return nreverse (new_decls);
624}
625
626/* Copy the BLOCK to contain remapped versions of the variables
627 therein. And hook the new block into the block-tree. */
628
629static void
630remap_block (tree *block, copy_body_data *id)
631{
632 tree old_block;
633 tree new_block;
634
635 /* Make the new block. */
636 old_block = *block;
637 new_block = make_node (BLOCK);
638 TREE_USED (new_block) = TREE_USED (old_block);
639 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
640 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
641 BLOCK_NONLOCALIZED_VARS (new_block)
642 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
643 *block = new_block;
644
645 /* Remap its variables. */
646 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
647 &BLOCK_NONLOCALIZED_VARS (new_block),
648 id);
649
650 if (id->transform_lang_insert_block)
651 id->transform_lang_insert_block (new_block);
652
653 /* Remember the remapped block. */
654 insert_decl_map (id, old_block, new_block);
655}
656
657/* Copy the whole block tree and root it in id->block. */
658static tree
659remap_blocks (tree block, copy_body_data *id)
660{
661 tree t;
662 tree new_tree = block;
663
664 if (!block)
665 return NULL;
666
667 remap_block (&new_tree, id);
668 gcc_assert (new_tree != block);
669 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
670 prepend_lexical_block (new_tree, remap_blocks (t, id));
671 /* Blocks are in arbitrary order, but make things slightly prettier and do
672 not swap order when producing a copy. */
673 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
674 return new_tree;
675}
676
677static void
678copy_statement_list (tree *tp)
679{
680 tree_stmt_iterator oi, ni;
681 tree new_tree;
682
683 new_tree = alloc_stmt_list ();
684 ni = tsi_start (new_tree);
685 oi = tsi_start (*tp);
686 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
687 *tp = new_tree;
688
689 for (; !tsi_end_p (oi); tsi_next (&oi))
690 {
691 tree stmt = tsi_stmt (oi);
692 if (TREE_CODE (stmt) == STATEMENT_LIST)
693 /* This copy is not redundant; tsi_link_after will smash this
694 STATEMENT_LIST into the end of the one we're building, and we
695 don't want to do that with the original. */
696 copy_statement_list (&stmt);
697 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
698 }
699}
700
701static void
702copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
703{
704 tree block = BIND_EXPR_BLOCK (*tp);
705 /* Copy (and replace) the statement. */
706 copy_tree_r (tp, walk_subtrees, NULL);
707 if (block)
708 {
709 remap_block (&block, id);
710 BIND_EXPR_BLOCK (*tp) = block;
711 }
712
713 if (BIND_EXPR_VARS (*tp))
714 /* This will remap a lot of the same decls again, but this should be
715 harmless. */
716 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
717}
718
719
720/* Create a new gimple_seq by remapping all the statements in BODY
721 using the inlining information in ID. */
722
723static gimple_seq
724remap_gimple_seq (gimple_seq body, copy_body_data *id)
725{
726 gimple_stmt_iterator si;
727 gimple_seq new_body = NULL;
728
729 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
730 {
731 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
732 gimple_seq_add_stmt (&new_body, new_stmt);
733 }
734
735 return new_body;
736}
737
738
739/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
740 block using the mapping information in ID. */
741
742static gimple
743copy_gimple_bind (gimple stmt, copy_body_data *id)
744{
745 gimple new_bind;
746 tree new_block, new_vars;
747 gimple_seq body, new_body;
748
749 /* Copy the statement. Note that we purposely don't use copy_stmt
750 here because we need to remap statements as we copy. */
751 body = gimple_bind_body (stmt);
752 new_body = remap_gimple_seq (body, id);
753
754 new_block = gimple_bind_block (stmt);
755 if (new_block)
756 remap_block (&new_block, id);
757
758 /* This will remap a lot of the same decls again, but this should be
759 harmless. */
760 new_vars = gimple_bind_vars (stmt);
761 if (new_vars)
762 new_vars = remap_decls (new_vars, NULL, id);
763
764 new_bind = gimple_build_bind (new_vars, new_body, new_block);
765
766 return new_bind;
767}
768
769
770/* Remap the GIMPLE operand pointed to by *TP. DATA is really a
771 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
 772   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
 773   recursing into the child nodes of *TP.  */
774
775static tree
776remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
777{
778 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
779 copy_body_data *id = (copy_body_data *) wi_p->info;
780 tree fn = id->src_fn;
781
782 if (TREE_CODE (*tp) == SSA_NAME)
783 {
784 *tp = remap_ssa_name (*tp, id);
785 *walk_subtrees = 0;
786 return NULL;
787 }
788 else if (auto_var_in_fn_p (*tp, fn))
789 {
790 /* Local variables and labels need to be replaced by equivalent
791 variables. We don't want to copy static variables; there's
792 only one of those, no matter how many times we inline the
793 containing function. Similarly for globals from an outer
794 function. */
795 tree new_decl;
796
797 /* Remap the declaration. */
798 new_decl = remap_decl (*tp, id);
799 gcc_assert (new_decl);
800 /* Replace this variable with the copy. */
801 STRIP_TYPE_NOPS (new_decl);
802 /* ??? The C++ frontend uses void * pointer zero to initialize
803 any other type. This confuses the middle-end type verification.
804 As cloned bodies do not go through gimplification again the fixup
805 there doesn't trigger. */
806 if (TREE_CODE (new_decl) == INTEGER_CST
807 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
808 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
809 *tp = new_decl;
810 *walk_subtrees = 0;
811 }
812 else if (TREE_CODE (*tp) == STATEMENT_LIST)
813 gcc_unreachable ();
814 else if (TREE_CODE (*tp) == SAVE_EXPR)
815 gcc_unreachable ();
816 else if (TREE_CODE (*tp) == LABEL_DECL
817 && (!DECL_CONTEXT (*tp)
818 || decl_function_context (*tp) == id->src_fn))
819 /* These may need to be remapped for EH handling. */
820 *tp = remap_decl (*tp, id);
821 else if (TREE_CODE (*tp) == FIELD_DECL)
822 {
823 /* If the enclosing record type is variably_modified_type_p, the field
824 has already been remapped. Otherwise, it need not be. */
825 tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
826 if (n)
827 *tp = *n;
828 *walk_subtrees = 0;
829 }
830 else if (TYPE_P (*tp))
831 /* Types may need remapping as well. */
832 *tp = remap_type (*tp, id);
833 else if (CONSTANT_CLASS_P (*tp))
834 {
835 /* If this is a constant, we have to copy the node iff the type
836 will be remapped. copy_tree_r will not copy a constant. */
837 tree new_type = remap_type (TREE_TYPE (*tp), id);
838
839 if (new_type == TREE_TYPE (*tp))
840 *walk_subtrees = 0;
841
842 else if (TREE_CODE (*tp) == INTEGER_CST)
843 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
844 TREE_INT_CST_HIGH (*tp));
845 else
846 {
847 *tp = copy_node (*tp);
848 TREE_TYPE (*tp) = new_type;
849 }
850 }
851 else
852 {
853 /* Otherwise, just copy the node. Note that copy_tree_r already
854 knows not to copy VAR_DECLs, etc., so this is safe. */
855
856 /* We should never have TREE_BLOCK set on non-statements. */
857 if (EXPR_P (*tp))
858 gcc_assert (!TREE_BLOCK (*tp));
859
860 if (TREE_CODE (*tp) == MEM_REF)
861 {
862 tree ptr = TREE_OPERAND (*tp, 0);
863 tree type = remap_type (TREE_TYPE (*tp), id);
864 tree old = *tp;
865
866 /* We need to re-canonicalize MEM_REFs from inline substitutions
867 that can happen when a pointer argument is an ADDR_EXPR.
868 Recurse here manually to allow that. */
869 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
870 *tp = fold_build2 (MEM_REF, type,
871 ptr, TREE_OPERAND (*tp, 1));
872 TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
873 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
874 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
875 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
876 *walk_subtrees = 0;
877 return NULL;
878 }
879
880 /* Here is the "usual case". Copy this tree node, and then
881 tweak some special cases. */
882 copy_tree_r (tp, walk_subtrees, NULL);
883
884 if (TREE_CODE (*tp) != OMP_CLAUSE)
885 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
886
 887      /* Global variables we haven't seen yet need to go into referenced
 888	 vars, unless they are referenced only from types.  */
889 if (gimple_in_ssa_p (cfun)
890 && TREE_CODE (*tp) == VAR_DECL
891 && id->remapping_type_depth == 0
892 && !processing_debug_stmt)
893 add_referenced_var (*tp);
894
895 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
896 {
897 /* The copied TARGET_EXPR has never been expanded, even if the
898 original node was expanded already. */
899 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
900 TREE_OPERAND (*tp, 3) = NULL_TREE;
901 }
902 else if (TREE_CODE (*tp) == ADDR_EXPR)
903 {
904 /* Variable substitution need not be simple. In particular,
905 the MEM_REF substitution above. Make sure that
906 TREE_CONSTANT and friends are up-to-date. But make sure
907 to not improperly set TREE_BLOCK on some sub-expressions. */
908 int invariant = is_gimple_min_invariant (*tp);
909 tree block = id->block;
910 id->block = NULL_TREE;
911 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
912 id->block = block;
913 recompute_tree_invariant_for_addr_expr (*tp);
914
915 /* If this used to be invariant, but is not any longer,
916 then regimplification is probably needed. */
917 if (invariant && !is_gimple_min_invariant (*tp))
918 id->regimplify = true;
919
920 *walk_subtrees = 0;
921 }
922 }
923
924 /* Keep iterating. */
925 return NULL_TREE;
926}
927
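/* Editorial illustration (hypothetical GIMPLE, not from the original
   source) of the MEM_REF re-canonicalization in remap_gimple_op_r: if the
   caller passes the address of a local,

       g (&a);        with callee parameter  int *p
       ... = *p;      i.e.  MEM[(int *) p]

   then after p is substituted by &a the reference becomes MEM[(int *) &a],
   which fold_build2 can canonicalize (here simply to the variable a, since
   the offset is zero and the types match).  The same idea underlies the
   INDIRECT_REF and MEM_REF handling in copy_tree_body_r below.  */
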
928
929/* Called from copy_body_id via walk_tree. DATA is really a
930 `copy_body_data *'. */
931
932tree
933copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
934{
935 copy_body_data *id = (copy_body_data *) data;
936 tree fn = id->src_fn;
937 tree new_block;
938
939 /* Begin by recognizing trees that we'll completely rewrite for the
940 inlining context. Our output for these trees is completely
 941   different from our input (e.g. RETURN_EXPR is deleted, and morphs
942 into an edge). Further down, we'll handle trees that get
943 duplicated and/or tweaked. */
944
945 /* When requested, RETURN_EXPRs should be transformed to just the
946 contained MODIFY_EXPR. The branch semantics of the return will
947 be handled elsewhere by manipulating the CFG rather than a statement. */
948 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
949 {
950 tree assignment = TREE_OPERAND (*tp, 0);
951
952 /* If we're returning something, just turn that into an
953 assignment into the equivalent of the original RESULT_DECL.
954 If the "assignment" is just the result decl, the result
955 decl has already been set (e.g. a recent "foo (&result_decl,
956 ...)"); just toss the entire RETURN_EXPR. */
957 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
958 {
959 /* Replace the RETURN_EXPR with (a copy of) the
960 MODIFY_EXPR hanging underneath. */
961 *tp = copy_node (assignment);
962 }
963 else /* Else the RETURN_EXPR returns no value. */
964 {
965 *tp = NULL;
966 return (tree) (void *)1;
967 }
968 }
969 else if (TREE_CODE (*tp) == SSA_NAME)
970 {
971 *tp = remap_ssa_name (*tp, id);
972 *walk_subtrees = 0;
973 return NULL;
974 }
975
976 /* Local variables and labels need to be replaced by equivalent
977 variables. We don't want to copy static variables; there's only
978 one of those, no matter how many times we inline the containing
979 function. Similarly for globals from an outer function. */
980 else if (auto_var_in_fn_p (*tp, fn))
981 {
982 tree new_decl;
983
984 /* Remap the declaration. */
985 new_decl = remap_decl (*tp, id);
986 gcc_assert (new_decl);
987 /* Replace this variable with the copy. */
988 STRIP_TYPE_NOPS (new_decl);
989 *tp = new_decl;
990 *walk_subtrees = 0;
991 }
992 else if (TREE_CODE (*tp) == STATEMENT_LIST)
993 copy_statement_list (tp);
994 else if (TREE_CODE (*tp) == SAVE_EXPR
995 || TREE_CODE (*tp) == TARGET_EXPR)
996 remap_save_expr (tp, id->decl_map, walk_subtrees);
997 else if (TREE_CODE (*tp) == LABEL_DECL
998 && (! DECL_CONTEXT (*tp)
999 || decl_function_context (*tp) == id->src_fn))
1000 /* These may need to be remapped for EH handling. */
1001 *tp = remap_decl (*tp, id);
1002 else if (TREE_CODE (*tp) == BIND_EXPR)
1003 copy_bind_expr (tp, walk_subtrees, id);
1004 /* Types may need remapping as well. */
1005 else if (TYPE_P (*tp))
1006 *tp = remap_type (*tp, id);
1007
1008 /* If this is a constant, we have to copy the node iff the type will be
1009 remapped. copy_tree_r will not copy a constant. */
1010 else if (CONSTANT_CLASS_P (*tp))
1011 {
1012 tree new_type = remap_type (TREE_TYPE (*tp), id);
1013
1014 if (new_type == TREE_TYPE (*tp))
1015 *walk_subtrees = 0;
1016
1017 else if (TREE_CODE (*tp) == INTEGER_CST)
1018 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1019 TREE_INT_CST_HIGH (*tp));
1020 else
1021 {
1022 *tp = copy_node (*tp);
1023 TREE_TYPE (*tp) = new_type;
1024 }
1025 }
1026
1027 /* Otherwise, just copy the node. Note that copy_tree_r already
1028 knows not to copy VAR_DECLs, etc., so this is safe. */
1029 else
1030 {
1031 /* Here we handle trees that are not completely rewritten.
1032 First we detect some inlining-induced bogosities for
1033 discarding. */
1034 if (TREE_CODE (*tp) == MODIFY_EXPR
1035 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1036 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1037 {
1038 /* Some assignments VAR = VAR; don't generate any rtl code
1039 and thus don't count as variable modification. Avoid
1040 keeping bogosities like 0 = 0. */
1041 tree decl = TREE_OPERAND (*tp, 0), value;
1042 tree *n;
1043
1044 n = (tree *) pointer_map_contains (id->decl_map, decl);
1045 if (n)
1046 {
1047 value = *n;
1048 STRIP_TYPE_NOPS (value);
1049 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1050 {
1051 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1052 return copy_tree_body_r (tp, walk_subtrees, data);
1053 }
1054 }
1055 }
1056 else if (TREE_CODE (*tp) == INDIRECT_REF)
1057 {
1058 /* Get rid of *& from inline substitutions that can happen when a
1059 pointer argument is an ADDR_EXPR. */
1060 tree decl = TREE_OPERAND (*tp, 0);
1061 tree *n;
1062
1063 n = (tree *) pointer_map_contains (id->decl_map, decl);
1064 if (n)
1065 {
1066 tree new_tree;
1067 tree old;
1068 /* If we happen to get an ADDR_EXPR in n->value, strip
1069 it manually here as we'll eventually get ADDR_EXPRs
1070 which lie about their types pointed to. In this case
1071 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1072 but we absolutely rely on that. As fold_indirect_ref
1073 does other useful transformations, try that first, though. */
1074 tree type = TREE_TYPE (TREE_TYPE (*n));
1075 if (id->do_not_unshare)
1076 new_tree = *n;
1077 else
1078 new_tree = unshare_expr (*n);
1079 old = *tp;
1080 *tp = gimple_fold_indirect_ref (new_tree);
1081 if (! *tp)
1082 {
1083 if (TREE_CODE (new_tree) == ADDR_EXPR)
1084 {
1085 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1086 type, new_tree);
1087 /* ??? We should either assert here or build
1088 a VIEW_CONVERT_EXPR instead of blindly leaking
1089 incompatible types to our IL. */
1090 if (! *tp)
1091 *tp = TREE_OPERAND (new_tree, 0);
1092 }
1093 else
1094 {
1095 *tp = build1 (INDIRECT_REF, type, new_tree);
1096 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1097 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1098 TREE_READONLY (*tp) = TREE_READONLY (old);
1099 TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
1100 }
1101 }
1102 *walk_subtrees = 0;
1103 return NULL;
1104 }
1105 }
1106 else if (TREE_CODE (*tp) == MEM_REF)
1107 {
1108 /* We need to re-canonicalize MEM_REFs from inline substitutions
1109 that can happen when a pointer argument is an ADDR_EXPR. */
1110 tree decl = TREE_OPERAND (*tp, 0);
1111 tree *n;
1112
1113 n = (tree *) pointer_map_contains (id->decl_map, decl);
1114 if (n)
1115 {
1116 tree old = *tp;
1117 *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
1118 unshare_expr (*n), TREE_OPERAND (*tp, 1));
1119 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1120 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1121 *walk_subtrees = 0;
1122 return NULL;
1123 }
1124 }
1125
1126 /* Here is the "usual case". Copy this tree node, and then
1127 tweak some special cases. */
1128 copy_tree_r (tp, walk_subtrees, NULL);
1129
 1130      /* Global variables we haven't seen yet need to go into referenced
 1131	 vars, unless they are referenced only from types or debug stmts.  */
1132 if (gimple_in_ssa_p (cfun)
1133 && TREE_CODE (*tp) == VAR_DECL
1134 && id->remapping_type_depth == 0
1135 && !processing_debug_stmt)
1136 add_referenced_var (*tp);
1137
 1138      /* If EXPR has a block defined, map it to the newly constructed block.
 1139	 When inlining we want EXPRs without a block to appear in the block
 1140	 of the function call if we are not remapping a type.  */
1141 if (EXPR_P (*tp))
1142 {
1143 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1144 if (TREE_BLOCK (*tp))
1145 {
1146 tree *n;
1147 n = (tree *) pointer_map_contains (id->decl_map,
1148 TREE_BLOCK (*tp));
1149 gcc_assert (n || id->remapping_type_depth != 0);
1150 if (n)
1151 new_block = *n;
1152 }
1153 TREE_BLOCK (*tp) = new_block;
1154 }
1155
1156 if (TREE_CODE (*tp) != OMP_CLAUSE)
1157 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1158
1159 /* The copied TARGET_EXPR has never been expanded, even if the
1160 original node was expanded already. */
1161 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1162 {
1163 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1164 TREE_OPERAND (*tp, 3) = NULL_TREE;
1165 }
1166
1167 /* Variable substitution need not be simple. In particular, the
1168 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1169 and friends are up-to-date. */
1170 else if (TREE_CODE (*tp) == ADDR_EXPR)
1171 {
1172 int invariant = is_gimple_min_invariant (*tp);
1173 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1174
1175 /* Handle the case where we substituted an INDIRECT_REF
1176 into the operand of the ADDR_EXPR. */
1177 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1178 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1179 else
1180 recompute_tree_invariant_for_addr_expr (*tp);
1181
1182 /* If this used to be invariant, but is not any longer,
1183 then regimplification is probably needed. */
1184 if (invariant && !is_gimple_min_invariant (*tp))
1185 id->regimplify = true;
1186
1187 *walk_subtrees = 0;
1188 }
1189 }
1190
1191 /* Keep iterating. */
1192 return NULL_TREE;
1193}
1194
1195/* Helper for remap_gimple_stmt. Given an EH region number for the
1196 source function, map that to the duplicate EH region number in
1197 the destination function. */
1198
1199static int
1200remap_eh_region_nr (int old_nr, copy_body_data *id)
1201{
1202 eh_region old_r, new_r;
1203 void **slot;
1204
1205 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1206 slot = pointer_map_contains (id->eh_map, old_r);
1207 new_r = (eh_region) *slot;
1208
1209 return new_r->index;
1210}
1211
1212/* Similar, but operate on INTEGER_CSTs. */
1213
1214static tree
1215remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1216{
1217 int old_nr, new_nr;
1218
1219 old_nr = tree_low_cst (old_t_nr, 0);
1220 new_nr = remap_eh_region_nr (old_nr, id);
1221
1222 return build_int_cst (integer_type_node, new_nr);
1223}
1224
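/* Editorial example (hypothetical region numbers, not from the original
   source): if EH region 3 of the source function was duplicated into the
   destination function as region 7, then a copied "__builtin_eh_pointer (3)"
   call becomes "__builtin_eh_pointer (7)", and a GIMPLE_RESX or
   GIMPLE_EH_DISPATCH referring to region 3 is rewritten to refer to region 7
   (see the id->eh_map handling in remap_gimple_stmt below).  */
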
1225/* Helper for copy_bb. Remap statement STMT using the inlining
1226 information in ID. Return the new statement copy. */
1227
1228static gimple
1229remap_gimple_stmt (gimple stmt, copy_body_data *id)
1230{
1231 gimple copy = NULL;
1232 struct walk_stmt_info wi;
1233 tree new_block;
1234 bool skip_first = false;
1235
1236 /* Begin by recognizing trees that we'll completely rewrite for the
1237 inlining context. Our output for these trees is completely
 1238   different from our input (e.g. RETURN_EXPR is deleted, and morphs
1239 into an edge). Further down, we'll handle trees that get
1240 duplicated and/or tweaked. */
1241
1242 /* When requested, GIMPLE_RETURNs should be transformed to just the
1243 contained GIMPLE_ASSIGN. The branch semantics of the return will
1244 be handled elsewhere by manipulating the CFG rather than the
1245 statement. */
1246 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1247 {
1248 tree retval = gimple_return_retval (stmt);
1249
1250 /* If we're returning something, just turn that into an
1251 assignment into the equivalent of the original RESULT_DECL.
1252 If RETVAL is just the result decl, the result decl has
1253 already been set (e.g. a recent "foo (&result_decl, ...)");
1254 just toss the entire GIMPLE_RETURN. */
1255 if (retval
1256 && (TREE_CODE (retval) != RESULT_DECL
1257 && (TREE_CODE (retval) != SSA_NAME
1258 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1259 {
1260 copy = gimple_build_assign (id->retvar, retval);
1261 /* id->retvar is already substituted. Skip it on later remapping. */
1262 skip_first = true;
1263 }
1264 else
1265 return gimple_build_nop ();
1266 }
1267 else if (gimple_has_substatements (stmt))
1268 {
1269 gimple_seq s1, s2;
1270
1271 /* When cloning bodies from the C++ front end, we will be handed bodies
1272 in High GIMPLE form. Handle here all the High GIMPLE statements that
1273 have embedded statements. */
1274 switch (gimple_code (stmt))
1275 {
1276 case GIMPLE_BIND:
1277 copy = copy_gimple_bind (stmt, id);
1278 break;
1279
1280 case GIMPLE_CATCH:
1281 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1282 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1283 break;
1284
1285 case GIMPLE_EH_FILTER:
1286 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1287 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1288 break;
1289
1290 case GIMPLE_TRY:
1291 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1292 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1293 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1294 break;
1295
1296 case GIMPLE_WITH_CLEANUP_EXPR:
1297 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1298 copy = gimple_build_wce (s1);
1299 break;
1300
1301 case GIMPLE_OMP_PARALLEL:
1302 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1303 copy = gimple_build_omp_parallel
1304 (s1,
1305 gimple_omp_parallel_clauses (stmt),
1306 gimple_omp_parallel_child_fn (stmt),
1307 gimple_omp_parallel_data_arg (stmt));
1308 break;
1309
1310 case GIMPLE_OMP_TASK:
1311 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1312 copy = gimple_build_omp_task
1313 (s1,
1314 gimple_omp_task_clauses (stmt),
1315 gimple_omp_task_child_fn (stmt),
1316 gimple_omp_task_data_arg (stmt),
1317 gimple_omp_task_copy_fn (stmt),
1318 gimple_omp_task_arg_size (stmt),
1319 gimple_omp_task_arg_align (stmt));
1320 break;
1321
1322 case GIMPLE_OMP_FOR:
1323 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1324 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1325 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1326 gimple_omp_for_collapse (stmt), s2);
1327 {
1328 size_t i;
1329 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1330 {
1331 gimple_omp_for_set_index (copy, i,
1332 gimple_omp_for_index (stmt, i));
1333 gimple_omp_for_set_initial (copy, i,
1334 gimple_omp_for_initial (stmt, i));
1335 gimple_omp_for_set_final (copy, i,
1336 gimple_omp_for_final (stmt, i));
1337 gimple_omp_for_set_incr (copy, i,
1338 gimple_omp_for_incr (stmt, i));
1339 gimple_omp_for_set_cond (copy, i,
1340 gimple_omp_for_cond (stmt, i));
1341 }
1342 }
1343 break;
1344
1345 case GIMPLE_OMP_MASTER:
1346 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1347 copy = gimple_build_omp_master (s1);
1348 break;
1349
1350 case GIMPLE_OMP_ORDERED:
1351 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1352 copy = gimple_build_omp_ordered (s1);
1353 break;
1354
1355 case GIMPLE_OMP_SECTION:
1356 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1357 copy = gimple_build_omp_section (s1);
1358 break;
1359
1360 case GIMPLE_OMP_SECTIONS:
1361 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1362 copy = gimple_build_omp_sections
1363 (s1, gimple_omp_sections_clauses (stmt));
1364 break;
1365
1366 case GIMPLE_OMP_SINGLE:
1367 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1368 copy = gimple_build_omp_single
1369 (s1, gimple_omp_single_clauses (stmt));
1370 break;
1371
1372 case GIMPLE_OMP_CRITICAL:
1373 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1374 copy
1375 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1376 break;
1377
1378 case GIMPLE_TRANSACTION:
1379 s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
1380 copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
1381 gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
1382 break;
1383
1384 default:
1385 gcc_unreachable ();
1386 }
1387 }
1388 else
1389 {
1390 if (gimple_assign_copy_p (stmt)
1391 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1392 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1393 {
1394 /* Here we handle statements that are not completely rewritten.
1395 First we detect some inlining-induced bogosities for
1396 discarding. */
1397
1398 /* Some assignments VAR = VAR; don't generate any rtl code
1399 and thus don't count as variable modification. Avoid
1400 keeping bogosities like 0 = 0. */
1401 tree decl = gimple_assign_lhs (stmt), value;
1402 tree *n;
1403
1404 n = (tree *) pointer_map_contains (id->decl_map, decl);
1405 if (n)
1406 {
1407 value = *n;
1408 STRIP_TYPE_NOPS (value);
1409 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1410 return gimple_build_nop ();
1411 }
1412 }
1413
1414 if (gimple_debug_bind_p (stmt))
1415 {
1416 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1417 gimple_debug_bind_get_value (stmt),
1418 stmt);
1419 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1420 return copy;
1421 }
1422 if (gimple_debug_source_bind_p (stmt))
1423 {
1424 copy = gimple_build_debug_source_bind
1425 (gimple_debug_source_bind_get_var (stmt),
1426 gimple_debug_source_bind_get_value (stmt), stmt);
1427 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1428 return copy;
1429 }
1430
1431 /* Create a new deep copy of the statement. */
1432 copy = gimple_copy (stmt);
1433
1434 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1435 RESX and EH_DISPATCH. */
1436 if (id->eh_map)
1437 switch (gimple_code (copy))
1438 {
1439 case GIMPLE_CALL:
1440 {
1441 tree r, fndecl = gimple_call_fndecl (copy);
1442 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1443 switch (DECL_FUNCTION_CODE (fndecl))
1444 {
1445 case BUILT_IN_EH_COPY_VALUES:
1446 r = gimple_call_arg (copy, 1);
1447 r = remap_eh_region_tree_nr (r, id);
1448 gimple_call_set_arg (copy, 1, r);
1449 /* FALLTHRU */
1450
1451 case BUILT_IN_EH_POINTER:
1452 case BUILT_IN_EH_FILTER:
1453 r = gimple_call_arg (copy, 0);
1454 r = remap_eh_region_tree_nr (r, id);
1455 gimple_call_set_arg (copy, 0, r);
1456 break;
1457
1458 default:
1459 break;
1460 }
1461
1462 /* Reset alias info if we didn't apply measures to
1463 keep it valid over inlining by setting DECL_PT_UID. */
1464 if (!id->src_cfun->gimple_df
1465 || !id->src_cfun->gimple_df->ipa_pta)
1466 gimple_call_reset_alias_info (copy);
1467 }
1468 break;
1469
1470 case GIMPLE_RESX:
1471 {
1472 int r = gimple_resx_region (copy);
1473 r = remap_eh_region_nr (r, id);
1474 gimple_resx_set_region (copy, r);
1475 }
1476 break;
1477
1478 case GIMPLE_EH_DISPATCH:
1479 {
1480 int r = gimple_eh_dispatch_region (copy);
1481 r = remap_eh_region_nr (r, id);
1482 gimple_eh_dispatch_set_region (copy, r);
1483 }
1484 break;
1485
1486 default:
1487 break;
1488 }
1489 }
1490
1491 /* If STMT has a block defined, map it to the newly constructed
1492 block. When inlining we want statements without a block to
1493 appear in the block of the function call. */
1494 new_block = id->block;
1495 if (gimple_block (copy))
1496 {
1497 tree *n;
1498 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1499 gcc_assert (n);
1500 new_block = *n;
1501 }
1502
1503 gimple_set_block (copy, new_block);
1504
1505 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1506 return copy;
1507
1508 /* Remap all the operands in COPY. */
1509 memset (&wi, 0, sizeof (wi));
1510 wi.info = id;
1511 if (skip_first)
1512 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1513 else
1514 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1515
1516 /* Clear the copied virtual operands. We are not remapping them here
1517 but are going to recreate them from scratch. */
1518 if (gimple_has_mem_ops (copy))
1519 {
1520 gimple_set_vdef (copy, NULL_TREE);
1521 gimple_set_vuse (copy, NULL_TREE);
1522 }
1523
1524 return copy;
1525}
1526
1527
1528/* Copy basic block, scale profile accordingly. Edges will be taken care of
1529 later */
1530
1531static basic_block
1532copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1533 gcov_type count_scale)
1534{
1535 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1536 basic_block copy_basic_block;
1537 tree decl;
1538 gcov_type freq;
1539 basic_block prev;
1540
1541 /* Search for previous copied basic block. */
1542 prev = bb->prev_bb;
1543 while (!prev->aux)
1544 prev = prev->prev_bb;
1545
1546 /* create_basic_block() will append every new block to
1547 basic_block_info automatically. */
1548 copy_basic_block = create_basic_block (NULL, (void *) 0,
1549 (basic_block) prev->aux);
1550 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
1551
 1552   /* We are going to rebuild frequencies from scratch.  These values
 1553      are of only minor importance for driving canonicalize_loop_headers.  */
1554 freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
1555
1556 /* We recompute frequencies after inlining, so this is quite safe. */
1557 if (freq > BB_FREQ_MAX)
1558 freq = BB_FREQ_MAX;
1559 copy_basic_block->frequency = freq;
1560
1561 copy_gsi = gsi_start_bb (copy_basic_block);
1562
1563 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1564 {
1565 gimple stmt = gsi_stmt (gsi);
1566 gimple orig_stmt = stmt;
1567
1568 id->regimplify = false;
1569 stmt = remap_gimple_stmt (stmt, id);
1570 if (gimple_nop_p (stmt))
1571 continue;
1572
1573 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1574 seq_gsi = copy_gsi;
1575
1576 /* With return slot optimization we can end up with
1577 non-gimple (foo *)&this->m, fix that here. */
1578 if (is_gimple_assign (stmt)
1579 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1580 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1581 {
1582 tree new_rhs;
1583 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1584 gimple_assign_rhs1 (stmt),
1585 true, NULL, false,
1586 GSI_CONTINUE_LINKING);
1587 gimple_assign_set_rhs1 (stmt, new_rhs);
1588 id->regimplify = false;
1589 }
1590
1591 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1592
1593 if (id->regimplify)
1594 gimple_regimplify_operands (stmt, &seq_gsi);
1595
1596 /* If copy_basic_block has been empty at the start of this iteration,
1597 call gsi_start_bb again to get at the newly added statements. */
1598 if (gsi_end_p (copy_gsi))
1599 copy_gsi = gsi_start_bb (copy_basic_block);
1600 else
1601 gsi_next (&copy_gsi);
1602
 1603	  /* Process the new statement.  The call to gimple_regimplify_operands
 1604	     possibly turned the statement into multiple statements; we
 1605	     need to process all of them.  */
1606 do
1607 {
1608 tree fn;
1609
1610 stmt = gsi_stmt (copy_gsi);
1611 if (is_gimple_call (stmt)
1612 && gimple_call_va_arg_pack_p (stmt)
1613 && id->gimple_call)
1614 {
1615 /* __builtin_va_arg_pack () should be replaced by
1616 all arguments corresponding to ... in the caller. */
1617 tree p;
1618 gimple new_call;
1619 VEC(tree, heap) *argarray;
1620 size_t nargs = gimple_call_num_args (id->gimple_call);
1621 size_t n;
1622
1623 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1624 nargs--;
1625
1626 /* Create the new array of arguments. */
1627 n = nargs + gimple_call_num_args (stmt);
1628 argarray = VEC_alloc (tree, heap, n);
1629 VEC_safe_grow (tree, heap, argarray, n);
1630
1631 /* Copy all the arguments before '...' */
1632 memcpy (VEC_address (tree, argarray),
1633 gimple_call_arg_ptr (stmt, 0),
1634 gimple_call_num_args (stmt) * sizeof (tree));
1635
1636 /* Append the arguments passed in '...' */
1637 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1638 gimple_call_arg_ptr (id->gimple_call, 0)
1639 + (gimple_call_num_args (id->gimple_call) - nargs),
1640 nargs * sizeof (tree));
1641
1642 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1643 argarray);
1644
1645 VEC_free (tree, heap, argarray);
1646
1647 /* Copy all GIMPLE_CALL flags, location and block, except
1648 GF_CALL_VA_ARG_PACK. */
1649 gimple_call_copy_flags (new_call, stmt);
1650 gimple_call_set_va_arg_pack (new_call, false);
1651 gimple_set_location (new_call, gimple_location (stmt));
1652 gimple_set_block (new_call, gimple_block (stmt));
1653 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1654
1655 gsi_replace (&copy_gsi, new_call, false);
1656 stmt = new_call;
1657 }
1658 else if (is_gimple_call (stmt)
1659 && id->gimple_call
1660 && (decl = gimple_call_fndecl (stmt))
1661 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1662 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1663 {
1664 /* __builtin_va_arg_pack_len () should be replaced by
1665 the number of anonymous arguments. */
1666 size_t nargs = gimple_call_num_args (id->gimple_call);
1667 tree count, p;
1668 gimple new_stmt;
1669
1670 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1671 nargs--;
1672
1673 count = build_int_cst (integer_type_node, nargs);
1674 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1675 gsi_replace (&copy_gsi, new_stmt, false);
1676 stmt = new_stmt;
1677 }
1678
 1679	  /* Statements produced by inlining can be unfolded, especially
 1680	     when we have constant propagated some operands.  We can't fold
 1681	     them right now for two reasons:
 1682	     1) folding requires SSA_NAME_DEF_STMTs to be correct
 1683	     2) we can't change function calls to builtins.
 1684	     So we just mark the statement for later folding.  We mark
 1685	     all new statements, not just the statements that have changed
 1686	     by some nontrivial substitution, so that even statements made
 1687	     foldable indirectly are updated.  If this turns out to be
 1688	     expensive, copy_body can be told to watch for nontrivial
 1689	     changes.  */
1690 if (id->statements_to_fold)
1691 pointer_set_insert (id->statements_to_fold, stmt);
1692
1693 /* We're duplicating a CALL_EXPR. Find any corresponding
1694 callgraph edges and update or duplicate them. */
1695 if (is_gimple_call (stmt))
1696 {
1697 struct cgraph_edge *edge;
1698 int flags;
1699
1700 switch (id->transform_call_graph_edges)
1701 {
1702 case CB_CGE_DUPLICATE:
1703 edge = cgraph_edge (id->src_node, orig_stmt);
1704 if (edge)
1705 {
1706 int edge_freq = edge->frequency;
1707 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1708 gimple_uid (stmt),
1709 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1710 true);
1711 /* We could also just rescale the frequency, but
1712 doing so would introduce roundoff errors and make
1713 verifier unhappy. */
1714 edge->frequency
1715 = compute_call_stmt_bb_frequency (id->dst_node->decl,
1716 copy_basic_block);
1717 if (dump_file
1718 && profile_status_for_function (cfun) != PROFILE_ABSENT
1719 && (edge_freq > edge->frequency + 10
1720 || edge_freq < edge->frequency - 10))
1721 {
1722 fprintf (dump_file, "Edge frequency estimated by "
1723 "cgraph %i diverge from inliner's estimate %i\n",
1724 edge_freq,
1725 edge->frequency);
1726 fprintf (dump_file,
1727 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1728 bb->index,
1729 bb->frequency,
1730 copy_basic_block->frequency);
1731 }
1732 stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
1733 }
1734 break;
1735
1736 case CB_CGE_MOVE_CLONES:
1737 cgraph_set_call_stmt_including_clones (id->dst_node,
1738 orig_stmt, stmt);
1739 edge = cgraph_edge (id->dst_node, stmt);
1740 break;
1741
1742 case CB_CGE_MOVE:
1743 edge = cgraph_edge (id->dst_node, orig_stmt);
1744 if (edge)
1745 cgraph_set_call_stmt (edge, stmt);
1746 break;
1747
1748 default:
1749 gcc_unreachable ();
1750 }
1751
 1752	  /* Constant propagation on arguments done during inlining
 1753	     may create a new direct call.  Produce an edge for it.  */
1754 if ((!edge
1755 || (edge->indirect_inlining_edge
1756 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1757 && id->dst_node->analyzed
1758 && (fn = gimple_call_fndecl (stmt)) != NULL)
1759 {
1760 struct cgraph_node *dest = cgraph_get_node (fn);
1761
 1762	      /* We have a missing edge in the callgraph.  This can happen
 1763		 when previous inlining turned an indirect call into a
 1764		 direct call by constant propagating arguments, or when we are
 1765		 producing a dead clone (for further cloning).  In all
 1766		 other cases we hit a bug (incorrect node sharing is the
 1767		 most common reason for missing edges).  */
1768 gcc_assert (dest->needed || !dest->analyzed
1769 || dest->address_taken
1770 || !id->src_node->analyzed
1771 || !id->dst_node->analyzed);
1772 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1773 cgraph_create_edge_including_clones
1774 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1775 compute_call_stmt_bb_frequency (id->dst_node->decl,
1776 copy_basic_block),
1777 CIF_ORIGINALLY_INDIRECT_CALL);
1778 else
1779 cgraph_create_edge (id->dst_node, dest, stmt,
1780 bb->count,
1781 compute_call_stmt_bb_frequency
1782 (id->dst_node->decl, copy_basic_block))->inline_failed
1783 = CIF_ORIGINALLY_INDIRECT_CALL;
1784 if (dump_file)
1785 {
1786 fprintf (dump_file, "Created new direct edge to %s\n",
1787 cgraph_node_name (dest));
1788 }
1789 }
1790
1791 flags = gimple_call_flags (stmt);
1792 if (flags & ECF_MAY_BE_ALLOCA)
1793 cfun->calls_alloca = true;
1794 if (flags & ECF_RETURNS_TWICE)
1795 cfun->calls_setjmp = true;
1796 }
1797
1798 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1799 id->eh_map, id->eh_lp_nr);
1800
1801 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1802 {
1803 ssa_op_iter i;
1804 tree def;
1805
1806 find_new_referenced_vars (gsi_stmt (copy_gsi));
1807 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1808 if (TREE_CODE (def) == SSA_NAME)
1809 SSA_NAME_DEF_STMT (def) = stmt;
1810 }
1811
1812 gsi_next (&copy_gsi);
1813 }
1814 while (!gsi_end_p (copy_gsi));
1815
1816 copy_gsi = gsi_last_bb (copy_basic_block);
1817 }
1818
1819 return copy_basic_block;
1820}
1821
1822 /* Inserting a Single Entry Multiple Exit region in SSA form into code
1823 already in SSA form is quite easy, since the dominator relationship
1824 for the old basic blocks does not change.
1825 
1826 There is however an exception: inlining might change the dominator
1827 relation across EH edges from basic blocks within the inlined function
1828 that lead to landing pads in the function we inline into.
1829 
1830 The function fills in PHI_RESULTs of such PHI nodes if they refer
1831 to gimple regs.  Otherwise, it marks the PHI_RESULT of such
1832 PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
1833 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1834 set, which means that there will be no overlapping live ranges
1835 for the underlying symbol.
1836 
1837 This might change in the future if we allow redirecting of EH edges;
1838 we might then want to change the way we build the CFG pre-inlining
1839 to include all the possible edges.  */
1840static void
1841update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1842 bool can_throw, bool nonlocal_goto)
1843{
1844 edge e;
1845 edge_iterator ei;
1846
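  /* Look only at successor edges that leave the copied region: the
     destination either has no associated copy (NULL AUX, a block of the
     function we inline into, such as a landing pad) or maps back to the
     entry block.  */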
1847 FOR_EACH_EDGE (e, ei, bb->succs)
1848 if (!e->dest->aux
1849 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1850 {
1851 gimple phi;
1852 gimple_stmt_iterator si;
1853
1854 if (!nonlocal_goto)
1855 gcc_assert (e->flags & EDGE_EH);
1856
1857 if (!can_throw)
1858 gcc_assert (!(e->flags & EDGE_EH));
1859
1860 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1861 {
1862 edge re;
1863
1864 phi = gsi_stmt (si);
1865
1866 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1867 gcc_assert (!e->dest->aux);
1868
1869 gcc_assert ((e->flags & EDGE_EH)
1870 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1871
1872 if (!is_gimple_reg (PHI_RESULT (phi)))
1873 {
1874 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1875 continue;
1876 }
1877
1878 re = find_edge (ret_bb, e->dest);
1879 gcc_assert (re);
1880 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1881 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1882
1883 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1884 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1885 }
1886 }
1887}
1888
1889
1890/* Copy edges from BB into its copy constructed earlier, scale profile
1891 accordingly.  Edges will be taken care of later.  Assume that the aux
1892 pointers point to the copies of each BB.  Return true if any
1893 debug stmts are left after a statement that must end the basic block. */
1894
1895static bool
1896copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1897{
1898 basic_block new_bb = (basic_block) bb->aux;
1899 edge_iterator ei;
1900 edge old_edge;
1901 gimple_stmt_iterator si;
1902 int flags;
1903 bool need_debug_cleanup = false;
1904
1905 /* Use the indices from the original blocks to create edges for the
1906 new ones. */
1907 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1908 if (!(old_edge->flags & EDGE_EH))
1909 {
1910 edge new_edge;
1911
1912 flags = old_edge->flags;
1913
1914 /* Return edges do get a FALLTHRU flag when they get inlined.  */
1915 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1916 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1917 flags |= EDGE_FALLTHRU;
1918 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1919 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1920 new_edge->probability = old_edge->probability;
1921 }
1922
1923 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1924 return false;
1925
1926 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1927 {
1928 gimple copy_stmt;
1929 bool can_throw, nonlocal_goto;
1930
1931 copy_stmt = gsi_stmt (si);
1932 if (!is_gimple_debug (copy_stmt))
1933 {
1934 update_stmt (copy_stmt);
1935 if (gimple_in_ssa_p (cfun))
1936 mark_symbols_for_renaming (copy_stmt);
1937 }
1938
1939 /* Do this before the possible split_block. */
1940 gsi_next (&si);
1941
1942 /* If this tree could throw an exception, there are two
1943 cases where we need to add abnormal edge(s): the
1944 tree wasn't in a region and there is a "current
1945 region" in the caller; or the original tree had
1946 EH edges. In both cases split the block after the tree,
1947 and add abnormal edge(s) as needed; we need both
1948 those from the callee and the caller.
1949 We check whether the copy can throw, because the const
1950 propagation can change an INDIRECT_REF which throws
1951 into a COMPONENT_REF which doesn't. If the copy
1952 can throw, the original could also throw. */
1953 can_throw = stmt_can_throw_internal (copy_stmt);
1954 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1955
1956 if (can_throw || nonlocal_goto)
1957 {
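	  /* A stmt that can throw or perform an abnormal goto must end its
	     basic block.  Debug stmts may temporarily trail it; if only debug
	     stmts follow, do not split here but remember to move them to the
	     successor blocks later.  */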
1958 if (!gsi_end_p (si))
1959 {
1960 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1961 gsi_next (&si);
1962 if (gsi_end_p (si))
1963 need_debug_cleanup = true;
1964 }
1965 if (!gsi_end_p (si))
1966 /* Note that bb's predecessor edges aren't necessarily
1967 right at this point; split_block doesn't care. */
1968 {
1969 edge e = split_block (new_bb, copy_stmt);
1970
1971 new_bb = e->dest;
1972 new_bb->aux = e->src->aux;
1973 si = gsi_start_bb (new_bb);
1974 }
1975 }
1976
1977 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1978 make_eh_dispatch_edges (copy_stmt);
1979 else if (can_throw)
1980 make_eh_edges (copy_stmt);
1981
1982 if (nonlocal_goto)
1983 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1984
1985 if ((can_throw || nonlocal_goto)
1986 && gimple_in_ssa_p (cfun))
1987 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1988 can_throw, nonlocal_goto);
1989 }
1990 return need_debug_cleanup;
1991}
1992
1993 /* Copy the PHIs.  All blocks and edges have been copied; some blocks
1994 may have been split and new outgoing EH edges inserted.
1995 BB points to the block of the original function and AUX pointers link
1996 the original and newly copied blocks.  */
1997
1998static void
1999copy_phis_for_bb (basic_block bb, copy_body_data *id)
2000{
2001 basic_block const new_bb = (basic_block) bb->aux;
2002 edge_iterator ei;
2003 gimple phi;
2004 gimple_stmt_iterator si;
2005 edge new_edge;
2006 bool inserted = false;
2007
2008 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
2009 {
2010 tree res, new_res;
2011 gimple new_phi;
2012
2013 phi = gsi_stmt (si);
2014 res = PHI_RESULT (phi);
2015 new_res = res;
2016 if (is_gimple_reg (res))
2017 {
2018 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2019 SSA_NAME_DEF_STMT (new_res)
2020 = new_phi = create_phi_node (new_res, new_bb);
2021 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2022 {
2023 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2024 tree arg;
2025 tree new_arg;
2026 tree block = id->block;
2027 edge_iterator ei2;
2028
2029 /* When doing partial cloning, we allow PHIs on the entry block
2030 as long as all the arguments are the same. Find any input
2031 edge to see which argument to copy.  */
2032 if (!old_edge)
2033 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2034 if (!old_edge->src->aux)
2035 break;
2036
2037 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2038 new_arg = arg;
2039 id->block = NULL_TREE;
2040 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2041 id->block = block;
2042 gcc_assert (new_arg);
2043 /* With return slot optimization we can end up with
2044 non-gimple (foo *)&this->m, fix that here. */
2045 if (TREE_CODE (new_arg) != SSA_NAME
2046 && TREE_CODE (new_arg) != FUNCTION_DECL
2047 && !is_gimple_val (new_arg))
2048 {
2049 gimple_seq stmts = NULL;
2050 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2051 gsi_insert_seq_on_edge (new_edge, stmts);
2052 inserted = true;
2053 }
2054 add_phi_arg (new_phi, new_arg, new_edge,
2055 gimple_phi_arg_location_from_edge (phi, old_edge));
2056 }
2057 }
2058 }
2059
2060 /* Commit the delayed edge insertions. */
2061 if (inserted)
2062 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2063 gsi_commit_one_edge_insert (new_edge, NULL);
2064}
2065
2066
2067/* Wrapper for remap_decl so it can be used as a callback. */
2068
2069static tree
2070remap_decl_1 (tree decl, void *data)
2071{
2072 return remap_decl (decl, (copy_body_data *) data);
2073}
2074
2075 /* Build the struct function and associated data structures for the new
2076 clone NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */
2077
2078static void
2079initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2080{
2081 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2082 gcov_type count_scale;
2083
2084 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2085 count_scale = (REG_BR_PROB_BASE * count
2086 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2087 else
2088 count_scale = REG_BR_PROB_BASE;
2089
2090 /* Register specific tree functions. */
2091 gimple_register_cfg_hooks ();
2092
2093 /* Get clean struct function. */
2094 push_struct_function (new_fndecl);
2095
2096 /* We will rebuild these, so just sanity check that they are empty. */
2097 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2098 gcc_assert (cfun->local_decls == NULL);
2099 gcc_assert (cfun->cfg == NULL);
2100 gcc_assert (cfun->decl == new_fndecl);
2101
2102 /* Copy items we preserve during cloning. */
2103 cfun->static_chain_decl = src_cfun->static_chain_decl;
2104 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2105 cfun->function_end_locus = src_cfun->function_end_locus;
2106 cfun->curr_properties = src_cfun->curr_properties;
2107 cfun->last_verified = src_cfun->last_verified;
2108 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2109 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2110 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2111 cfun->stdarg = src_cfun->stdarg;
2112 cfun->after_inlining = src_cfun->after_inlining;
2113 cfun->can_throw_non_call_exceptions
2114 = src_cfun->can_throw_non_call_exceptions;
2115 cfun->returns_struct = src_cfun->returns_struct;
2116 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2117 cfun->after_tree_profile = src_cfun->after_tree_profile;
2118
2119 init_empty_tree_cfg ();
2120
2121 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2122 ENTRY_BLOCK_PTR->count =
2123 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2124 REG_BR_PROB_BASE);
2125 ENTRY_BLOCK_PTR->frequency
2126 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2127 EXIT_BLOCK_PTR->count =
2128 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2129 REG_BR_PROB_BASE);
2130 EXIT_BLOCK_PTR->frequency =
2131 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2132 if (src_cfun->eh)
2133 init_eh_for_function ();
2134
2135 if (src_cfun->gimple_df)
2136 {
2137 init_tree_ssa (cfun);
2138 cfun->gimple_df->in_ssa_p = true;
2139 init_ssa_operands ();
2140 }
2141 pop_cfun ();
2142}
2143
2144/* Helper function for copy_cfg_body. Move debug stmts from the end
2145 of NEW_BB to the beginning of successor basic blocks when needed. If the
2146 successor has multiple predecessors, reset the debug bind values,
2147 otherwise keep them.  */
2148
2149static void
2150maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2151{
2152 edge e;
2153 edge_iterator ei;
2154 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2155
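  /* Nothing needs to be done unless the last non-debug stmt can throw or
     make an abnormal goto (so it had to remain last when its block was
     split) and at least one debug stmt is stranded after it.  */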
2156 if (gsi_end_p (si)
2157 || gsi_one_before_end_p (si)
2158 || !(stmt_can_throw_internal (gsi_stmt (si))
2159 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2160 return;
2161
2162 FOR_EACH_EDGE (e, ei, new_bb->succs)
2163 {
2164 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2165 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2166 while (is_gimple_debug (gsi_stmt (ssi)))
2167 {
2168 gimple stmt = gsi_stmt (ssi), new_stmt;
2169 tree var;
2170 tree value;
2171
2172 /* For the last edge move the debug stmts instead of copying
2173 them. */
2174 if (ei_one_before_end_p (ei))
2175 {
2176 si = ssi;
2177 gsi_prev (&ssi);
2178 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2179 gimple_debug_bind_reset_value (stmt);
2180 gsi_remove (&si, false);
2181 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2182 continue;
2183 }
2184
2185 if (gimple_debug_bind_p (stmt))
2186 {
2187 var = gimple_debug_bind_get_var (stmt);
2188 if (single_pred_p (e->dest))
2189 {
2190 value = gimple_debug_bind_get_value (stmt);
2191 value = unshare_expr (value);
2192 }
2193 else
2194 value = NULL_TREE;
2195 new_stmt = gimple_build_debug_bind (var, value, stmt);
2196 }
2197 else if (gimple_debug_source_bind_p (stmt))
2198 {
2199 var = gimple_debug_source_bind_get_var (stmt);
2200 value = gimple_debug_source_bind_get_value (stmt);
2201 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2202 }
2203 else
2204 gcc_unreachable ();
2205 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2206 VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2207 gsi_prev (&ssi);
2208 }
2209 }
2210}
2211
2212/* Make a copy of the body of FN so that it can be inserted inline in
2213 another function. Walks FN via CFG, returns new fndecl. */
2214
2215static tree
2216copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2217 basic_block entry_block_map, basic_block exit_block_map,
2218 bitmap blocks_to_copy, basic_block new_entry)
2219{
2220 tree callee_fndecl = id->src_fn;
2221 /* Original cfun for the callee, doesn't change. */
2222 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2223 struct function *cfun_to_copy;
2224 basic_block bb;
2225 tree new_fndecl = NULL;
2226 bool need_debug_cleanup = false;
2227 gcov_type count_scale;
2228 int last;
2229 int incoming_frequency = 0;
2230 gcov_type incoming_count = 0;
2231
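  /* COUNT is the execution count requested for the copy.  Express it as a
     fixed-point fraction of the source entry block count, scaled by
     REG_BR_PROB_BASE, so that individual block and edge counts can later be
     rescaled with a single multiply and divide.  */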
2232 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2233 count_scale = (REG_BR_PROB_BASE * count
2234 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2235 else
2236 count_scale = REG_BR_PROB_BASE;
2237
2238 /* Register specific tree functions. */
2239 gimple_register_cfg_hooks ();
2240
2241 /* If we are inlining just a region of the function, make sure to connect
2242 the new entry to ENTRY_BLOCK_PTR.  Since the new entry can be part of a
2243 loop, we must compute the frequency and probability of ENTRY_BLOCK_PTR
2244 based on the frequencies and probabilities of edges incoming from the nonduplicated region.  */
2245 if (new_entry)
2246 {
2247 edge e;
2248 edge_iterator ei;
2249
2250 FOR_EACH_EDGE (e, ei, new_entry->preds)
2251 if (!e->src->aux)
2252 {
2253 incoming_frequency += EDGE_FREQUENCY (e);
2254 incoming_count += e->count;
2255 }
2256 incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
2257 incoming_frequency
2258 = incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
2259 ENTRY_BLOCK_PTR->count = incoming_count;
2260 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2261 }
2262
2263 /* Must have a CFG here at this point. */
2264 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2265 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2266
2267 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2268
2269 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2270 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2271 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2272 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2273
2274 /* Duplicate any exception-handling regions. */
2275 if (cfun->eh)
2276 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2277 remap_decl_1, id);
2278
2279 /* Use aux pointers to map the original blocks to their copies.  */
2280 FOR_EACH_BB_FN (bb, cfun_to_copy)
2281 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2282 {
2283 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2284 bb->aux = new_bb;
2285 new_bb->aux = bb;
2286 }
2287
2288 last = last_basic_block;
2289
2290 /* Now that we've duplicated the blocks, duplicate their edges. */
2291 FOR_ALL_BB_FN (bb, cfun_to_copy)
2292 if (!blocks_to_copy
2293 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2294 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2295
2296 if (new_entry)
2297 {
2298 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2299 e->probability = REG_BR_PROB_BASE;
2300 e->count = incoming_count;
2301 }
2302
2303 if (gimple_in_ssa_p (cfun))
2304 FOR_ALL_BB_FN (bb, cfun_to_copy)
2305 if (!blocks_to_copy
2306 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2307 copy_phis_for_bb (bb, id);
2308
2309 FOR_ALL_BB_FN (bb, cfun_to_copy)
2310 if (bb->aux)
2311 {
2312 if (need_debug_cleanup
2313 && bb->index != ENTRY_BLOCK
2314 && bb->index != EXIT_BLOCK)
2315 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2316 ((basic_block)bb->aux)->aux = NULL;
2317 bb->aux = NULL;
2318 }
2319
2320 /* Zero out AUX fields of blocks newly created during EH edge
2321 insertion.  */
2322 for (; last < last_basic_block; last++)
2323 {
2324 if (need_debug_cleanup)
2325 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2326 BASIC_BLOCK (last)->aux = NULL;
2327 }
2328 entry_block_map->aux = NULL;
2329 exit_block_map->aux = NULL;
2330
2331 if (id->eh_map)
2332 {
2333 pointer_map_destroy (id->eh_map);
2334 id->eh_map = NULL;
2335 }
2336
2337 return new_fndecl;
2338}
2339
2340/* Copy the debug STMT using ID. We deal with these statements in a
2341 special way: if any variable in their VALUE expression wasn't
2342 remapped yet, we won't remap it, because that would get decl uids
2343 out of sync, causing codegen differences between -g and -g0. If
2344 this arises, we drop the VALUE expression altogether. */
2345
2346static void
2347copy_debug_stmt (gimple stmt, copy_body_data *id)
2348{
2349 tree t, *n;
2350 struct walk_stmt_info wi;
2351
2352 t = id->block;
2353 if (gimple_block (stmt))
2354 {
2355 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2356 if (n)
2357 t = *n;
2358 }
2359 gimple_set_block (stmt, t);
2360
2361 /* Remap all the operands in COPY. */
2362 memset (&wi, 0, sizeof (wi));
2363 wi.info = id;
2364
2365 processing_debug_stmt = 1;
2366
2367 if (gimple_debug_source_bind_p (stmt))
2368 t = gimple_debug_source_bind_get_var (stmt);
2369 else
2370 t = gimple_debug_bind_get_var (stmt);
2371
2372 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2373 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2374 {
2375 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2376 t = *n;
2377 }
2378 else if (TREE_CODE (t) == VAR_DECL
2379 && !TREE_STATIC (t)
2380 && gimple_in_ssa_p (cfun)
2381 && !pointer_map_contains (id->decl_map, t)
2382 && !var_ann (t))
2383 /* T is a non-localized variable. */;
2384 else
2385 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2386
2387 if (gimple_debug_bind_p (stmt))
2388 {
2389 gimple_debug_bind_set_var (stmt, t);
2390
2391 if (gimple_debug_bind_has_value_p (stmt))
2392 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2393 remap_gimple_op_r, &wi, NULL);
2394
2395 /* Punt if any decl couldn't be remapped. */
2396 if (processing_debug_stmt < 0)
2397 gimple_debug_bind_reset_value (stmt);
2398 }
2399 else if (gimple_debug_source_bind_p (stmt))
2400 {
2401 gimple_debug_source_bind_set_var (stmt, t);
2402 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2403 remap_gimple_op_r, &wi, NULL);
2404 }
2405
2406 processing_debug_stmt = 0;
2407
2408 update_stmt (stmt);
2409 if (gimple_in_ssa_p (cfun))
2410 mark_symbols_for_renaming (stmt);
2411}
2412
2413/* Process deferred debug stmts. In order to give values better odds
2414 of being successfully remapped, we delay the processing of debug
2415 stmts until all other stmts that might require remapping are
2416 processed. */
2417
2418static void
2419copy_debug_stmts (copy_body_data *id)
2420{
2421 size_t i;
2422 gimple stmt;
2423
2424 if (!id->debug_stmts)
2425 return;
2426
2427 FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt)
2428 copy_debug_stmt (stmt, id);
2429
2430 VEC_free (gimple, heap, id->debug_stmts);
2431}
2432
2433/* Make a copy of the body of SRC_FN so that it can be inserted inline in
2434 another function. */
2435
2436static tree
2437copy_tree_body (copy_body_data *id)
2438{
2439 tree fndecl = id->src_fn;
2440 tree body = DECL_SAVED_TREE (fndecl);
2441
2442 walk_tree (&body, copy_tree_body_r, id, NULL);
2443
2444 return body;
2445}
2446
2447/* Make a copy of the body of FN so that it can be inserted inline in
2448 another function. */
2449
2450static tree
2451copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2452 basic_block entry_block_map, basic_block exit_block_map,
2453 bitmap blocks_to_copy, basic_block new_entry)
2454{
2455 tree fndecl = id->src_fn;
2456 tree body;
2457
2458 /* If this body has a CFG, walk CFG and copy. */
2459 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2460 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2461 blocks_to_copy, new_entry);
2462 copy_debug_stmts (id);
2463
2464 return body;
2465}
2466
2467/* Return true if VALUE is an ADDR_EXPR of an automatic variable
2468 defined in function FN, or of a data member thereof. */
2469
2470static bool
2471self_inlining_addr_expr (tree value, tree fn)
2472{
2473 tree var;
2474
2475 if (TREE_CODE (value) != ADDR_EXPR)
2476 return false;
2477
2478 var = get_base_address (TREE_OPERAND (value, 0));
2479
2480 return var && auto_var_in_fn_p (var, fn);
2481}
2482
2483/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2484 lexical block and line number information from base_stmt, if given,
2485 or from the last stmt of the block otherwise. */
2486
2487static gimple
2488insert_init_debug_bind (copy_body_data *id,
2489 basic_block bb, tree var, tree value,
2490 gimple base_stmt)
2491{
2492 gimple note;
2493 gimple_stmt_iterator gsi;
2494 tree tracked_var;
2495
2496 if (!gimple_in_ssa_p (id->src_cfun))
2497 return NULL;
2498
2499 if (!MAY_HAVE_DEBUG_STMTS)
2500 return NULL;
2501
2502 tracked_var = target_for_debug_bind (var);
2503 if (!tracked_var)
2504 return NULL;
2505
2506 if (bb)
2507 {
2508 gsi = gsi_last_bb (bb);
2509 if (!base_stmt && !gsi_end_p (gsi))
2510 base_stmt = gsi_stmt (gsi);
2511 }
2512
2513 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2514
2515 if (bb)
2516 {
2517 if (!gsi_end_p (gsi))
2518 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2519 else
2520 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2521 }
2522
2523 return note;
2524}
2525
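/* Insert INIT_STMT, which initializes a copied parameter or the return
   value, at the end of basic block BB: regimplify its operands, register
   any new symbols for SSA renaming, and emit a matching debug bind when
   debug info is wanted.  */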
2526static void
2527insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2528{
2529 /* If VAR represents a zero-sized variable, it's possible that the
2530 assignment statement may result in no gimple statements. */
2531 if (init_stmt)
2532 {
2533 gimple_stmt_iterator si = gsi_last_bb (bb);
2534
2535 /* We can end up with init statements that store to a non-register
2536 from a rhs with a conversion. Handle that here by forcing the
2537 rhs into a temporary. gimple_regimplify_operands is not
2538 prepared to do this for us. */
2539 if (!is_gimple_debug (init_stmt)
2540 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2541 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2542 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2543 {
2544 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2545 gimple_expr_type (init_stmt),
2546 gimple_assign_rhs1 (init_stmt));
2547 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2548 GSI_NEW_STMT);
2549 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2550 gimple_assign_set_rhs1 (init_stmt, rhs);
2551 }
2552 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2553 gimple_regimplify_operands (init_stmt, &si);
2554 mark_symbols_for_renaming (init_stmt);
2555
2556 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2557 {
2558 tree var, def = gimple_assign_lhs (init_stmt);
2559
2560 if (TREE_CODE (def) == SSA_NAME)
2561 var = SSA_NAME_VAR (def);
2562 else
2563 var = def;
2564
2565 insert_init_debug_bind (id, bb, var, def, init_stmt);
2566 }
2567 }
2568}
2569
2570 /* Initialize parameter P with VALUE.  If needed, produce an init statement
2571 at the end of BB.  When BB is NULL, we return the init statement to be
2572 output later.  */
2573static gimple
2574setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2575 basic_block bb, tree *vars)
2576{
2577 gimple init_stmt = NULL;
2578 tree var;
2579 tree rhs = value;
2580 tree def = (gimple_in_ssa_p (cfun)
2581 ? gimple_default_def (id->src_cfun, p) : NULL);
2582
2583 if (value
2584 && value != error_mark_node
2585 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2586 {
2587 /* If we can match up types by promotion/demotion do so. */
2588 if (fold_convertible_p (TREE_TYPE (p), value))
2589 rhs = fold_convert (TREE_TYPE (p), value);
2590 else
2591 {
2592 /* ??? For valid programs we should not end up here.
2593 Still if we end up with truly mismatched types here, fall back
2594 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2595 GIMPLE to the following passes. */
2596 if (!is_gimple_reg_type (TREE_TYPE (value))
2597 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2598 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2599 else
2600 rhs = build_zero_cst (TREE_TYPE (p));
2601 }
2602 }
2603
2604 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2605 here since the type of this decl must be visible to the calling
2606 function. */
2607 var = copy_decl_to_var (p, id);
2608
2609 /* We're actually using the newly-created var. */
2610 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2611 add_referenced_var (var);
2612
2613 /* Declare this new variable. */
2614 DECL_CHAIN (var) = *vars;
2615 *vars = var;
2616
2617 /* Make gimplifier happy about this variable. */
2618 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2619
2620 /* We are eventually using the value - make sure all variables
2621 referenced therein are properly recorded. */
2622 if (value
2623 && gimple_in_ssa_p (cfun)
2624 && TREE_CODE (value) == ADDR_EXPR)
2625 {
2626 tree base = get_base_address (TREE_OPERAND (value, 0));
2627 if (base && TREE_CODE (base) == VAR_DECL)
2628 add_referenced_var (base);
2629 }
2630
2631 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2632 we would not need to create a new variable here at all, if it
2633 weren't for debug info.  Still, we can just use the argument
2634 value.  */
2635 if (TREE_READONLY (p)
2636 && !TREE_ADDRESSABLE (p)
2637 && value && !TREE_SIDE_EFFECTS (value)
2638 && !def)
2639 {
2640 /* We may produce non-gimple trees by adding NOPs or introduce
2641 invalid sharing when the operand is not really constant.
2642 It is not a big deal to prohibit constant propagation here, as
2643 we will constant propagate in the DOM1 pass anyway.  */
2644 if (is_gimple_min_invariant (value)
2645 && useless_type_conversion_p (TREE_TYPE (p),
2646 TREE_TYPE (value))
2647 /* We have to be very careful about ADDR_EXPR. Make sure
2648 the base variable isn't a local variable of the inlined
2649 function, e.g., when doing recursive inlining, direct or
2650 mutually-recursive or whatever, which is why we don't
2651 just test whether fn == current_function_decl. */
2652 && ! self_inlining_addr_expr (value, fn))
2653 {
2654 insert_decl_map (id, p, value);
2655 insert_debug_decl_map (id, p, var);
2656 return insert_init_debug_bind (id, bb, var, value, NULL);
2657 }
2658 }
2659
2660 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2661 that way, when the PARM_DECL is encountered, it will be
2662 automatically replaced by the VAR_DECL. */
2663 insert_decl_map (id, p, var);
2664
2665 /* Even if P was TREE_READONLY, the new VAR should not be.
2666 In the original code, we would have constructed a
2667 temporary, and then the function body would have never
2668 changed the value of P. However, now, we will be
2669 constructing VAR directly. The constructor body may
2670 change its value multiple times as it is being
2671 constructed. Therefore, it must not be TREE_READONLY;
2672 the back-end assumes that a TREE_READONLY variable is
2673 assigned to only once.  */
2674 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2675 TREE_READONLY (var) = 0;
2676
2677 /* If there is no setup required and we are in SSA, take the easy route
2678 replacing all SSA names representing the function parameter by the
2679 SSA name passed to the function.
2680 
2681 We need to construct a map for the variable anyway, as it might be
2682 used in different SSA names when the parameter is set in the function.
2683 
2684 Do the replacement at -O0 for const arguments replaced by a constant.
2685 This is important for builtin_constant_p and other constructs requiring
2686 a constant argument to be visible in the inlined function body.  */
2687 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2688 && (optimize
2689 || (TREE_READONLY (p)
2690 && is_gimple_min_invariant (rhs)))
2691 && (TREE_CODE (rhs) == SSA_NAME
2692 || is_gimple_min_invariant (rhs))
2693 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2694 {
2695 insert_decl_map (id, def, rhs);
2696 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2697 }
2698
2699 /* If the value of the argument is never used, don't bother initializing
2700 it.  */
2701 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2702 {
2703 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2704 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2705 }
2706
2707 /* Initialize this VAR_DECL from the equivalent argument. Convert
2708 the argument to the proper type in case it was promoted. */
2709 if (value)
2710 {
2711 if (rhs == error_mark_node)
2712 {
2713 insert_decl_map (id, p, var);
2714 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2715 }
2716
2717 STRIP_USELESS_TYPE_CONVERSION (rhs);
2718
2719 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2720 keep our trees in gimple form. */
2721 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2722 {
2723 def = remap_ssa_name (def, id);
2724 init_stmt = gimple_build_assign (def, rhs);
2725 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2726 set_default_def (var, NULL);
2727 }
2728 else
2729 init_stmt = gimple_build_assign (var, rhs);
2730
2731 if (bb && init_stmt)
2732 insert_init_stmt (id, bb, init_stmt);
2733 }
2734 return init_stmt;
2735}
2736
2737/* Generate code to initialize the parameters of the function at the
2738 top of the stack in ID from the GIMPLE_CALL STMT. */
2739
2740static void
2741initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2742 tree fn, basic_block bb)
2743{
2744 tree parms;
2745 size_t i;
2746 tree p;
2747 tree vars = NULL_TREE;
2748 tree static_chain = gimple_call_chain (stmt);
2749
2750 /* Figure out what the parameters are. */
2751 parms = DECL_ARGUMENTS (fn);
2752
2753 /* Loop through the parameter declarations, replacing each with an
2754 equivalent VAR_DECL, appropriately initialized. */
2755 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2756 {
2757 tree val;
2758 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2759 setup_one_parameter (id, p, val, fn, bb, &vars);
2760 }
2761 /* After remapping parameters remap their types. This has to be done
2762 in a second loop over all parameters to appropriately remap
2763 variable sized arrays when the size is specified in a
2764 parameter following the array. */
2765 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2766 {
2767 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2768 if (varp
2769 && TREE_CODE (*varp) == VAR_DECL)
2770 {
2771 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2772 ? gimple_default_def (id->src_cfun, p) : NULL);
2773 tree var = *varp;
2774 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2775 /* Also remap the default definition if it was remapped
2776 to the default definition of the parameter replacement
2777 by the parameter setup. */
2778 if (def)
2779 {
2780 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2781 if (defp
2782 && TREE_CODE (*defp) == SSA_NAME
2783 && SSA_NAME_VAR (*defp) == var)
2784 TREE_TYPE (*defp) = TREE_TYPE (var);
2785 }
2786 }
2787 }
2788
2789 /* Initialize the static chain. */
2790 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2791 gcc_assert (fn != current_function_decl);
2792 if (p)
2793 {
2794 /* No static chain? Seems like a bug in tree-nested.c. */
2795 gcc_assert (static_chain);
2796
2797 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2798 }
2799
2800 declare_inline_vars (id->block, vars);
2801}
2802
2803
2804/* Declare a return variable to replace the RESULT_DECL for the
2805 function we are calling. An appropriate DECL_STMT is returned.
2806 The USE_STMT is filled to contain a use of the declaration to
2807 indicate the return value of the function.
2808
2809 RETURN_SLOT, if non-null, is the place where to store the result.  It
2810 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2811 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2812
2813 The return value is a (possibly null) value that holds the result
2814 as seen by the caller. */
2815
2816static tree
2817declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2818 basic_block entry_bb)
2819{
2820 tree callee = id->src_fn;
2821 tree result = DECL_RESULT (callee);
2822 tree callee_type = TREE_TYPE (result);
2823 tree caller_type;
2824 tree var, use;
2825
2826 /* Handle type-mismatches in the function declaration return type
2827 vs. the call expression. */
2828 if (modify_dest)
2829 caller_type = TREE_TYPE (modify_dest);
2830 else
2831 caller_type = TREE_TYPE (TREE_TYPE (callee));
2832
2833 /* We don't need to do anything for functions that don't return anything. */
2834 if (VOID_TYPE_P (callee_type))
2835 return NULL_TREE;
2836
2837 /* If there was a return slot, then the return value is the
2838 dereferenced address of that object. */
2839 if (return_slot)
2840 {
2841 /* The front end shouldn't have used both return_slot and
2842 a modify expression. */
2843 gcc_assert (!modify_dest);
2844 if (DECL_BY_REFERENCE (result))
2845 {
2846 tree return_slot_addr = build_fold_addr_expr (return_slot);
2847 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2848
2849 /* We are going to construct *&return_slot and we can't do that
2850 for variables believed to be not addressable.
2851
2852 FIXME: This check can possibly trigger, because values returned
2853 via return slot optimization are not believed to have their address
2854 taken by alias analysis.  */
2855 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2856 var = return_slot_addr;
2857 }
2858 else
2859 {
2860 var = return_slot;
2861 gcc_assert (TREE_CODE (var) != SSA_NAME);
2862 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2863 }
2864 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2865 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2866 && !DECL_GIMPLE_REG_P (result)
2867 && DECL_P (var))
2868 DECL_GIMPLE_REG_P (var) = 0;
2869 use = NULL;
2870 goto done;
2871 }
2872
2873 /* All types requiring non-trivial constructors should have been handled. */
2874 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2875
2876 /* Attempt to avoid creating a new temporary variable. */
2877 if (modify_dest
2878 && TREE_CODE (modify_dest) != SSA_NAME)
2879 {
2880 bool use_it = false;
2881
2882 /* We can't use MODIFY_DEST if there's type promotion involved. */
2883 if (!useless_type_conversion_p (callee_type, caller_type))
2884 use_it = false;
2885
2886 /* ??? If we're assigning to a variable sized type, then we must
2887 reuse the destination variable, because we've no good way to
2888 create variable sized temporaries at this point. */
2889 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2890 use_it = true;
2891
2892 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2893 reuse it as the result of the call directly. Don't do this if
2894 it would promote MODIFY_DEST to addressable. */
2895 else if (TREE_ADDRESSABLE (result))
2896 use_it = false;
2897 else
2898 {
2899 tree base_m = get_base_address (modify_dest);
2900
2901 /* If the base isn't a decl, then it's a pointer, and we don't
2902 know where that's going to go. */
2903 if (!DECL_P (base_m))
2904 use_it = false;
2905 else if (is_global_var (base_m))
2906 use_it = false;
2907 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2908 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2909 && !DECL_GIMPLE_REG_P (result)
2910 && DECL_GIMPLE_REG_P (base_m))
2911 use_it = false;
2912 else if (!TREE_ADDRESSABLE (base_m))
2913 use_it = true;
2914 }
2915
2916 if (use_it)
2917 {
2918 var = modify_dest;
2919 use = NULL;
2920 goto done;
2921 }
2922 }
2923
2924 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2925
2926 var = copy_result_decl_to_var (result, id);
2927 if (gimple_in_ssa_p (cfun))
2928 add_referenced_var (var);
2929
2930 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2931
2932 /* Do not have the rest of GCC warn about this variable as it should
2933 not be visible to the user. */
2934 TREE_NO_WARNING (var) = 1;
2935
2936 declare_inline_vars (id->block, var);
2937
2938 /* Build the use expr. If the return type of the function was
2939 promoted, convert it back to the expected type. */
2940 use = var;
2941 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2942 {
2943 /* If we can match up types by promotion/demotion do so. */
2944 if (fold_convertible_p (caller_type, var))
2945 use = fold_convert (caller_type, var);
2946 else
2947 {
2948 /* ??? For valid programs we should not end up here.
2949 Still if we end up with truly mismatched types here, fall back
2950 to using a MEM_REF to not leak invalid GIMPLE to the following
2951 passes. */
2952 /* Prevent var from being written into SSA form. */
2953 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
2954 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
2955 DECL_GIMPLE_REG_P (var) = false;
2956 else if (is_gimple_reg_type (TREE_TYPE (var)))
2957 TREE_ADDRESSABLE (var) = true;
2958 use = fold_build2 (MEM_REF, caller_type,
2959 build_fold_addr_expr (var),
2960 build_int_cst (ptr_type_node, 0));
2961 }
2962 }
2963
2964 STRIP_USELESS_TYPE_CONVERSION (use);
2965
2966 if (DECL_BY_REFERENCE (result))
2967 {
2968 TREE_ADDRESSABLE (var) = 1;
2969 var = build_fold_addr_expr (var);
2970 }
2971
2972 done:
2973 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2974 way, when the RESULT_DECL is encountered, it will be
2975 automatically replaced by the VAR_DECL.
2976
2977 When returning by reference, ensure that RESULT_DECL remaps to
2978 gimple_val. */
2979 if (DECL_BY_REFERENCE (result)
2980 && !is_gimple_val (var))
2981 {
2982 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
2983 if (gimple_in_ssa_p (id->src_cfun))
2984 add_referenced_var (temp);
2985 insert_decl_map (id, result, temp);
2986 /* When RESULT_DECL is in SSA form, we need to use its default_def
2987 SSA_NAME. */
2988 if (gimple_in_ssa_p (id->src_cfun) && gimple_default_def (id->src_cfun, result))
2989 temp = remap_ssa_name (gimple_default_def (id->src_cfun, result), id);
2990 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
2991 }
2992 else
2993 insert_decl_map (id, result, var);
2994
2995 /* Remember this so we can ignore it in remap_decls. */
2996 id->retvar = var;
2997
2998 return use;
2999}
3000
3001/* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3002 to a local label. */
3003
3004static tree
3005has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3006{
3007 tree node = *nodep;
3008 tree fn = (tree) fnp;
3009
3010 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3011 return node;
3012
3013 if (TYPE_P (node))
3014 *walk_subtrees = 0;
3015
3016 return NULL_TREE;
3017}
3018
3019 /* Determine if the function can be copied.  If so return NULL.  If
3020 not, return a string describing the reason for failure.  */
3021
3022static const char *
3023copy_forbidden (struct function *fun, tree fndecl)
3024{
3025 const char *reason = fun->cannot_be_copied_reason;
3026 tree decl;
3027 unsigned ix;
3028
3029 /* Only examine the function once. */
3030 if (fun->cannot_be_copied_set)
3031 return reason;
3032
3033 /* We cannot copy a function that receives a non-local goto
3034 because we cannot remap the destination label used in the
3035 function that is performing the non-local goto. */
3036 /* ??? Actually, this should be possible, if we work at it.
3037 No doubt there's just a handful of places that simply
3038 assume it doesn't happen and don't substitute properly. */
3039 if (fun->has_nonlocal_label)
3040 {
3041 reason = G_("function %q+F can never be copied "
3042 "because it receives a non-local goto");
3043 goto fail;
3044 }
3045
3046 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3047 if (TREE_CODE (decl) == VAR_DECL
3048 && TREE_STATIC (decl)
3049 && !DECL_EXTERNAL (decl)
3050 && DECL_INITIAL (decl)
3051 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3052 has_label_address_in_static_1,
3053 fndecl))
3054 {
3055 reason = G_("function %q+F can never be copied because it saves "
3056 "address of local label in a static variable");
3057 goto fail;
3058 }
3059
3060 fail:
3061 fun->cannot_be_copied_reason = reason;
3062 fun->cannot_be_copied_set = true;
3063 return reason;
3064}
3065
3066
3067static const char *inline_forbidden_reason;
3068
3069 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3070 iff a function cannot be inlined.  Also sets the reason why.  */
3071
3072static tree
3073inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3074 struct walk_stmt_info *wip)
3075{
3076 tree fn = (tree) wip->info;
3077 tree t;
3078 gimple stmt = gsi_stmt (*gsi);
3079
3080 switch (gimple_code (stmt))
3081 {
3082 case GIMPLE_CALL:
3083 /* Refuse to inline an alloca call unless the user explicitly forced it,
3084 as this may change the program's memory overhead drastically when the
3085 function using alloca is called in a loop.  In the GCC included in
3086 SPEC2000, inlining into schedule_block caused it to require 2GB of
3087 RAM instead of 256MB.  Don't do so for alloca calls emitted for
3088 VLA objects, as those can't cause unbounded growth (they're always
3089 wrapped inside stack_save/stack_restore regions).  */
3090 if (gimple_alloca_call_p (stmt)
3091 && !gimple_call_alloca_for_var_p (stmt)
3092 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3093 {
3094 inline_forbidden_reason
3095 = G_("function %q+F can never be inlined because it uses "
3096 "alloca (override using the always_inline attribute)");
3097 *handled_ops_p = true;
3098 return fn;
3099 }
3100
3101 t = gimple_call_fndecl (stmt);
3102 if (t == NULL_TREE)
3103 break;
3104
3105 /* We cannot inline functions that call setjmp. */
3106 if (setjmp_call_p (t))
3107 {
3108 inline_forbidden_reason
3109 = G_("function %q+F can never be inlined because it uses setjmp");
3110 *handled_ops_p = true;
3111 return t;
3112 }
3113
3114 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3115 switch (DECL_FUNCTION_CODE (t))
3116 {
3117 /* We cannot inline functions that take a variable number of
3118 arguments. */
3119 case BUILT_IN_VA_START:
3120 case BUILT_IN_NEXT_ARG:
3121 case BUILT_IN_VA_END:
3122 inline_forbidden_reason
3123 = G_("function %q+F can never be inlined because it "
3124 "uses variable argument lists");
3125 *handled_ops_p = true;
3126 return t;
3127
3128 case BUILT_IN_LONGJMP:
3129 /* We can't inline functions that call __builtin_longjmp at
3130 all. The non-local goto machinery really requires the
3131 destination be in a different function. If we allow the
3132 function calling __builtin_longjmp to be inlined into the
3133 function calling __builtin_setjmp, Things will Go Awry. */
3134 inline_forbidden_reason
3135 = G_("function %q+F can never be inlined because "
3136 "it uses setjmp-longjmp exception handling");
3137 *handled_ops_p = true;
3138 return t;
3139
3140 case BUILT_IN_NONLOCAL_GOTO:
3141 /* Similarly. */
3142 inline_forbidden_reason
3143 = G_("function %q+F can never be inlined because "
3144 "it uses non-local goto");
3145 *handled_ops_p = true;
3146 return t;
3147
3148 case BUILT_IN_RETURN:
3149 case BUILT_IN_APPLY_ARGS:
3150 /* If a __builtin_apply_args caller would be inlined,
3151 it would be saving arguments of the function it has
3152 been inlined into.  Similarly, __builtin_return would
3153 return from the function it has been inlined into.  */
3154 inline_forbidden_reason
3155 = G_("function %q+F can never be inlined because "
3156 "it uses __builtin_return or __builtin_apply_args");
3157 *handled_ops_p = true;
3158 return t;
3159
3160 default:
3161 break;
3162 }
3163 break;
3164
3165 case GIMPLE_GOTO:
3166 t = gimple_goto_dest (stmt);
3167
3168 /* We will not inline a function which uses computed goto. The
3169 addresses of its local labels, which may be tucked into
3170 global storage, are of course not constant across
3171 instantiations, which causes unexpected behavior. */
3172 if (TREE_CODE (t) != LABEL_DECL)
3173 {
3174 inline_forbidden_reason
3175 = G_("function %q+F can never be inlined "
3176 "because it contains a computed goto");
3177 *handled_ops_p = true;
3178 return t;
3179 }
3180 break;
3181
3182 default:
3183 break;
3184 }
3185
3186 *handled_ops_p = false;
3187 return NULL_TREE;
3188}
3189
3190/* Return true if FNDECL is a function that cannot be inlined into
3191 another one. */
3192
3193static bool
3194inline_forbidden_p (tree fndecl)
3195{
3196 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3197 struct walk_stmt_info wi;
3198 struct pointer_set_t *visited_nodes;
3199 basic_block bb;
3200 bool forbidden_p = false;
3201
3202 /* First check for shared reasons not to copy the code. */
3203 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3204 if (inline_forbidden_reason != NULL)
3205 return true;
3206
3207 /* Next, walk the statements of the function looking for
3208 constructs we can't handle, or that are non-optimal for inlining.  */
3209 visited_nodes = pointer_set_create ();
3210 memset (&wi, 0, sizeof (wi));
3211 wi.info = (void *) fndecl;
3212 wi.pset = visited_nodes;
3213
3214 FOR_EACH_BB_FN (bb, fun)
3215 {
3216 gimple ret;
3217 gimple_seq seq = bb_seq (bb);
3218 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3219 forbidden_p = (ret != NULL);
3220 if (forbidden_p)
3221 break;
3222 }
3223
3224 pointer_set_destroy (visited_nodes);
3225 return forbidden_p;
3226}
3227
3228/* Returns nonzero if FN is a function that does not have any
3229 fundamental inline blocking properties. */
3230
3231bool
3232tree_inlinable_function_p (tree fn)
3233{
3234 bool inlinable = true;
3235 bool do_warning;
3236 tree always_inline;
3237
3238 /* If we've already decided this function shouldn't be inlined,
3239 there's no need to check again. */
3240 if (DECL_UNINLINABLE (fn))
3241 return false;
3242
3243 /* We only warn for functions declared `inline' by the user. */
3244 do_warning = (warn_inline
3245 && DECL_DECLARED_INLINE_P (fn)
3246 && !DECL_NO_INLINE_WARNING_P (fn)
3247 && !DECL_IN_SYSTEM_HEADER (fn));
3248
3249 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3250
3251 if (flag_no_inline
3252 && always_inline == NULL)
3253 {
3254 if (do_warning)
3255 warning (OPT_Winline, "function %q+F can never be inlined because it "
3256 "is suppressed using -fno-inline", fn);
3257 inlinable = false;
3258 }
3259
3260 else if (!function_attribute_inlinable_p (fn))
3261 {
3262 if (do_warning)
3263 warning (OPT_Winline, "function %q+F can never be inlined because it "
3264 "uses attributes conflicting with inlining", fn);
3265 inlinable = false;
3266 }
3267
3268 else if (inline_forbidden_p (fn))
3269 {
3270 /* See if we should warn about uninlinable functions. Previously,
3271 some of these warnings would be issued while trying to expand
3272 the function inline, but that would cause multiple warnings
3273 about functions that would for example call alloca. But since
3274 this is a property of the function, just one warning is enough.
3275 As a bonus we can now give more details about the reason why a
3276 function is not inlinable. */
3277 if (always_inline)
3278 error (inline_forbidden_reason, fn);
3279 else if (do_warning)
3280 warning (OPT_Winline, inline_forbidden_reason, fn);
3281
3282 inlinable = false;
3283 }
3284
3285 /* Squirrel away the result so that we don't have to check again. */
3286 DECL_UNINLINABLE (fn) = !inlinable;
3287
3288 return inlinable;
3289}
3290
3291 /* Estimate the cost of a memory move.  Use the machine-dependent
3292 word size and take a possible memcpy call into account.  */
3293
3294int
3295estimate_move_cost (tree type)
3296{
3297 HOST_WIDE_INT size;
3298
3299 gcc_assert (!VOID_TYPE_P (type));
3300
3301 if (TREE_CODE (type) == VECTOR_TYPE)
3302 {
3303 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3304 enum machine_mode simd
3305 = targetm.vectorize.preferred_simd_mode (inner);
3306 int simd_mode_size = GET_MODE_SIZE (simd);
3307 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3308 / simd_mode_size);
3309 }
3310
3311 size = int_size_in_bytes (type);
3312
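  /* Small blocks are assumed to be moved piecewise; e.g. if MOVE_MAX_PIECES
     is 8, a 24-byte aggregate costs (24 + 8 - 1) / 8 = 3, while anything
     large enough to be expanded as a memcpy call gets the flat cost of 4
     below.  */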
3313 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3314 /* Cost of a memcpy call, 3 arguments and the call. */
3315 return 4;
3316 else
3317 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3318}
3319
3320 /* Returns the cost of operation CODE, according to WEIGHTS.  */
3321
3322static int
3323estimate_operator_cost (enum tree_code code, eni_weights *weights,
3324 tree op1 ATTRIBUTE_UNUSED, tree op2)
3325{
3326 switch (code)
3327 {
3328 /* These are "free" conversions, or their presumed cost
3329 is folded into other operations. */
3330 case RANGE_EXPR:
3331 CASE_CONVERT:
3332 case COMPLEX_EXPR:
3333 case PAREN_EXPR:
3334 case VIEW_CONVERT_EXPR:
3335 return 0;
3336
3337 /* Assign cost of 1 to usual operations.
3338 ??? We may consider mapping RTL costs to this. */
3339 case COND_EXPR:
3340 case VEC_COND_EXPR:
3341 case VEC_PERM_EXPR:
3342
3343 case PLUS_EXPR:
3344 case POINTER_PLUS_EXPR:
3345 case MINUS_EXPR:
3346 case MULT_EXPR:
3347 case FMA_EXPR:
3348
3349 case ADDR_SPACE_CONVERT_EXPR:
3350 case FIXED_CONVERT_EXPR:
3351 case FIX_TRUNC_EXPR:
3352
3353 case NEGATE_EXPR:
3354 case FLOAT_EXPR:
3355 case MIN_EXPR:
3356 case MAX_EXPR:
3357 case ABS_EXPR:
3358
3359 case LSHIFT_EXPR:
3360 case RSHIFT_EXPR:
3361 case LROTATE_EXPR:
3362 case RROTATE_EXPR:
3363 case VEC_LSHIFT_EXPR:
3364 case VEC_RSHIFT_EXPR:
3365
3366 case BIT_IOR_EXPR:
3367 case BIT_XOR_EXPR:
3368 case BIT_AND_EXPR:
3369 case BIT_NOT_EXPR:
3370
3371 case TRUTH_ANDIF_EXPR:
3372 case TRUTH_ORIF_EXPR:
3373 case TRUTH_AND_EXPR:
3374 case TRUTH_OR_EXPR:
3375 case TRUTH_XOR_EXPR:
3376 case TRUTH_NOT_EXPR:
3377
3378 case LT_EXPR:
3379 case LE_EXPR:
3380 case GT_EXPR:
3381 case GE_EXPR:
3382 case EQ_EXPR:
3383 case NE_EXPR:
3384 case ORDERED_EXPR:
3385 case UNORDERED_EXPR:
3386
3387 case UNLT_EXPR:
3388 case UNLE_EXPR:
3389 case UNGT_EXPR:
3390 case UNGE_EXPR:
3391 case UNEQ_EXPR:
3392 case LTGT_EXPR:
3393
3394 case CONJ_EXPR:
3395
3396 case PREDECREMENT_EXPR:
3397 case PREINCREMENT_EXPR:
3398 case POSTDECREMENT_EXPR:
3399 case POSTINCREMENT_EXPR:
3400
3401 case REALIGN_LOAD_EXPR:
3402
3403 case REDUC_MAX_EXPR:
3404 case REDUC_MIN_EXPR:
3405 case REDUC_PLUS_EXPR:
3406 case WIDEN_SUM_EXPR:
3407 case WIDEN_MULT_EXPR:
3408 case DOT_PROD_EXPR:
3409 case WIDEN_MULT_PLUS_EXPR:
3410 case WIDEN_MULT_MINUS_EXPR:
3411 case WIDEN_LSHIFT_EXPR:
3412
3413 case VEC_WIDEN_MULT_HI_EXPR:
3414 case VEC_WIDEN_MULT_LO_EXPR:
3415 case VEC_UNPACK_HI_EXPR:
3416 case VEC_UNPACK_LO_EXPR:
3417 case VEC_UNPACK_FLOAT_HI_EXPR:
3418 case VEC_UNPACK_FLOAT_LO_EXPR:
3419 case VEC_PACK_TRUNC_EXPR:
3420 case VEC_PACK_SAT_EXPR:
3421 case VEC_PACK_FIX_TRUNC_EXPR:
3422 case VEC_WIDEN_LSHIFT_HI_EXPR:
3423 case VEC_WIDEN_LSHIFT_LO_EXPR:
3424
3425 return 1;
3426
3427 /* A few special cases of expensive operations.  This is useful
3428 to avoid inlining functions having too many of these.  */
3429 case TRUNC_DIV_EXPR:
3430 case CEIL_DIV_EXPR:
3431 case FLOOR_DIV_EXPR:
3432 case ROUND_DIV_EXPR:
3433 case EXACT_DIV_EXPR:
3434 case TRUNC_MOD_EXPR:
3435 case CEIL_MOD_EXPR:
3436 case FLOOR_MOD_EXPR:
3437 case ROUND_MOD_EXPR:
3438 case RDIV_EXPR:
3439 if (TREE_CODE (op2) != INTEGER_CST)
3440 return weights->div_mod_cost;
3441 return 1;
3442
3443 default:
3444 /* We expect a copy assignment with no operator. */
3445 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3446 return 0;
3447 }
3448}
3449
3450
3451/* Estimate number of instructions that will be created by expanding
3452 the statements in the statement sequence STMTS.
3453 WEIGHTS contains weights attributed to various constructs. */
3454
3455static
3456int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3457{
3458 int cost;
3459 gimple_stmt_iterator gsi;
3460
3461 cost = 0;
3462 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3463 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3464
3465 return cost;
3466}
3467
3468
3469/* Estimate number of instructions that will be created by expanding STMT.
3470 WEIGHTS contains weights attributed to various constructs. */
3471
3472int
3473estimate_num_insns (gimple stmt, eni_weights *weights)
3474{
3475 unsigned cost, i;
3476 enum gimple_code code = gimple_code (stmt);
3477 tree lhs;
3478 tree rhs;
3479
3480 switch (code)
3481 {
3482 case GIMPLE_ASSIGN:
3483 /* Try to estimate the cost of assignments.  We have two cases to
3484 deal with:
3485 1) Simple assignments to registers;
3486 2) Stores to things that must live in memory. This includes
3487 "normal" stores to scalars, but also assignments of large
3488 structures, or constructors of big arrays;
3489
3490 Let us look at the first two cases, assuming we have "a = b + C":
3491 <GIMPLE_ASSIGN <var_decl "a">
3492 <plus_expr <var_decl "b"> <constant C>>
3493 If "a" is a GIMPLE register, the assignment to it is free on almost
3494 any target, because "a" usually ends up in a real register. Hence
3495 the only cost of this expression comes from the PLUS_EXPR, and we
3496 can ignore the GIMPLE_ASSIGN.
3497 If "a" is not a GIMPLE register, the assignment to "a" will most
3498 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3499 of moving something into "a", which we compute using the function
3500 estimate_move_cost. */
3501 if (gimple_clobber_p (stmt))
3502 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3503
3504 lhs = gimple_assign_lhs (stmt);
3505 rhs = gimple_assign_rhs1 (stmt);
3506
3507 if (is_gimple_reg (lhs))
3508 cost = 0;
3509 else
3510 cost = estimate_move_cost (TREE_TYPE (lhs));
3511
3512 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3513 cost += estimate_move_cost (TREE_TYPE (rhs));
3514
3515 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3516 gimple_assign_rhs1 (stmt),
3517 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3518 == GIMPLE_BINARY_RHS
3519 ? gimple_assign_rhs2 (stmt) : NULL);
3520 break;
3521
3522 case GIMPLE_COND:
3523 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3524 gimple_op (stmt, 0),
3525 gimple_op (stmt, 1));
3526 break;
3527
3528 case GIMPLE_SWITCH:
3529 /* Take into account the cost of the switch + guess 2 conditional jumps
3530 for each case label.
3531
3532 TODO: once the switch expansion logic is sufficiently separated, we can
3533 do a better job of estimating the cost of the switch. */
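/* For example, with 16 case labels the estimate is floor_log2 (16) * 2 == 8
under the time-based weights and 16 * 2 == 32 under the size weights. */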
3534 if (weights->time_based)
3535 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3536 else
3537 cost = gimple_switch_num_labels (stmt) * 2;
3538 break;
3539
3540 case GIMPLE_CALL:
3541 {
3542 tree decl = gimple_call_fndecl (stmt);
3543 struct cgraph_node *node = NULL;
3544
3545 /* Do not special-case builtins where we see the body.
3546 This just confuses the inliner. */
3547 if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
3548 ;
3549 /* For builtins that are likely expanded to nothing or
3550 inlined, do not account for operand costs. */
3551 else if (is_simple_builtin (decl))
3552 return 0;
3553 else if (is_inexpensive_builtin (decl))
3554 return weights->target_builtin_call_cost;
3555 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3556 {
3557 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3558 specialize the cheap expansion we do here.
3559 ??? This asks for a more general solution. */
3560 switch (DECL_FUNCTION_CODE (decl))
3561 {
3562 case BUILT_IN_POW:
3563 case BUILT_IN_POWF:
3564 case BUILT_IN_POWL:
3565 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3566 && REAL_VALUES_EQUAL
3567 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3568 return estimate_operator_cost (MULT_EXPR, weights,
3569 gimple_call_arg (stmt, 0),
3570 gimple_call_arg (stmt, 0));
3571 break;
3572
3573 default:
3574 break;
3575 }
3576 }
3577
3578 cost = node ? weights->call_cost : weights->indirect_call_cost;
3579 if (gimple_call_lhs (stmt))
3580 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3581 for (i = 0; i < gimple_call_num_args (stmt); i++)
3582 {
3583 tree arg = gimple_call_arg (stmt, i);
3584 cost += estimate_move_cost (TREE_TYPE (arg));
3585 }
3586 break;
3587 }
3588
3589 case GIMPLE_RETURN:
3590 return weights->return_cost;
3591
3592 case GIMPLE_GOTO:
3593 case GIMPLE_LABEL:
3594 case GIMPLE_NOP:
3595 case GIMPLE_PHI:
3596 case GIMPLE_PREDICT:
3597 case GIMPLE_DEBUG:
3598 return 0;
3599
3600 case GIMPLE_ASM:
3601 return asm_str_count (gimple_asm_string (stmt));
3602
3603 case GIMPLE_RESX:
3604 /* This is either going to be an external function call with one
3605 argument, or two register copy statements plus a goto. */
3606 return 2;
3607
3608 case GIMPLE_EH_DISPATCH:
3609 /* ??? This is going to turn into a switch statement. Ideally
3610 we'd have a look at the eh region and estimate the number of
3611 edges involved. */
3612 return 10;
3613
3614 case GIMPLE_BIND:
3615 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3616
3617 case GIMPLE_EH_FILTER:
3618 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3619
3620 case GIMPLE_CATCH:
3621 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3622
3623 case GIMPLE_TRY:
3624 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3625 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3626
3627 /* OpenMP directives are generally very expensive. */
3628
3629 case GIMPLE_OMP_RETURN:
3630 case GIMPLE_OMP_SECTIONS_SWITCH:
3631 case GIMPLE_OMP_ATOMIC_STORE:
3632 case GIMPLE_OMP_CONTINUE:
3633 /* ...except these, which are cheap. */
3634 return 0;
3635
3636 case GIMPLE_OMP_ATOMIC_LOAD:
3637 return weights->omp_cost;
3638
3639 case GIMPLE_OMP_FOR:
3640 return (weights->omp_cost
3641 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3642 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3643
3644 case GIMPLE_OMP_PARALLEL:
3645 case GIMPLE_OMP_TASK:
3646 case GIMPLE_OMP_CRITICAL:
3647 case GIMPLE_OMP_MASTER:
3648 case GIMPLE_OMP_ORDERED:
3649 case GIMPLE_OMP_SECTION:
3650 case GIMPLE_OMP_SECTIONS:
3651 case GIMPLE_OMP_SINGLE:
3652 return (weights->omp_cost
3653 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3654
3655 case GIMPLE_TRANSACTION:
3656 return (weights->tm_cost
3657 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3658 weights));
3659
3660 default:
3661 gcc_unreachable ();
3662 }
3663
3664 return cost;
3665}
3666
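/* Illustrative usage: a client that wants both estimates would typically
evaluate a sequence once with each of the global weight sets that
init_inline_once below sets up, e.g.

int size = estimate_num_insns_seq (seq, &eni_size_weights);
int time = estimate_num_insns_seq (seq, &eni_time_weights);

yielding separate size and time estimates for SEQ. */
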
3667/* Estimate number of instructions that will be created by expanding
3668 function FNDECL. WEIGHTS contains weights attributed to various
3669 constructs. */
3670
3671int
3672estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3673{
3674 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3675 gimple_stmt_iterator bsi;
3676 basic_block bb;
3677 int n = 0;
3678
3679 gcc_assert (my_function && my_function->cfg);
3680 FOR_EACH_BB_FN (bb, my_function)
3681 {
3682 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3683 n += estimate_num_insns (gsi_stmt (bsi), weights);
3684 }
3685
3686 return n;
3687}
3688
3689
3690/* Initializes weights used by estimate_num_insns. */
3691
3692void
3693init_inline_once (void)
3694{
3695 eni_size_weights.call_cost = 1;
3696 eni_size_weights.indirect_call_cost = 3;
3697 eni_size_weights.target_builtin_call_cost = 1;
3698 eni_size_weights.div_mod_cost = 1;
3699 eni_size_weights.omp_cost = 40;
3700 eni_size_weights.tm_cost = 10;
3701 eni_size_weights.time_based = false;
3702 eni_size_weights.return_cost = 1;
3703
3704 /* Estimating the time for a call is difficult, since we have no idea what
3705 the called function does. In the current uses of eni_time_weights,
3706 underestimating the cost does less harm than overestimating it, so
3707 we choose a rather small value here. */
3708 eni_time_weights.call_cost = 10;
3709 eni_time_weights.indirect_call_cost = 15;
3710 eni_time_weights.target_builtin_call_cost = 1;
3711 eni_time_weights.div_mod_cost = 10;
3712 eni_time_weights.omp_cost = 40;
3713 eni_time_weights.tm_cost = 40;
3714 eni_time_weights.time_based = true;
3715 eni_time_weights.return_cost = 2;
3716}
3717
3718/* Estimate the number of instructions in a gimple_seq. */
3719
3720int
3721count_insns_seq (gimple_seq seq, eni_weights *weights)
3722{
3723 gimple_stmt_iterator gsi;
3724 int n = 0;
3725 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3726 n += estimate_num_insns (gsi_stmt (gsi), weights);
3727
3728 return n;
3729}
3730
3731
3732/* Install new lexical TREE_BLOCK underneath 'current_block'. */
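/* For instance, if CURRENT_BLOCK's subblocks are B1 -> B2, then after
prepend_lexical_block (CURRENT_BLOCK, NEW_BLOCK) the subblock chain reads
NEW_BLOCK -> B1 -> B2 and NEW_BLOCK's supercontext is CURRENT_BLOCK. */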
3733
3734static void
3735prepend_lexical_block (tree current_block, tree new_block)
3736{
3737 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3738 BLOCK_SUBBLOCKS (current_block) = new_block;
3739 BLOCK_SUPERCONTEXT (new_block) = current_block;
3740}
3741
3742/* Add local variables from CALLEE to CALLER. */
3743
3744static inline void
3745add_local_variables (struct function *callee, struct function *caller,
3746 copy_body_data *id, bool check_var_ann)
3747{
3748 tree var;
3749 unsigned ix;
3750
3751 FOR_EACH_LOCAL_DECL (callee, ix, var)
3752 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3753 {
3754 if (!check_var_ann
3755 || (var_ann (var) && add_referenced_var (var)))
3756 add_local_decl (caller, var);
3757 }
3758 else if (!can_be_nonlocal (var, id))
3759 {
3760 tree new_var = remap_decl (var, id);
3761
3762 /* Remap debug-expressions. */
3763 if (TREE_CODE (new_var) == VAR_DECL
3764 && DECL_DEBUG_EXPR_IS_FROM (new_var)
3765 && new_var != var)
3766 {
3767 tree tem = DECL_DEBUG_EXPR (var);
3768 bool old_regimplify = id->regimplify;
3769 id->remapping_type_depth++;
3770 walk_tree (&tem, copy_tree_body_r, id, NULL);
3771 id->remapping_type_depth--;
3772 id->regimplify = old_regimplify;
3773 SET_DECL_DEBUG_EXPR (new_var, tem);
3774 }
3775 add_local_decl (caller, new_var);
3776 }
3777}
3778
3779/* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3780
3781static bool
3782expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3783{
3784 tree use_retvar;
3785 tree fn;
3786 struct pointer_map_t *st, *dst;
3787 tree return_slot;
3788 tree modify_dest;
3789 location_t saved_location;
3790 struct cgraph_edge *cg_edge;
3791 cgraph_inline_failed_t reason;
3792 basic_block return_block;
3793 edge e;
3794 gimple_stmt_iterator gsi, stmt_gsi;
3795 bool successfully_inlined = FALSE;
3796 bool purge_dead_abnormal_edges;
3797
3798 /* Set input_location here so we get the right instantiation context
3799 if we call instantiate_decl from inlinable_function_p. */
3800 saved_location = input_location;
3801 if (gimple_has_location (stmt))
3802 input_location = gimple_location (stmt);
3803
3804 /* From here on, we're only interested in CALL_EXPRs. */
3805 if (gimple_code (stmt) != GIMPLE_CALL)
3806 goto egress;
3807
3808 cg_edge = cgraph_edge (id->dst_node, stmt);
3809 gcc_checking_assert (cg_edge);
3810 /* First, see if we can figure out what function is being called.
3811 If we cannot, then there is no hope of inlining the function. */
3812 if (cg_edge->indirect_unknown_callee)
3813 goto egress;
3814 fn = cg_edge->callee->decl;
3815 gcc_checking_assert (fn);
3816
3817 /* If FN is a declaration of a function in a nested scope that was
3818 globally declared inline, we don't set its DECL_INITIAL.
3819 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3820 C++ front-end uses it for cdtors to refer to their internal
3821 declarations, which are not real functions. Fortunately those
3822 don't have trees to be saved, so we can tell by checking their
3823 gimple_body. */
3824 if (!DECL_INITIAL (fn)
3825 && DECL_ABSTRACT_ORIGIN (fn)
3826 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3827 fn = DECL_ABSTRACT_ORIGIN (fn);
3828
3829 /* Don't try to inline functions that are not well-suited to inlining. */
3830 if (!cgraph_inline_p (cg_edge, &reason))
3831 {
3832 /* If this call was originally indirect, we do not want to emit any
3833 inlining-related warnings or sorry messages because there are no
3834 guarantees regarding those. */
3835 if (cg_edge->indirect_inlining_edge)
3836 goto egress;
3837
3838 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3839 /* Avoid warnings during early inline pass. */
3840 && cgraph_global_info_ready
3841 /* PR 20090218-1_0.c. Body can be provided by another module. */
3842 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
3843 {
3844 error ("inlining failed in call to always_inline %q+F: %s", fn,
3845 cgraph_inline_failed_string (reason));
3846 error ("called from here");
3847 }
3848 else if (warn_inline
3849 && DECL_DECLARED_INLINE_P (fn)
3850 && !DECL_NO_INLINE_WARNING_P (fn)
3851 && !DECL_IN_SYSTEM_HEADER (fn)
3852 && reason != CIF_UNSPECIFIED
3853 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3854 /* Do not warn about recursive calls that are not inlined. */
3855 && !cgraph_edge_recursive_p (cg_edge)
3856 /* Avoid warnings during early inline pass. */
3857 && cgraph_global_info_ready
3858 && reason != CIF_UNLIKELY_CALL)
3859 {
3860 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3861 fn, _(cgraph_inline_failed_string (reason)));
3862 warning (OPT_Winline, "called from here");
3863 }
3864 goto egress;
3865 }
3866 fn = cg_edge->callee->decl;
3867
3868#ifdef ENABLE_CHECKING
3869 if (cg_edge->callee->decl != id->dst_node->decl)
3870 verify_cgraph_node (cg_edge->callee);
3871#endif
3872
3873 /* We will be inlining this callee. */
3874 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3875
3876 /* Update the caller's EH personality. */
3877 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3878 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3879 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3880
3881 /* Split the block holding the GIMPLE_CALL. */
3882 e = split_block (bb, stmt);
3883 bb = e->src;
3884 return_block = e->dest;
3885 remove_edge (e);
3886
3887 /* split_block splits after the statement; work around this by
3888 moving the call into the second block manually. Not pretty,
3889 but seems easier than doing the CFG manipulation by hand
3890 when the GIMPLE_CALL is the last statement of BB. */
3891 stmt_gsi = gsi_last_bb (bb);