1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "basic-block.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
38 #include "diagnostic.h"
42 #include "tree-inline.h"
43 #include "value-prof.h"
/* Return an expression tree corresponding to the RHS of GIMPLE statement STMT.  */
51 gimple_assign_rhs_to_tree (gimple stmt)
54 enum gimple_rhs_class grhs_class;
56 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
58 if (grhs_class == GIMPLE_BINARY_RHS)
59 t = build2 (gimple_assign_rhs_code (stmt),
60 TREE_TYPE (gimple_assign_lhs (stmt)),
61 gimple_assign_rhs1 (stmt),
62 gimple_assign_rhs2 (stmt));
63 else if (grhs_class == GIMPLE_UNARY_RHS)
64 t = build1 (gimple_assign_rhs_code (stmt),
65 TREE_TYPE (gimple_assign_lhs (stmt)),
66 gimple_assign_rhs1 (stmt));
67 else if (grhs_class == GIMPLE_SINGLE_RHS)
68 t = gimple_assign_rhs1 (stmt);
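/* For example (an illustrative sketch): for the GIMPLE assignment
   x = a + b, whose RHS class is GIMPLE_BINARY_RHS, the code above
   builds PLUS_EXPR <a, b> with the type of x; the caller wraps it
   in a MODIFY_EXPR.  */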
/* Return an expression tree corresponding to the PREDICATE of GIMPLE_COND statement STMT.  */
79 gimple_cond_pred_to_tree (gimple stmt)
81 return build2 (gimple_cond_code (stmt), boolean_type_node,
82 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
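/* E.g. for the GIMPLE condition "if (a < b)" this builds the tree
   LT_EXPR <a, b> of boolean type (an illustrative sketch).  */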
85 /* Helper for gimple_to_tree. Set EXPR_LOCATION for every expression
86 inside *TP. DATA is the location to set. */
89 set_expr_location_r (tree *tp, int *ws ATTRIBUTE_UNUSED, void *data)
91 location_t *loc = (location_t *) data;
93 SET_EXPR_LOCATION (*tp, *loc);
99 /* RTL expansion has traditionally been done on trees, so the
100 transition to doing it on GIMPLE tuples is very invasive to the RTL
101 expander. To facilitate the transition, this function takes a
GIMPLE tuple STMT and returns the same statement in the form of a tree.  */
106 gimple_to_tree (gimple stmt)
110 tree_ann_common_t ann;
113 switch (gimple_code (stmt))
117 tree lhs = gimple_assign_lhs (stmt);
119 t = gimple_assign_rhs_to_tree (stmt);
120 t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
121 if (gimple_assign_nontemporal_move_p (stmt))
122 MOVE_NONTEMPORAL (t) = true;
127 t = gimple_cond_pred_to_tree (stmt);
128 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
132 t = build1 (GOTO_EXPR, void_type_node, gimple_goto_dest (stmt));
136 t = build1 (LABEL_EXPR, void_type_node, gimple_label_label (stmt));
141 tree retval = gimple_return_retval (stmt);
143 if (retval && retval != error_mark_node)
145 tree result = DECL_RESULT (current_function_decl);
147 /* If we are not returning the current function's RESULT_DECL,
148 build an assignment to it. */
149 if (retval != result)
151 /* I believe that a function's RESULT_DECL is unique. */
152 gcc_assert (TREE_CODE (retval) != RESULT_DECL);
retval = build2 (MODIFY_EXPR, TREE_TYPE (result),
                 result, retval);
158 t = build1 (RETURN_EXPR, void_type_node, retval);
169 n = gimple_asm_noutputs (stmt);
172 t = out = gimple_asm_output_op (stmt, 0);
173 for (i = 1; i < n; i++)
175 TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
176 t = gimple_asm_output_op (stmt, i);
181 n = gimple_asm_ninputs (stmt);
184 t = in = gimple_asm_input_op (stmt, 0);
185 for (i = 1; i < n; i++)
187 TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
188 t = gimple_asm_input_op (stmt, i);
193 n = gimple_asm_nclobbers (stmt);
196 t = cl = gimple_asm_clobber_op (stmt, 0);
197 for (i = 1; i < n; i++)
199 TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
200 t = gimple_asm_clobber_op (stmt, i);
204 s = gimple_asm_string (stmt);
t = build4 (ASM_EXPR, void_type_node, build_string (strlen (s), s),
            out, in, cl);
207 ASM_VOLATILE_P (t) = gimple_asm_volatile_p (stmt);
208 ASM_INPUT_P (t) = gimple_asm_input_p (stmt);
216 tree_ann_common_t ann;
218 t = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
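/* The three extra operand slots hold the fixed CALL_EXPR operands:
   the operand count, the callee and the static chain; the actual
   arguments start at operand 3.  */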
220 CALL_EXPR_FN (t) = gimple_call_fn (stmt);
221 TREE_TYPE (t) = gimple_call_return_type (stmt);
222 CALL_EXPR_STATIC_CHAIN (t) = gimple_call_chain (stmt);
224 for (i = 0; i < gimple_call_num_args (stmt); i++)
225 CALL_EXPR_ARG (t, i) = gimple_call_arg (stmt, i);
227 if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
228 TREE_SIDE_EFFECTS (t) = 1;
230 if (gimple_call_flags (stmt) & ECF_NOTHROW)
231 TREE_NOTHROW (t) = 1;
233 CALL_EXPR_TAILCALL (t) = gimple_call_tail_p (stmt);
234 CALL_EXPR_RETURN_SLOT_OPT (t) = gimple_call_return_slot_opt_p (stmt);
235 CALL_FROM_THUNK_P (t) = gimple_call_from_thunk_p (stmt);
236 CALL_CANNOT_INLINE_P (t) = gimple_call_cannot_inline_p (stmt);
237 CALL_EXPR_VA_ARG_PACK (t) = gimple_call_va_arg_pack_p (stmt);
239 /* If the call has a LHS then create a MODIFY_EXPR to hold it. */
241 tree lhs = gimple_call_lhs (stmt);
244 t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
247 /* Record the original call statement, as it may be used
248 to retrieve profile information during expansion. */
250 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
251 && DECL_BUILT_IN (fn))
253 ann = get_tree_common_ann (t);
263 tree elt = gimple_switch_label (stmt, 0);
265 label_vec = make_tree_vec (gimple_switch_num_labels (stmt));
267 if (!CASE_LOW (elt) && !CASE_HIGH (elt))
269 for (i = 1; i < gimple_switch_num_labels (stmt); i++)
270 TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, i);
/* The default case in a SWITCH_EXPR must be at the end of the label vector.  */
274 TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, 0);
278 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
279 TREE_VEC_ELT (label_vec, i) = gimple_switch_label (stmt, i);
t = build3 (SWITCH_EXPR, void_type_node, gimple_switch_index (stmt),
            NULL, label_vec);
289 t = build1 (NOP_EXPR, void_type_node, size_zero_node);
293 t = build_resx (gimple_resx_region (stmt));
error ("unrecognized GIMPLE statement during RTL expansion");
300 print_gimple_stmt (stderr, stmt, 4, 0);
305 /* Ignore any bad gimple codes if we're going to die anyhow,
306 so we can at least set TREE_ASM_WRITTEN and have the rest
307 of compilation advance without sudden ICE death. */
308 t = build1 (NOP_EXPR, void_type_node, size_zero_node);
/* If STMT is inside an exception region, record it in the generated expression.  */
315 rn = lookup_stmt_eh_region (stmt);
318 tree call = get_call_expr_in (t);
320 ann = get_tree_common_ann (t);
323 /* For a CALL_EXPR on the RHS of an assignment, calls.c looks up
the CALL_EXPR, not the assignment statement, for the EH region number.  */
325 if (call && call != t)
327 ann = get_tree_common_ann (call);
332 /* Set EXPR_LOCATION in all the embedded expressions. */
333 loc = gimple_location (stmt);
334 walk_tree (&t, set_expr_location_r, (void *) &loc, NULL);
336 TREE_BLOCK (t) = gimple_block (stmt);
/* Release back to GC the memory allocated by gimple_to_tree.  */
345 release_stmt_tree (gimple stmt, tree stmt_tree)
347 tree_ann_common_t ann;
349 switch (gimple_code (stmt))
352 if (get_gimple_rhs_class (gimple_expr_code (stmt)) != GIMPLE_SINGLE_RHS)
353 ggc_free (TREE_OPERAND (stmt_tree, 1));
356 ggc_free (COND_EXPR_COND (stmt_tree));
359 if (TREE_OPERAND (stmt_tree, 0)
360 && TREE_CODE (TREE_OPERAND (stmt_tree, 0)) == MODIFY_EXPR)
361 ggc_free (TREE_OPERAND (stmt_tree, 0));
364 if (gimple_call_lhs (stmt))
366 ann = tree_common_ann (TREE_OPERAND (stmt_tree, 1));
369 ggc_free (TREE_OPERAND (stmt_tree, 1));
375 ann = tree_common_ann (stmt_tree);
378 ggc_free (stmt_tree);
/* Verify that there is exactly one jump instruction since LAST and attach
   a REG_BR_PROB note specifying PROBABILITY.
384 ??? We really ought to pass the probability down to RTL expanders and let it
385 re-distribute it when the conditional expands into multiple conditionals.
386 This is however difficult to do. */
388 add_reg_br_prob_note (rtx last, int probability)
390 if (profile_status == PROFILE_ABSENT)
392 for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
395 /* It is common to emit condjump-around-jump sequence when we don't know
396 how to reverse the conditional. Special case this. */
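/* The matched sequence looks like (a sketch):
       (cond-jump L2)
       (jump L1)
       (barrier)
   L2: (code_label)
   with nothing following the label.  */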
397 if (!any_condjump_p (last)
398 || !JUMP_P (NEXT_INSN (last))
399 || !simplejump_p (NEXT_INSN (last))
400 || !NEXT_INSN (NEXT_INSN (last))
401 || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
402 || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
403 || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
404 || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
406 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
407 add_reg_note (last, REG_BR_PROB,
408 GEN_INT (REG_BR_PROB_BASE - probability));
411 if (!last || !JUMP_P (last) || !any_condjump_p (last))
413 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
414 add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
418 fprintf (dump_file, "Failed to add probability note\n");
422 #ifndef STACK_ALIGNMENT_NEEDED
423 #define STACK_ALIGNMENT_NEEDED 1
427 /* This structure holds data relevant to one variable that will be
428 placed in a stack slot. */
434 /* The offset of the variable. During partitioning, this is the
435 offset relative to the partition. After partitioning, this
436 is relative to the stack frame. */
437 HOST_WIDE_INT offset;
/* Initially, the size of the variable.  Later, the size of the partition,
   if this variable becomes its partition's representative.  */
/* The *byte* alignment required for this variable.  Or, as with the
   size, the alignment for this partition.  */
447 /* The partition representative. */
448 size_t representative;
450 /* The next stack variable in the partition, or EOC. */
454 #define EOC ((size_t)-1)
456 /* We have an array of such objects while deciding allocation. */
457 static struct stack_var *stack_vars;
458 static size_t stack_vars_alloc;
459 static size_t stack_vars_num;
461 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
462 is non-decreasing. */
463 static size_t *stack_vars_sorted;
465 /* We have an interference graph between such objects. This graph
466 is lower triangular. */
467 static bool *stack_vars_conflict;
468 static size_t stack_vars_conflict_alloc;
470 /* The phase of the stack frame. This is the known misalignment of
471 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
472 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
473 static int frame_phase;
475 /* Used during expand_used_vars to remember if we saw any decls for
476 which we'd like to enable stack smashing protection. */
477 static bool has_protected_decls;
/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
481 static bool has_short_buffer;
/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't satisfy given the expected alignment of the stack boundary.  */
487 get_decl_align_unit (tree decl)
491 align = LOCAL_DECL_ALIGNMENT (decl);
493 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
494 align = MAX_SUPPORTED_STACK_ALIGNMENT;
496 if (SUPPORTS_STACK_ALIGNMENT)
498 if (crtl->stack_alignment_estimated < align)
gcc_assert (!crtl->stack_realign_processed);
501 crtl->stack_alignment_estimated = align;
505 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
506 So here we only make sure stack_alignment_needed >= align. */
507 if (crtl->stack_alignment_needed < align)
508 crtl->stack_alignment_needed = align;
509 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
510 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
512 return align / BITS_PER_UNIT;
515 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
516 Return the frame offset. */
519 alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
521 HOST_WIDE_INT offset, new_frame_offset;
523 new_frame_offset = frame_offset;
524 if (FRAME_GROWS_DOWNWARD)
526 new_frame_offset -= size + frame_phase;
527 new_frame_offset &= -align;
528 new_frame_offset += frame_phase;
529 offset = new_frame_offset;
533 new_frame_offset -= frame_phase;
534 new_frame_offset += align - 1;
535 new_frame_offset &= -align;
536 new_frame_offset += frame_phase;
537 offset = new_frame_offset;
538 new_frame_offset += size;
540 frame_offset = new_frame_offset;
542 if (frame_offset_overflow (frame_offset, cfun->decl))
543 frame_offset = offset = 0;
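/* Worked example (illustrative): with FRAME_GROWS_DOWNWARD,
   frame_phase == 0, frame_offset == -4, size == 10 and align == 8:
     new_frame_offset = -4 - 10 = -14;
     new_frame_offset &= -8;      now -16
   so the variable lands at offset -16 and frame_offset becomes -16.  */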
548 /* Accumulate DECL into STACK_VARS. */
551 add_stack_var (tree decl)
553 if (stack_vars_num >= stack_vars_alloc)
555 if (stack_vars_alloc)
556 stack_vars_alloc = stack_vars_alloc * 3 / 2;
558 stack_vars_alloc = 32;
stack_vars
  = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
562 stack_vars[stack_vars_num].decl = decl;
563 stack_vars[stack_vars_num].offset = 0;
564 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
565 stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);
567 /* All variables are initially in their own partition. */
568 stack_vars[stack_vars_num].representative = stack_vars_num;
569 stack_vars[stack_vars_num].next = EOC;
571 /* Ensure that this decl doesn't get put onto the list twice. */
572 SET_DECL_RTL (decl, pc_rtx);
577 /* Compute the linear index of a lower-triangular coordinate (I, J). */
580 triangular_index (size_t i, size_t j)
587 return (i * (i + 1)) / 2 + j;
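/* E.g. triangular_index (3, 1) == 3*4/2 + 1 == 7; the formula assumes
   I >= J, i.e. that the pair has already been canonicalized.  */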
590 /* Ensure that STACK_VARS_CONFLICT is large enough for N objects. */
593 resize_stack_vars_conflict (size_t n)
595 size_t size = triangular_index (n-1, n-1) + 1;
597 if (size <= stack_vars_conflict_alloc)
600 stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
601 memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
602 (size - stack_vars_conflict_alloc) * sizeof (bool));
603 stack_vars_conflict_alloc = size;
606 /* Make the decls associated with luid's X and Y conflict. */
609 add_stack_var_conflict (size_t x, size_t y)
611 size_t index = triangular_index (x, y);
612 gcc_assert (index < stack_vars_conflict_alloc);
613 stack_vars_conflict[index] = true;
616 /* Check whether the decls associated with luid's X and Y conflict. */
619 stack_var_conflict_p (size_t x, size_t y)
621 size_t index = triangular_index (x, y);
622 gcc_assert (index < stack_vars_conflict_alloc);
623 return stack_vars_conflict[index];
626 /* Returns true if TYPE is or contains a union type. */
629 aggregate_contains_union_type (tree type)
633 if (TREE_CODE (type) == UNION_TYPE
634 || TREE_CODE (type) == QUAL_UNION_TYPE)
636 if (TREE_CODE (type) == ARRAY_TYPE)
637 return aggregate_contains_union_type (TREE_TYPE (type));
638 if (TREE_CODE (type) != RECORD_TYPE)
641 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
642 if (TREE_CODE (field) == FIELD_DECL)
643 if (aggregate_contains_union_type (TREE_TYPE (field)))
649 /* A subroutine of expand_used_vars. If two variables X and Y have alias
650 sets that do not conflict, then do add a conflict for these variables
651 in the interference graph. We also need to make sure to add conflicts
652 for union containing structures. Else RTL alias analysis comes along
653 and due to type based aliasing rules decides that for two overlapping
654 union temporaries { short s; int i; } accesses to the same mem through
655 different types may not alias and happily reorders stores across
656 life-time boundaries of the temporaries (See PR25654).
657 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
660 add_alias_set_conflicts (void)
662 size_t i, j, n = stack_vars_num;
664 for (i = 0; i < n; ++i)
666 tree type_i = TREE_TYPE (stack_vars[i].decl);
667 bool aggr_i = AGGREGATE_TYPE_P (type_i);
670 contains_union = aggregate_contains_union_type (type_i);
671 for (j = 0; j < i; ++j)
673 tree type_j = TREE_TYPE (stack_vars[j].decl);
674 bool aggr_j = AGGREGATE_TYPE_P (type_j);
676 /* Either the objects conflict by means of type based
677 aliasing rules, or we need to add a conflict. */
678 || !objects_must_conflict_p (type_i, type_j)
679 /* In case the types do not conflict ensure that access
680 to elements will conflict. In case of unions we have
681 to be careful as type based aliasing rules may say
682 access to the same memory does not conflict. So play
683 safe and add a conflict in this case. */
685 add_stack_var_conflict (i, j);
690 /* A subroutine of partition_stack_vars. A comparison function for qsort,
691 sorting an array of indices by the size of the object. */
694 stack_var_size_cmp (const void *a, const void *b)
696 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
697 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
698 unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
699 unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);
if (sa != sb)
  return sa < sb ? -1 : 1;

/* For stack variables of the same size use the uid of the decl
   to make the sort stable.  */
if (uida != uidb)
  return uida < uidb ? -1 : 1;
return 0;
714 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
715 partitioning algorithm. Partitions A and B are known to be non-conflicting.
716 Merge them into a single partition A.
At the same time, add OFFSET to all variables in partition B.  At the end
of the partitioning process we'll have a nice block that is easy to lay out
within the stack frame.  */
723 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
727 /* Update each element of partition B with the given offset,
728 and merge them into partition A. */
729 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
731 stack_vars[i].offset += offset;
732 stack_vars[i].representative = a;
734 stack_vars[last].next = stack_vars[a].next;
735 stack_vars[a].next = b;
737 /* Update the required alignment of partition A to account for B. */
738 if (stack_vars[a].alignb < stack_vars[b].alignb)
739 stack_vars[a].alignb = stack_vars[b].alignb;
741 /* Update the interference graph and merge the conflicts. */
742 for (last = stack_vars_num, i = 0; i < last; ++i)
743 if (stack_var_conflict_p (b, i))
744 add_stack_var_conflict (a, i);
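/* Sketch: if partition B is the chain {b1, b2} at offsets 0 and 8,
   union_stack_vars (a, b, 16) rebases them to offsets 16 and 24, makes
   A their representative, and splices B's chain into A's.  */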
747 /* A subroutine of expand_used_vars. Binpack the variables into
748 partitions constrained by the interference graph. The overall
749 algorithm used is as follows:
	Sort the objects by size.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	    offset(B) = O
	    O += size(B)
	    S -= size(B)
	  }
	}
*/
766 partition_stack_vars (void)
768 size_t si, sj, n = stack_vars_num;
770 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
771 for (si = 0; si < n; ++si)
772 stack_vars_sorted[si] = si;
777 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
779 /* Special case: detect when all variables conflict, and thus we can't
780 do anything during the partitioning loop. It isn't uncommon (with
781 C code at least) to declare all variables at the top of the function,
782 and if we're not inlining, then all variables will be in the same scope.
783 Take advantage of very fast libc routines for this scan. */
gcc_assert (sizeof (bool) == sizeof (char));
785 if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
788 for (si = 0; si < n; ++si)
790 size_t i = stack_vars_sorted[si];
791 HOST_WIDE_INT isize = stack_vars[i].size;
792 HOST_WIDE_INT offset = 0;
794 for (sj = si; sj-- > 0; )
796 size_t j = stack_vars_sorted[sj];
797 HOST_WIDE_INT jsize = stack_vars[j].size;
798 unsigned int jalign = stack_vars[j].alignb;
800 /* Ignore objects that aren't partition representatives. */
801 if (stack_vars[j].representative != j)
804 /* Ignore objects too large for the remaining space. */
808 /* Ignore conflicting objects. */
809 if (stack_var_conflict_p (i, j))
812 /* Refine the remaining space check to include alignment. */
813 if (offset & (jalign - 1))
815 HOST_WIDE_INT toff = offset;
817 toff &= -(HOST_WIDE_INT)jalign;
818 if (isize - (toff - offset) < jsize)
821 isize -= toff - offset;
825 /* UNION the objects, placing J at OFFSET. */
826 union_stack_vars (i, j, offset);
835 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
838 dump_stack_var_partition (void)
840 size_t si, i, j, n = stack_vars_num;
842 for (si = 0; si < n; ++si)
844 i = stack_vars_sorted[si];
846 /* Skip variables that aren't partition representatives, for now. */
847 if (stack_vars[i].representative != i)
850 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
851 " align %u\n", (unsigned long) i, stack_vars[i].size,
852 stack_vars[i].alignb);
854 for (j = i; j != EOC; j = stack_vars[j].next)
856 fputc ('\t', dump_file);
857 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
858 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
859 stack_vars[j].offset);
864 /* Assign rtl to DECL at frame offset OFFSET. */
867 expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
872 /* If this fails, we've overflowed the stack frame. Error nicely? */
873 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
875 x = plus_constant (virtual_stack_vars_rtx, offset);
876 x = gen_rtx_MEM (DECL_MODE (decl), x);
878 /* Set alignment we actually gave this decl. */
879 offset -= frame_phase;
880 align = offset & -offset;
881 align *= BITS_PER_UNIT;
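/* offset & -offset isolates the lowest set bit, so e.g. an offset of
   40 bytes yields an alignment of 8 bytes, i.e. 64 bits.  */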
882 if (align > STACK_BOUNDARY || align == 0)
883 align = STACK_BOUNDARY;
884 DECL_ALIGN (decl) = align;
885 DECL_USER_ALIGN (decl) = 0;
887 set_mem_attributes (x, decl, true);
888 SET_DECL_RTL (decl, x);
891 /* A subroutine of expand_used_vars. Give each partition representative
892 a unique location within the stack frame. Update each partition member
893 with that location. */
896 expand_stack_vars (bool (*pred) (tree))
898 size_t si, i, j, n = stack_vars_num;
900 for (si = 0; si < n; ++si)
902 HOST_WIDE_INT offset;
904 i = stack_vars_sorted[si];
906 /* Skip variables that aren't partition representatives, for now. */
907 if (stack_vars[i].representative != i)
910 /* Skip variables that have already had rtl assigned. See also
911 add_stack_var where we perpetrate this pc_rtx hack. */
912 if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
915 /* Check the predicate to see whether this variable should be
916 allocated in this pass. */
917 if (pred && !pred (stack_vars[i].decl))
920 offset = alloc_stack_frame_space (stack_vars[i].size,
921 stack_vars[i].alignb);
/* Create rtl for each variable based on their location within the partition.  */
925 for (j = i; j != EOC; j = stack_vars[j].next)
927 gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
928 expand_one_stack_var_at (stack_vars[j].decl,
929 stack_vars[j].offset + offset);
934 /* Take into account all sizes of partitions and reset DECL_RTLs. */
936 account_stack_vars (void)
938 size_t si, j, i, n = stack_vars_num;
939 HOST_WIDE_INT size = 0;
941 for (si = 0; si < n; ++si)
943 i = stack_vars_sorted[si];
945 /* Skip variables that aren't partition representatives, for now. */
946 if (stack_vars[i].representative != i)
949 size += stack_vars[i].size;
950 for (j = i; j != EOC; j = stack_vars[j].next)
951 SET_DECL_RTL (stack_vars[j].decl, NULL);
956 /* A subroutine of expand_one_var. Called to immediately assign rtl
957 to a variable to be allocated in the stack frame. */
960 expand_one_stack_var (tree var)
962 HOST_WIDE_INT size, offset, align;
964 size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
965 align = get_decl_align_unit (var);
966 offset = alloc_stack_frame_space (size, align);
968 expand_one_stack_var_at (var, offset);
971 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
972 that will reside in a hard register. */
975 expand_one_hard_reg_var (tree var)
977 rest_of_decl_compilation (var, 0, 0);
980 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
981 that will reside in a pseudo register. */
984 expand_one_register_var (tree var)
986 tree type = TREE_TYPE (var);
987 int unsignedp = TYPE_UNSIGNED (type);
988 enum machine_mode reg_mode
989 = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
990 rtx x = gen_reg_rtx (reg_mode);
992 SET_DECL_RTL (var, x);
994 /* Note if the object is a user variable. */
995 if (!DECL_ARTIFICIAL (var))
998 if (POINTER_TYPE_P (type))
999 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
1002 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1003 has some associated error, e.g. its type is error-mark. We just need
1004 to pick something that won't crash the rest of the compiler. */
1007 expand_one_error_var (tree var)
1009 enum machine_mode mode = DECL_MODE (var);
1012 if (mode == BLKmode)
1013 x = gen_rtx_MEM (BLKmode, const0_rtx);
1014 else if (mode == VOIDmode)
1017 x = gen_reg_rtx (mode);
1019 SET_DECL_RTL (var, x);
1022 /* A subroutine of expand_one_var. VAR is a variable that will be
1023 allocated to the local stack frame. Return true if we wish to
1024 add VAR to STACK_VARS so that it will be coalesced with other
1025 variables. Return false to allocate VAR immediately.
1027 This function is used to reduce the number of variables considered
1028 for coalescing, which reduces the size of the quadratic problem. */
1031 defer_stack_allocation (tree var, bool toplevel)
1033 /* If stack protection is enabled, *all* stack variables must be deferred,
1034 so that we can re-order the strings to the top of the frame. */
1035 if (flag_stack_protect)
1038 /* Variables in the outermost scope automatically conflict with
1039 every other variable. The only reason to want to defer them
1040 at all is that, after sorting, we can more efficiently pack
1041 small variables in the stack frame. Continue to defer at -O2. */
1042 if (toplevel && optimize < 2)
1045 /* Without optimization, *most* variables are allocated from the
1046 stack, which makes the quadratic problem large exactly when we
1047 want compilation to proceed as quickly as possible. On the
1048 other hand, we don't want the function's stack frame size to
1049 get completely out of hand. So we avoid adding scalars and
1050 "small" aggregates to the list at all. */
1051 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1057 /* A subroutine of expand_used_vars. Expand one variable according to
1058 its flavor. Variables to be placed on the stack are not actually
1059 expanded yet, merely recorded.
When REALLY_EXPAND is false, only add stack values to be allocated.
Return the amount of stack space this variable is supposed to take.  */
1064 static HOST_WIDE_INT
1065 expand_one_var (tree var, bool toplevel, bool really_expand)
1067 if (SUPPORTS_STACK_ALIGNMENT
1068 && TREE_TYPE (var) != error_mark_node
1069 && TREE_CODE (var) == VAR_DECL)
/* Because we don't know if VAR will be in a register or on the stack,
   we conservatively assume it will be on the stack even if VAR is
   eventually put into a register after the RA pass.  For non-automatic
   variables, which won't be on the stack, we collect the alignment of
   the type and ignore any user-specified alignment.  */
1078 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1079 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1080 TYPE_MODE (TREE_TYPE (var)),
1081 TYPE_ALIGN (TREE_TYPE (var)));
1083 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1085 if (crtl->stack_alignment_estimated < align)
/* stack_alignment_estimated shouldn't change after the stack
   realign decision has been made.  */
gcc_assert (!crtl->stack_realign_processed);
1090 crtl->stack_alignment_estimated = align;
1094 if (TREE_CODE (var) != VAR_DECL)
1096 else if (DECL_EXTERNAL (var))
1098 else if (DECL_HAS_VALUE_EXPR_P (var))
1100 else if (TREE_STATIC (var))
1102 else if (DECL_RTL_SET_P (var))
1104 else if (TREE_TYPE (var) == error_mark_node)
1107 expand_one_error_var (var);
1109 else if (DECL_HARD_REGISTER (var))
1112 expand_one_hard_reg_var (var);
1114 else if (use_register_for_decl (var))
1117 expand_one_register_var (var);
1119 else if (defer_stack_allocation (var, toplevel))
1120 add_stack_var (var);
1124 expand_one_stack_var (var);
1125 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1130 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1131 expanding variables. Those variables that can be put into registers
1132 are allocated pseudos; those that can't are put on the stack.
1134 TOPLEVEL is true if this is the outermost BLOCK. */
1137 expand_used_vars_for_block (tree block, bool toplevel)
1139 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1142 old_sv_num = toplevel ? 0 : stack_vars_num;
1144 /* Expand all variables at this level. */
1145 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1147 expand_one_var (t, toplevel, true);
1149 this_sv_num = stack_vars_num;
1151 /* Expand all variables at containing levels. */
1152 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1153 expand_used_vars_for_block (t, false);
1155 /* Since we do not track exact variable lifetimes (which is not even
1156 possible for variables whose address escapes), we mirror the block
1157 tree in the interference graph. Here we cause all variables at this
1158 level, and all sublevels, to conflict. Do make certain that a
1159 variable conflicts with itself. */
1160 if (old_sv_num < this_sv_num)
1162 new_sv_num = stack_vars_num;
1163 resize_stack_vars_conflict (new_sv_num);
1165 for (i = old_sv_num; i < new_sv_num; ++i)
1166 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
1167 add_stack_var_conflict (i, j);
1171 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1172 and clear TREE_USED on all local variables. */
1175 clear_tree_used (tree block)
1179 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1180 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1183 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1184 clear_tree_used (t);
1187 /* Examine TYPE and determine a bit mask of the following features. */
1189 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1190 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1191 #define SPCT_HAS_ARRAY 4
1192 #define SPCT_HAS_AGGREGATE 8
1195 stack_protect_classify_type (tree type)
1197 unsigned int ret = 0;
1200 switch (TREE_CODE (type))
1203 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1204 if (t == char_type_node
1205 || t == signed_char_type_node
1206 || t == unsigned_char_type_node)
1208 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1209 unsigned HOST_WIDE_INT len;
1211 if (!TYPE_SIZE_UNIT (type)
1212 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1215 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1218 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1220 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1223 ret = SPCT_HAS_ARRAY;
1227 case QUAL_UNION_TYPE:
1229 ret = SPCT_HAS_AGGREGATE;
1230 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1231 if (TREE_CODE (t) == FIELD_DECL)
1232 ret |= stack_protect_classify_type (TREE_TYPE (t));
1242 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1243 part of the local stack frame. Remember if we ever return nonzero for
1244 any variable in this function. The return value is the phase number in
1245 which the variable should be allocated. */
1248 stack_protect_decl_phase (tree decl)
1250 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1253 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1254 has_short_buffer = true;
1256 if (flag_stack_protect == 2)
1258 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1259 && !(bits & SPCT_HAS_AGGREGATE))
1261 else if (bits & SPCT_HAS_ARRAY)
1265 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1268 has_protected_decls = true;
1273 /* Two helper routines that check for phase 1 and phase 2. These are used
1274 as callbacks for expand_stack_vars. */
1277 stack_protect_decl_phase_1 (tree decl)
1279 return stack_protect_decl_phase (decl) == 1;
1283 stack_protect_decl_phase_2 (tree decl)
1285 return stack_protect_decl_phase (decl) == 2;
1288 /* Ensure that variables in different stack protection phases conflict
1289 so that they are not merged and share the same stack slot. */
1292 add_stack_protection_conflicts (void)
1294 size_t i, j, n = stack_vars_num;
1295 unsigned char *phase;
1297 phase = XNEWVEC (unsigned char, n);
1298 for (i = 0; i < n; ++i)
1299 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1301 for (i = 0; i < n; ++i)
1303 unsigned char ph_i = phase[i];
1304 for (j = 0; j < i; ++j)
1305 if (ph_i != phase[j])
1306 add_stack_var_conflict (i, j);
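/* E.g. a small char buffer (phase 1) can never be merged into the same
   stack slot as an int array (phase 2) or a plain scalar (phase 0).  */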
1312 /* Create a decl for the guard at the top of the stack frame. */
1315 create_stack_guard (void)
1317 tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
1318 TREE_THIS_VOLATILE (guard) = 1;
1319 TREE_USED (guard) = 1;
1320 expand_one_stack_var (guard);
1321 crtl->stack_protect_guard = guard;
/* A subroutine of estimated_stack_frame_size.  Walk down through the
   BLOCK tree accounting for the stack usage of the variables, without
   actually expanding them; mirrors expand_used_vars_for_block.

   TOPLEVEL is true if this is the outermost BLOCK.  */
1330 static HOST_WIDE_INT
1331 account_used_vars_for_block (tree block, bool toplevel)
1333 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1335 HOST_WIDE_INT size = 0;
1337 old_sv_num = toplevel ? 0 : stack_vars_num;
1339 /* Expand all variables at this level. */
1340 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1342 size += expand_one_var (t, toplevel, false);
1344 this_sv_num = stack_vars_num;
1346 /* Expand all variables at containing levels. */
1347 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1348 size += account_used_vars_for_block (t, false);
1350 /* Since we do not track exact variable lifetimes (which is not even
1351 possible for variables whose address escapes), we mirror the block
1352 tree in the interference graph. Here we cause all variables at this
1353 level, and all sublevels, to conflict. Do make certain that a
1354 variable conflicts with itself. */
1355 if (old_sv_num < this_sv_num)
1357 new_sv_num = stack_vars_num;
1358 resize_stack_vars_conflict (new_sv_num);
1360 for (i = old_sv_num; i < new_sv_num; ++i)
1361 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
1362 add_stack_var_conflict (i, j);
1367 /* Prepare for expanding variables. */
1369 init_vars_expansion (void)
1372 /* Set TREE_USED on all variables in the local_decls. */
1373 for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
1374 TREE_USED (TREE_VALUE (t)) = 1;
1376 /* Clear TREE_USED on all variables associated with a block scope. */
1377 clear_tree_used (DECL_INITIAL (current_function_decl));
1379 /* Initialize local stack smashing state. */
1380 has_protected_decls = false;
1381 has_short_buffer = false;
1384 /* Free up stack variable graph data. */
1386 fini_vars_expansion (void)
1388 XDELETEVEC (stack_vars);
1389 XDELETEVEC (stack_vars_sorted);
1390 XDELETEVEC (stack_vars_conflict);
1392 stack_vars_alloc = stack_vars_num = 0;
1393 stack_vars_conflict = NULL;
1394 stack_vars_conflict_alloc = 0;
1397 /* Make a fair guess for the size of the stack frame of the current
1398 function. This doesn't have to be exact, the result is only used
1399 in the inline heuristics. So we don't want to run the full stack
1400 var packing algorithm (which is quadratic in the number of stack
1401 vars). Instead, we calculate the total size of all stack vars.
1402 This turns out to be a pretty fair estimate -- packing of stack
1403 vars doesn't happen very often. */
1406 estimated_stack_frame_size (void)
1408 HOST_WIDE_INT size = 0;
1410 tree t, outer_block = DECL_INITIAL (current_function_decl);
1412 init_vars_expansion ();
1414 for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
1416 tree var = TREE_VALUE (t);
1418 if (TREE_USED (var))
1419 size += expand_one_var (var, true, false);
1420 TREE_USED (var) = 1;
1422 size += account_used_vars_for_block (outer_block, true);
1424 if (stack_vars_num > 0)
1426 /* Fake sorting the stack vars for account_stack_vars (). */
1427 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1428 for (i = 0; i < stack_vars_num; ++i)
1429 stack_vars_sorted[i] = i;
1430 size += account_stack_vars ();
1431 fini_vars_expansion ();
1437 /* Expand all variables used in the function. */
1440 expand_used_vars (void)
1442 tree t, next, outer_block = DECL_INITIAL (current_function_decl);
1444 /* Compute the phase of the stack frame for this function. */
1446 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1447 int off = STARTING_FRAME_OFFSET % align;
1448 frame_phase = off ? align - off : 0;
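/* Illustrative example: with PREFERRED_STACK_BOUNDARY == 64 (align == 8)
   and STARTING_FRAME_OFFSET == 4, off == 4 and frame_phase == 4.  */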
1451 init_vars_expansion ();
1453 /* At this point all variables on the local_decls with TREE_USED
1454 set are not associated with any block scope. Lay them out. */
1455 t = cfun->local_decls;
1456 cfun->local_decls = NULL_TREE;
1459 tree var = TREE_VALUE (t);
1460 bool expand_now = false;
1462 next = TREE_CHAIN (t);
1464 /* We didn't set a block for static or extern because it's hard
1465 to tell the difference between a global variable (re)declared
1466 in a local scope, and one that's really declared there to
1467 begin with. And it doesn't really matter much, since we're
1468 not giving them stack space. Expand them now. */
1469 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1472 /* Any variable that could have been hoisted into an SSA_NAME
1473 will have been propagated anywhere the optimizers chose,
1474 i.e. not confined to their original block. Allocate them
1475 as if they were defined in the outermost scope. */
1476 else if (is_gimple_reg (var))
1479 /* If the variable is not associated with any block, then it
was created by the optimizers, and could be live anywhere in the function.  */
1482 else if (TREE_USED (var))
1485 /* Finally, mark all variables on the list as used. We'll use
1486 this in a moment when we expand those associated with scopes. */
1487 TREE_USED (var) = 1;
1491 expand_one_var (var, true, true);
1492 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1494 rtx rtl = DECL_RTL_IF_SET (var);
1496 /* Keep artificial non-ignored vars in cfun->local_decls
1497 chain until instantiate_decls. */
1498 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1500 TREE_CHAIN (t) = cfun->local_decls;
1501 cfun->local_decls = t;
1510 /* At this point, all variables within the block tree with TREE_USED
1511 set are actually used by the optimized function. Lay them out. */
1512 expand_used_vars_for_block (outer_block, true);
1514 if (stack_vars_num > 0)
1516 /* Due to the way alias sets work, no variables with non-conflicting
alias sets may be assigned the same address.  Add conflicts to reflect this.  */
1519 add_alias_set_conflicts ();
1521 /* If stack protection is enabled, we don't share space between
1522 vulnerable data and non-vulnerable data. */
1523 if (flag_stack_protect)
1524 add_stack_protection_conflicts ();
1526 /* Now that we have collected all stack variables, and have computed a
1527 minimal interference graph, attempt to save some stack space. */
1528 partition_stack_vars ();
1530 dump_stack_var_partition ();
1533 /* There are several conditions under which we should create a
1534 stack guard: protect-all, alloca used, protected decls present. */
1535 if (flag_stack_protect == 2
1536 || (flag_stack_protect
1537 && (cfun->calls_alloca || has_protected_decls)))
1538 create_stack_guard ();
1540 /* Assign rtl to each variable based on these partitions. */
1541 if (stack_vars_num > 0)
1543 /* Reorder decls to be protected by iterating over the variables
1544 array multiple times, and allocating out of each phase in turn. */
1545 /* ??? We could probably integrate this into the qsort we did
1546 earlier, such that we naturally see these variables first,
1547 and thus naturally allocate things in the right order. */
1548 if (has_protected_decls)
1550 /* Phase 1 contains only character arrays. */
1551 expand_stack_vars (stack_protect_decl_phase_1);
1553 /* Phase 2 contains other kinds of arrays. */
1554 if (flag_stack_protect == 2)
1555 expand_stack_vars (stack_protect_decl_phase_2);
1558 expand_stack_vars (NULL);
1560 fini_vars_expansion ();
1563 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1564 if (STACK_ALIGNMENT_NEEDED)
1566 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1567 if (!FRAME_GROWS_DOWNWARD)
1568 frame_offset += align - 1;
1569 frame_offset &= -align;
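/* E.g. with align == 16: an upward-growing frame_offset of 20 becomes
   (20 + 15) & -16 == 32, while a downward-growing frame_offset of -20
   becomes -20 & -16 == -32 with no bias needed (illustrative).  */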
1574 /* If we need to produce a detailed dump, print the tree representation
1575 for STMT to the dump file. SINCE is the last RTX after which the RTL
1576 generated for STMT should have been appended. */
1579 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1581 if (dump_file && (dump_flags & TDF_DETAILS))
1583 fprintf (dump_file, "\n;; ");
1584 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1585 fprintf (dump_file, "\n");
1587 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1591 /* Maps the blocks that do not contain tree labels to rtx labels. */
1593 static struct pointer_map_t *lab_rtx_for_bb;
1595 /* Returns the label_rtx expression for a label starting basic block BB. */
1598 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1600 gimple_stmt_iterator gsi;
1605 if (bb->flags & BB_RTL)
1606 return block_label (bb);
1608 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1612 /* Find the tree label if it is present. */
1614 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1616 lab_stmt = gsi_stmt (gsi);
1617 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1620 lab = gimple_label_label (lab_stmt);
1621 if (DECL_NONLOCAL (lab))
1624 return label_rtx (lab);
1627 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1628 *elt = gen_label_rtx ();
1633 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1634 Returns a new basic block if we've terminated the current basic
1635 block and created a new one. */
1638 expand_gimple_cond (basic_block bb, gimple stmt)
1640 basic_block new_bb, dest;
1644 tree pred = gimple_cond_pred_to_tree (stmt);
1647 last2 = last = get_last_insn ();
1649 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1650 if (gimple_has_location (stmt))
1652 set_curr_insn_source_location (gimple_location (stmt));
1653 set_curr_insn_block (gimple_block (stmt));
1656 /* These flags have no purpose in RTL land. */
1657 true_edge->flags &= ~EDGE_TRUE_VALUE;
1658 false_edge->flags &= ~EDGE_FALSE_VALUE;
1660 /* We can either have a pure conditional jump with one fallthru edge or
1661 two-way jump that needs to be decomposed into two basic blocks. */
1662 if (false_edge->dest == bb->next_bb)
1664 jumpif (pred, label_rtx_for_bb (true_edge->dest));
1665 add_reg_br_prob_note (last, true_edge->probability);
1666 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1667 if (true_edge->goto_locus)
1669 set_curr_insn_source_location (true_edge->goto_locus);
1670 set_curr_insn_block (true_edge->goto_block);
1671 true_edge->goto_locus = curr_insn_locator ();
1673 true_edge->goto_block = NULL;
1674 false_edge->flags |= EDGE_FALLTHRU;
1678 if (true_edge->dest == bb->next_bb)
1680 jumpifnot (pred, label_rtx_for_bb (false_edge->dest));
1681 add_reg_br_prob_note (last, false_edge->probability);
1682 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1683 if (false_edge->goto_locus)
1685 set_curr_insn_source_location (false_edge->goto_locus);
1686 set_curr_insn_block (false_edge->goto_block);
1687 false_edge->goto_locus = curr_insn_locator ();
1689 false_edge->goto_block = NULL;
1690 true_edge->flags |= EDGE_FALLTHRU;
1695 jumpif (pred, label_rtx_for_bb (true_edge->dest));
1696 add_reg_br_prob_note (last, true_edge->probability);
1697 last = get_last_insn ();
1698 if (false_edge->goto_locus)
1700 set_curr_insn_source_location (false_edge->goto_locus);
1701 set_curr_insn_block (false_edge->goto_block);
1702 false_edge->goto_locus = curr_insn_locator ();
1704 false_edge->goto_block = NULL;
1705 emit_jump (label_rtx_for_bb (false_edge->dest));
1708 if (BARRIER_P (BB_END (bb)))
1709 BB_END (bb) = PREV_INSN (BB_END (bb));
1710 update_bb_for_insn (bb);
1712 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1713 dest = false_edge->dest;
1714 redirect_edge_succ (false_edge, new_bb);
1715 false_edge->flags |= EDGE_FALLTHRU;
1716 new_bb->count = false_edge->count;
1717 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1718 new_edge = make_edge (new_bb, dest, 0);
1719 new_edge->probability = REG_BR_PROB_BASE;
1720 new_edge->count = new_bb->count;
1721 if (BARRIER_P (BB_END (new_bb)))
1722 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1723 update_bb_for_insn (new_bb);
1725 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1727 if (true_edge->goto_locus)
1729 set_curr_insn_source_location (true_edge->goto_locus);
1730 set_curr_insn_block (true_edge->goto_block);
1731 true_edge->goto_locus = curr_insn_locator ();
1733 true_edge->goto_block = NULL;
1739 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
1740 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
1741 generated a tail call (something that might be denied by the ABI
1742 rules governing the call; see calls.c).
1744 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
1745 can still reach the rest of BB. The case here is __builtin_sqrt,
1746 where the NaN result goes through the external function (with a
1747 tailcall) and the normal result happens via a sqrt instruction. */
1750 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
1757 tree stmt_tree = gimple_to_tree (stmt);
1759 last2 = last = get_last_insn ();
1761 expand_expr_stmt (stmt_tree);
1763 release_stmt_tree (stmt, stmt_tree);
1765 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
1766 if (CALL_P (last) && SIBLING_CALL_P (last))
1769 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1771 *can_fallthru = true;
1775 /* ??? Wouldn't it be better to just reset any pending stack adjust?
1776 Any instructions emitted here are about to be deleted. */
1777 do_pending_stack_adjust ();
1779 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
1780 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
1781 EH or abnormal edges, we shouldn't have created a tail call in
1782 the first place. So it seems to me we should just be removing
all edges here, or redirecting the existing fallthru edge to the exit block.  */
1789 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1791 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
1793 if (e->dest != EXIT_BLOCK_PTR)
1795 e->dest->count -= e->count;
1796 e->dest->frequency -= EDGE_FREQUENCY (e);
1797 if (e->dest->count < 0)
1799 if (e->dest->frequency < 0)
1800 e->dest->frequency = 0;
1803 probability += e->probability;
1810 /* This is somewhat ugly: the call_expr expander often emits instructions
1811 after the sibcall (to perform the function return). These confuse the
1812 find_many_sub_basic_blocks code, so we need to get rid of these. */
1813 last = NEXT_INSN (last);
1814 gcc_assert (BARRIER_P (last));
1816 *can_fallthru = false;
1817 while (NEXT_INSN (last))
/* For instance, the sqrt builtin expander expands an if with a sibcall
   in the then-branch and a label for the else-branch.  */
1821 if (LABEL_P (NEXT_INSN (last)))
1823 *can_fallthru = true;
1826 delete_insn (NEXT_INSN (last));
1829 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
1830 e->probability += probability;
1833 update_bb_for_insn (bb);
1835 if (NEXT_INSN (last))
1837 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1840 if (BARRIER_P (last))
1841 BB_END (bb) = PREV_INSN (last);
1844 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1849 /* Expand basic block BB from GIMPLE trees to RTL. */
1852 expand_gimple_basic_block (basic_block bb)
1854 gimple_stmt_iterator gsi;
1863 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
1866 /* Note that since we are now transitioning from GIMPLE to RTL, we
1867 cannot use the gsi_*_bb() routines because they expect the basic
1868 block to be in GIMPLE, instead of RTL. Therefore, we need to
1869 access the BB sequence directly. */
1870 stmts = bb_seq (bb);
1871 bb->il.gimple = NULL;
1872 rtl_profile_for_bb (bb);
1873 init_rtl_bb_info (bb);
1874 bb->flags |= BB_RTL;
/* Remove the RETURN_EXPR if we may fall through to the exit instead.  */
1878 gsi = gsi_last (stmts);
1879 if (!gsi_end_p (gsi)
1880 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
1882 gimple ret_stmt = gsi_stmt (gsi);
1884 gcc_assert (single_succ_p (bb));
1885 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
1887 if (bb->next_bb == EXIT_BLOCK_PTR
1888 && !gimple_return_retval (ret_stmt))
1890 gsi_remove (&gsi, false);
1891 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
1895 gsi = gsi_start (stmts);
1896 if (!gsi_end_p (gsi))
1898 stmt = gsi_stmt (gsi);
1899 if (gimple_code (stmt) != GIMPLE_LABEL)
1903 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1907 last = get_last_insn ();
1911 tree stmt_tree = gimple_to_tree (stmt);
1912 expand_expr_stmt (stmt_tree);
1913 release_stmt_tree (stmt, stmt_tree);
1918 emit_label ((rtx) *elt);
/* Java emits line number notes at the top of labels.
   ??? Make this go away once line number notes are obsoleted.  */
1922 BB_HEAD (bb) = NEXT_INSN (last);
1923 if (NOTE_P (BB_HEAD (bb)))
1924 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
1925 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
1927 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1930 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
1932 NOTE_BASIC_BLOCK (note) = bb;
1934 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1936 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
1937 e->flags &= ~EDGE_EXECUTABLE;
1939 /* At the moment not all abnormal edges match the RTL representation.
1940 It is safe to remove them here as find_many_sub_basic_blocks will
1941 rediscover them. In the future we should get this fixed properly. */
1942 if (e->flags & EDGE_ABNORMAL)
1948 for (; !gsi_end_p (gsi); gsi_next (&gsi))
1950 gimple stmt = gsi_stmt (gsi);
1953 /* Expand this statement, then evaluate the resulting RTL and
1954 fixup the CFG accordingly. */
1955 if (gimple_code (stmt) == GIMPLE_COND)
1957 new_bb = expand_gimple_cond (bb, stmt);
1963 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
1966 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
1975 else if (gimple_code (stmt) != GIMPLE_CHANGE_DYNAMIC_TYPE)
1977 tree stmt_tree = gimple_to_tree (stmt);
1978 last = get_last_insn ();
1979 expand_expr_stmt (stmt_tree);
1980 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1981 release_stmt_tree (stmt, stmt_tree);
1986 /* Expand implicit goto and convert goto_locus. */
1987 FOR_EACH_EDGE (e, ei, bb->succs)
1989 if (e->goto_locus && e->goto_block)
1991 set_curr_insn_source_location (e->goto_locus);
1992 set_curr_insn_block (e->goto_block);
1993 e->goto_locus = curr_insn_locator ();
1995 e->goto_block = NULL;
1996 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
1998 emit_jump (label_rtx_for_bb (e->dest));
1999 e->flags &= ~EDGE_FALLTHRU;
2003 do_pending_stack_adjust ();
2005 /* Find the block tail. The last insn in the block is the insn
2006 before a barrier and/or table jump insn. */
2007 last = get_last_insn ();
2008 if (BARRIER_P (last))
2009 last = PREV_INSN (last);
2010 if (JUMP_TABLE_DATA_P (last))
2011 last = PREV_INSN (PREV_INSN (last));
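/* The table data is preceded by its CODE_LABEL, so step back over both
   to get closer to the jump insn that uses the table.  */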
2014 update_bb_for_insn (bb);
2020 /* Create a basic block for initialization code. */
2023 construct_init_block (void)
2025 basic_block init_block, first_block;
2029 /* Multiple entry points not supported yet. */
2030 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
2031 init_rtl_bb_info (ENTRY_BLOCK_PTR);
2032 init_rtl_bb_info (EXIT_BLOCK_PTR);
2033 ENTRY_BLOCK_PTR->flags |= BB_RTL;
2034 EXIT_BLOCK_PTR->flags |= BB_RTL;
2036 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
/* When the entry edge points to the first basic block, we don't need a
   jump; otherwise we have to jump to the proper target.  */
2040 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
2042 tree label = gimple_block_label (e->dest);
2044 emit_jump (label_rtx (label));
2048 flags = EDGE_FALLTHRU;
init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                 get_last_insn (),
                                 ENTRY_BLOCK_PTR);
2053 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
2054 init_block->count = ENTRY_BLOCK_PTR->count;
2057 first_block = e->dest;
2058 redirect_edge_succ (e, init_block);
2059 e = make_edge (init_block, first_block, flags);
2062 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
2063 e->probability = REG_BR_PROB_BASE;
2064 e->count = ENTRY_BLOCK_PTR->count;
2066 update_bb_for_insn (init_block);
2070 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
2071 found in the block tree. */
2074 set_block_levels (tree block, int level)
2078 BLOCK_NUMBER (block) = level;
2079 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
2080 block = BLOCK_CHAIN (block);
2084 /* Create a block containing landing pads and similar stuff. */
2087 construct_exit_block (void)
2089 rtx head = get_last_insn ();
2091 basic_block exit_block;
2095 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
2097 rtl_profile_for_bb (EXIT_BLOCK_PTR);
2099 /* Make sure the locus is set to the end of the function, so that
2100 epilogue line numbers and warnings are set properly. */
2101 if (cfun->function_end_locus != UNKNOWN_LOCATION)
2102 input_location = cfun->function_end_locus;
2104 /* The following insns belong to the top scope. */
2105 set_curr_insn_block (DECL_INITIAL (current_function_decl));
2107 /* Generate rtl for function exit. */
2108 expand_function_end ();
2110 end = get_last_insn ();
/* While emitting the function end we could move the end of the last basic block.  */
2115 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
2116 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
2117 head = NEXT_INSN (head);
2118 exit_block = create_basic_block (NEXT_INSN (head), end,
2119 EXIT_BLOCK_PTR->prev_bb);
2120 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
2121 exit_block->count = EXIT_BLOCK_PTR->count;
2124 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
2126 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
2127 if (!(e->flags & EDGE_ABNORMAL))
2128 redirect_edge_succ (e, exit_block);
2133 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
2134 e->probability = REG_BR_PROB_BASE;
2135 e->count = EXIT_BLOCK_PTR->count;
2136 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
2139 e->count -= e2->count;
2140 exit_block->count -= e2->count;
2141 exit_block->frequency -= EDGE_FREQUENCY (e2);
2145 if (exit_block->count < 0)
2146 exit_block->count = 0;
2147 if (exit_block->frequency < 0)
2148 exit_block->frequency = 0;
2149 update_bb_for_insn (exit_block);
2152 /* Helper function for discover_nonconstant_array_refs.
Look for ARRAY_REF nodes with non-constant indexes and mark them addressable.  */
2157 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
2158 void *data ATTRIBUTE_UNUSED)
2162 if (IS_TYPE_OR_DECL_P (t))
2164 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2166 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2167 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
2168 && (!TREE_OPERAND (t, 2)
2169 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
2170 || (TREE_CODE (t) == COMPONENT_REF
2171 && (!TREE_OPERAND (t,2)
2172 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
2173 || TREE_CODE (t) == BIT_FIELD_REF
2174 || TREE_CODE (t) == REALPART_EXPR
2175 || TREE_CODE (t) == IMAGPART_EXPR
2176 || TREE_CODE (t) == VIEW_CONVERT_EXPR
2177 || CONVERT_EXPR_P (t))
2178 t = TREE_OPERAND (t, 0);
2180 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2182 t = get_base_address (t);
2183 if (t && DECL_P (t))
2184 TREE_ADDRESSABLE (t) = 1;
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */
2199 discover_nonconstant_array_refs (void)
2202 gimple_stmt_iterator gsi;
2205 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2207 gimple stmt = gsi_stmt (gsi);
2208 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
2212 /* This function sets crtl->args.internal_arg_pointer to a virtual
2213 register if DRAP is needed. Local register allocator will replace
2214 virtual_incoming_args_rtx with the virtual register. */
2217 expand_stack_alignment (void)
2220 unsigned int preferred_stack_boundary;
2222 if (! SUPPORTS_STACK_ALIGNMENT)
2225 if (cfun->calls_alloca
2226 || cfun->has_nonlocal_label
2227 || crtl->has_nonlocal_goto)
2228 crtl->need_drap = true;
2230 gcc_assert (crtl->stack_alignment_needed
2231 <= crtl->stack_alignment_estimated);
2233 /* Update crtl->stack_alignment_estimated and use it later to align
2234 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
exceptions since callgraph doesn't collect incoming stack alignment in this case.  */
2237 if (flag_non_call_exceptions
2238 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
2239 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2241 preferred_stack_boundary = crtl->preferred_stack_boundary;
2242 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
2243 crtl->stack_alignment_estimated = preferred_stack_boundary;
2244 if (preferred_stack_boundary > crtl->stack_alignment_needed)
2245 crtl->stack_alignment_needed = preferred_stack_boundary;
  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}
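
/* A sketch of when DRAP kicks in (illustrative only, not from the
   original sources): a function such as

     void g (int n)
     {
       char *p = __builtin_alloca (n);
       p[0] = 0;
     }

   sets cfun->calls_alloca, which forces crtl->need_drap above.  Once
   the stack is realigned, incoming arguments can no longer be reached
   relative to the realigned stack pointer, so a dedicated DRAP
   (dynamic realign argument pointer) register is used to address
   them.  */
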
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate the CFG during
   the expansion.  */

static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_BUILT_IN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
        set_curr_insn_source_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_source_location (cfun->function_start_locus);
    }

  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();
  /* Make sure the first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = STACK_BOUNDARY;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();
  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "not protecting local variables: variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "not protecting function: no buffer at least %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }
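
  /* For illustration only (a hypothetical test case): compiled with
     -fstack-protector -Wstack-protector, a variable length buffer
     such as

       void f (int n)
       {
         char buf[n];
         buf[0] = 1;
       }

     sets cfun->calls_alloca and triggers the first warning above,
     since the guard cannot protect a variable-size allocation.  */
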
  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();
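
  /* Illustrative note (an assumption about targets, not from this
     file): on targets that lack an init section, the code emitted for
     main therefore begins with a library call along the lines of

       call __main

     where __main (defined in libgcc) runs the global constructors
     before the body of main executes.  */
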
  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();
  /* Update stack boundary if needed.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* Call update_stack_boundary here to update incoming stack
         boundary before TARGET_FUNCTION_OK_FOR_SIBCALL is called.
         TARGET_FUNCTION_OK_FOR_SIBCALL needs to know the accurate
         incoming stack alignment to check if it is OK to perform
         sibcall optimization, since sibcall optimization will only
         align the outgoing stack to the incoming stack boundary.  */
      if (targetm.calls.update_stack_boundary)
        targetm.calls.update_stack_boundary ();

      /* The incoming stack frame has to be aligned at least at
         parm_stack_boundary.  */
      gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
    }
  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges in expand_gimple_basic_block.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);
  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();
  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  /* Convert tree EH labels to RTL EH labels and zap the tree EH table.  */
  convert_from_eh_region_ranges ();
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
  sbitmap_free (blocks);

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }
  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }
  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();

  return 0;
}
struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  /* ??? If TER is enabled, we actually receive GENERIC.  */
  PROP_gimple_leh | PROP_cfg,           /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_trees,                           /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
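
/* For reference (an assumption about the surrounding sources, not part
   of this file): pass_expand is scheduled by the pass manager from
   init_optimization_passes in passes.c, roughly as

     NEXT_PASS (pass_expand);

   sitting between the last GIMPLE passes and the first RTL passes,
   which is why it requires PROP_gimple_leh | PROP_cfg and provides
   PROP_rtl while destroying PROP_trees.  */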