/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "double-int.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "insn-codes.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "langhooks.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "plugin-api.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "regs.h" /* For reg_renumber.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "tree-ssa-address.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt),
		gimple_assign_rhs2 (stmt),
		gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt),
		gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
	   && gimple_location (stmt) != EXPR_LOCATION (t))
	  || (gimple_block (stmt)
	      && currently_expanding_to_rtl
	      && EXPR_P (t)))
	t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
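/* E.g. for an SSA name i_3 whose SSA_NAME_VAR is the user variable i,
   SSAVAR yields that underlying VAR_DECL; for anything that is not an
   SSA_NAME it is the identity.  */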
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
	set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
	 doesn't run) record the place also in the base DECL if it's
	 a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
	{
	  tree var = SSA_NAME_VAR (t);
	  /* If we don't yet have something recorded, just record it now.  */
	  if (!DECL_RTL_SET_P (var))
	    SET_DECL_RTL (var, x);
	  /* If we have it set already to "multiple places" don't
	     change this.  */
	  else if (DECL_RTL (var) == pc_rtx)
	    ;
	  /* If we have something recorded and it's not the same place
	     as we want to record now, we have multiple partitions for the
	     same base variable, with different places.  We can't just
	     randomly choose one, hence we have to say that we don't know.
	     This only happens with optimization, and there var-tracking
	     will figure out the right thing.  */
	  else if (DECL_RTL (var) != x)
	    SET_DECL_RTL (var, pc_rtx);
	}
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
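/* For example, with PREFERRED_STACK_BOUNDARY == 128 (16 bytes) and
   STARTING_FRAME_OFFSET == 8, the computation in expand_used_vars
   yields off == 8 and hence frame_phase == 16 - 8 == 8.  */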
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
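/* E.g. a decl whose LOCAL_DECL_ALIGNMENT is 32 bits is recorded with
   DECL_ALIGN == 32 and gets a byte alignment of 32 / 8 == 4.  */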
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
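/* Worked example for the upward-growing case: with frame_phase == 0,
   frame_offset == 5, size == 8 and align == 4, the start is rounded up
   to 8 via (5 + 3) & -4, the variable occupies bytes [8, 16), and
   frame_offset becomes 16.  */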
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
	bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
	{
	  size_t num = *v;
	  bitmap_iterator bi;
	  unsigned i;
	  gcc_assert (num < stack_vars_num);
	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
	    add_stack_var_conflict (num, i);
	}
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  size_t *v;
	  /* Nested function lowering might introduce LHSs
	     that are COMPONENT_REFs.  */
	  if (TREE_CODE (lhs) != VAR_DECL)
	    continue;
	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
	      && (v = decl_to_stack_part->get (lhs)))
	    bitmap_clear_bit (work, *v);
	}
      else if (!is_gimple_debug (stmt))
	{
	  if (for_conflict
	      && visit == visit_op)
	    {
	      /* If this is the first real instruction in this BB we need
		 to add conflicts for everything live at this point now.
		 Unlike classical liveness for named objects we can't
		 rely on seeing a def/use of the names we're interested in.
		 There might merely be indirect loads/stores.  We'd not add any
		 conflicts for such partitions.  */
	      bitmap_iterator bi;
	      unsigned i;
	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
		{
		  struct stack_var *a = &stack_vars[i];
		  if (!a->conflicts)
		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
		  bitmap_ior_into (a->conflicts, work);
		}
	      visit = visit_conflict;
	    }
	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
	}
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates if we have, for instance, moved an address-taken
     operation upward without also moving a dereference to it upward.
     But it is conservatively correct, as a variable can never hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
	{
	  bitmap active;
	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
	  active = (bitmap)bb->aux;
	  add_scope_conflicts_1 (bb, work, false);
	  if (bitmap_ior_into (active, work))
	    changed = true;
	}
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return 1;
  if (aligna > alignb)
    return -1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
	return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
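/* So, for example, a 16-byte object sorts before an 8-byte one, and of
   two 16-byte objects the more strictly aligned one comes first.  */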
struct part_traits : default_hashmap_traits
{
  template<typename T>
    static bool
    is_deleted (T &e)
    { return e.m_value == reinterpret_cast<void *> (1); }

  template<typename T> static bool is_empty (T &e) { return e.m_value == NULL; }

  template<typename T>
    static void
    mark_deleted (T &e)
    { e.m_value = reinterpret_cast<T> (1); }

  template<typename T>
    static void
    mark_empty (T &e)
    { e.m_value = NULL; }
};

typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
			       part_hashmap *decls_to_partitions,
			       hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
	 visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
	 || !bitmap_bit_p (temp, i))
	&& (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
	  || stack_vars[i].next == EOC)
	continue;

      if (!decls_to_partitions)
	{
	  decls_to_partitions = new part_hashmap;
	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
	}

      /* Create an SSA_NAME that points to the partition for use
	 as base during alias-oracle queries on RTL for bases that
	 have been partitioned.  */
      if (var == NULL_TREE)
	var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
	 points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  tree decl = stack_vars[j].decl;
	  unsigned int uid = DECL_PT_UID (decl);
	  bitmap_set_bit (part, uid);
	  decls_to_partitions->put (uid, part);
	  cfun->gimple_df->decls_to_pointers->put (decl, name);
	  if (TREE_ADDRESSABLE (decl))
	    TREE_ADDRESSABLE (name) = 1;
	}

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  struct ptr_info_def *pi;

	  if (name
	      && POINTER_TYPE_P (TREE_TYPE (name))
	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
					   &visited, temp);
	}

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
				     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
	add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size in descending order.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	    offset(B) = O
	    O += size(B)
	    S -= size(B)
	  }
	}
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
	 see a var that is not a partition representative, it must
	 have been merged earlier.  */
      if (stack_vars[i].representative != i)
	continue;

      for (sj = si + 1; sj < n; ++sj)
	{
	  size_t j = stack_vars_sorted[sj];
	  unsigned int jalign = stack_vars[j].alignb;
	  HOST_WIDE_INT jsize = stack_vars[j].size;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Do not mix objects of "small" (supported) alignment
	     and "large" (unsupported) alignment.  */
	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
	    break;

	  /* For Address Sanitizer do not mix objects with different
	     sizes, as the shorter vars wouldn't be adequately protected.
	     Don't do that for "large" (unsupported) alignment objects,
	     those aren't protected anyway.  */
	  if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    break;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* UNION the objects, placing J at OFFSET.  */
	  union_stack_vars (i, j);
	}
    }

  update_alias_info_with_stack_vars ();
}
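/* For instance, three mutually non-conflicting objects of sizes 32, 16
   and 8 end up in a single partition: they overlay at one frame offset
   in expand_stack_vars, so the frame needs 32 bytes for them instead
   of 56.  */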
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
	       " align %u\n", (unsigned long) i, stack_vars[i].size,
	       stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	}
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
			 HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
	 If it is we generate stack slots only accidentally so it isn't as
	 important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
	offset -= frame_phase;
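      /* offset & -offset isolates the lowest set bit of OFFSET, i.e. the
	 largest power of two dividing it; e.g. for offset == 24 this is 8,
	 so the slot is known to be 8-byte aligned.  */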
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
	align = base_align;

      /* One would think that we could assert that we're not decreasing
	 alignment here, but (at least) the i386 port does exactly this
	 via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reverse order; highest-offset pairs come first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
	{
	  unsigned alignb;

	  i = stack_vars_sorted[si];
	  alignb = stack_vars[i].alignb;

	  /* All "large" alignment decls come before all "small" alignment
	     decls, but "large" alignment decls are not sorted based on
	     their alignment.  Increase large_align to track the largest
	     required alignment.  */
	  if ((alignb * BITS_PER_UNIT) > large_align)
	    large_align = alignb * BITS_PER_UNIT;

	  /* Stop when we get to the first decl with "small" alignment.  */
	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    break;

	  /* Skip variables that aren't partition representatives.  */
	  if (stack_vars[i].representative != i)
	    continue;

	  /* Skip variables that have already had rtl assigned.  See also
	     add_stack_var where we perpetrate this pc_rtx hack.  */
	  decl = stack_vars[i].decl;
	  if ((TREE_CODE (decl) == SSA_NAME
	       ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
	       : DECL_RTL (decl)) != pc_rtx)
	    continue;

	  large_size += alignb - 1;
	  large_size &= -(HOST_WIDE_INT)alignb;
	  large_size += stack_vars[i].size;
	}

      /* If there were any, allocate space.  */
      if (large_size > 0)
	large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
						   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
	   ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
	   : DECL_RTL (decl)) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (i))
	continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	{
	  base = virtual_stack_vars_rtx;
	  if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
	    {
	      HOST_WIDE_INT prev_offset = frame_offset;
	      tree repr_decl = NULL_TREE;

	      offset
		= alloc_stack_frame_space (stack_vars[i].size
					   + ASAN_RED_ZONE_SIZE,
					   MAX (alignb, ASAN_RED_ZONE_SIZE));
	      data->asan_vec.safe_push (prev_offset);
	      data->asan_vec.safe_push (offset + stack_vars[i].size);
	      /* Find best representative of the partition.
		 Prefer those with DECL_NAME, even better
		 satisfying asan_protect_stack_decl predicate.  */
	      for (j = i; j != EOC; j = stack_vars[j].next)
		if (asan_protect_stack_decl (stack_vars[j].decl)
		    && DECL_NAME (stack_vars[j].decl))
		  {
		    repr_decl = stack_vars[j].decl;
		    break;
		  }
		else if (repr_decl == NULL_TREE
			 && DECL_P (stack_vars[j].decl)
			 && DECL_NAME (stack_vars[j].decl))
		  repr_decl = stack_vars[j].decl;
	      if (repr_decl == NULL_TREE)
		repr_decl = stack_vars[i].decl;
	      data->asan_decl_vec.safe_push (repr_decl);
	      data->asan_alignb = MAX (data->asan_alignb, alignb);
	      if (data->asan_base == NULL)
		data->asan_base = gen_reg_rtx (Pmode);
	      base = data->asan_base;

	      if (!STRICT_ALIGNMENT)
		base_align = crtl->max_used_stack_slot_alignment;
	      else
		base_align = MAX (crtl->max_used_stack_slot_alignment,
				  GET_MODE_ALIGNMENT (SImode)
				  << ASAN_SHADOW_SHIFT);
	    }
	  else
	    {
	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
	      base_align = crtl->max_used_stack_slot_alignment;
	    }
	}
      else
	{
	  /* Large alignment is only processed in the last pass.  */
	  if (pred)
	    continue;
	  gcc_assert (large_base != NULL);

	  large_alloc += alignb - 1;
	  large_alloc &= -(HOST_WIDE_INT)alignb;
	  offset = large_alloc;
	  large_alloc += stack_vars[i].size;

	  base = large_base;
	  base_align = large_align;
	}

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  expand_one_stack_var_at (stack_vars[j].decl,
				   base, base_align,
				   offset);
	}
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
	set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
			   crtl->max_used_stack_slot_alignment, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
	 we conservatively assume it will be on stack even if VAR is
	 eventually put into register after RA pass.  For non-automatic
	 variables, which won't be on stack, we collect alignment of
	 type and ignore user specified alignment.  Similarly for
	 SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
	  || DECL_EXTERNAL (var)
	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
				   TYPE_MODE (TREE_TYPE (var)),
				   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
	   or variables which were assigned a stack slot already by
	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
	   changed from the offset chosen to it.  */
	align = crtl->stack_alignment_estimated;
      else
	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
	 it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
	align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
	 realign decision has been made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
		  || (!DECL_EXTERNAL (var)
		      && !DECL_HAS_VALUE_EXPR_P (var)
		      && !TREE_STATIC (var)
		      && TREE_TYPE (var) != error_mark_node
		      && !DECL_HARD_REGISTER (var)
		      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
	expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
	{
	  expand_one_hard_reg_var (var);
	  if (!DECL_HARD_REGISTER (var))
	    /* Invalid register specification.  */
	    expand_one_error_var (var);
	}
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
	expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
	{
	  error ("size of variable %q+D is too large", var);
	  expand_one_error_var (var);
	}
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
	expand_one_stack_var (origvar);
      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
	&& ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
	    || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
	|| !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3,
  SPCT_FLAG_EXPLICIT = 4
};
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
	    len = max;
	  else
	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG
      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
	  && lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl))))
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
	return true;
      i = stack_vars[i].next;
    }
  return false;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			   VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
	stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}
/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
	tree field_type = TREE_TYPE (f);
	if (RECORD_OR_UNION_TYPE_P (field_type)
	    && record_or_union_type_has_array_p (field_type))
	  return 1;
	if (TREE_CODE (field_type) == ARRAY_TYPE)
	  return 1;
      }
  return 0;
}
/* Check if the current function has local referenced variables that
   have their addresses taken, contain an array, or are arrays.  */

static bool
stack_protect_decl_p ()
{
  unsigned i;
  tree var;

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    if (!is_global_var (var))
      {
	tree var_type = TREE_TYPE (var);
	if (TREE_CODE (var) == VAR_DECL
	    && (TREE_CODE (var_type) == ARRAY_TYPE
		|| TREE_ADDRESSABLE (var)
		|| (RECORD_OR_UNION_TYPE_P (var_type)
		    && record_or_union_type_has_array_p (var_type))))
	  return true;
      }
  return false;
}
/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	 !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple stmt = gsi_stmt (gsi);
	/* This assumes that calls to internal-only functions never
	   use a return slot.  */
	if (is_gimple_call (stmt)
	    && !gimple_call_internal_p (stmt)
	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
				  gimple_call_fndecl (stmt)))
	  return true;
      }
  return false;
}
/* Expand all variables used in the function.  */

static rtx_insn *
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  vec<tree> maybe_local_decls = vNULL;
  rtx_insn *var_end_seq = NULL;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  if (targetm.use_pseudo_pic_reg ())
    pic_offset_table_rtx = gen_reg_rtx (Pmode);

  hash_map<tree, tree> ssa_name_decls;
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      /* Assign decls to each SSA name partition, share decls for partitions
	 we could have coalesced (those with the same type).  */
      if (SSA_NAME_VAR (var) == NULL_TREE)
	{
	  tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
	  if (!*slot)
	    *slot = create_tmp_reg (TREE_TYPE (var));
	  replace_ssa_name_symbol (var, *slot);
	}

      /* Always allocate space for partitions based on VAR_DECLs.  But for
	 those based on PARM_DECLs or RESULT_DECLs and which matter for the
	 debug info, there is no need to do so if optimization is disabled
	 because all the SSA_NAMEs based on these DECLs have been coalesced
	 into a single partition, which is thus assigned the canonical RTL
	 location of the DECLs.  If in_lto_p, we can't rely on optimize,
	 a function could be compiled with -O1 -flto first and only the
	 link performed at -O0.  */
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
	expand_one_var (var, true, true);
      else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
	{
	  /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
	     contain the default def (representing the parm or result itself)
	     we don't do anything here.  But those which don't contain the
	     default def (representing a temporary based on the parm/result)
	     we need to allocate space just like for normal VAR_DECLs.  */
	  if (!bitmap_bit_p (SA.partition_has_default_def, i))
	    {
	      expand_one_var (var, true, true);
	      gcc_assert (SA.partition_to_pseudo[i]);
	    }
	}
    }

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    gen_stack_protect_signal
      = stack_protect_decl_p () || stack_protect_return_slot_p ();
  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
	{
	  TREE_USED (var) = 0;
	  goto next;
	}
      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	expand_now = true;

      /* Expand variables not associated with any block now.  Those created by
	 the optimizers could be live anywhere in the function.  Those that
	 could possibly have been scoped originally and detached from their
	 block will have their allocation deferred so we coalesce them with
	 others when optimization is enabled.  */
      else if (TREE_USED (var))
	expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
	expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
	{
	  rtx rtl = DECL_RTL_IF_SET (var);

	  /* Keep artificial non-ignored vars in cfun->local_decls
	     chain until instantiate_decls.  */
	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
	    add_local_decl (cfun, var);
	  else if (rtl == NULL_RTX)
	    /* If rtl isn't set yet, which can happen e.g. with
	       -fstack-protector, retry before returning from this
	       function.  */
	    maybe_local_decls.safe_push (var);
	}
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                         ^
			 +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);
  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect != 0
	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
		  && lookup_attribute ("stack_protect",
				       DECL_ATTRIBUTES (current_function_decl)))))
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
	  || cfun->calls_alloca || has_protected_decls
	  || lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls
	  || lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;

    case SPCT_FLAG_EXPLICIT:
      if (lookup_attribute ("stack_protect",
			    DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;
    default:
      ;
    }
  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      struct stack_vars_data data;

      data.asan_vec = vNULL;
      data.asan_decl_vec = vNULL;
      data.asan_base = NULL_RTX;
      data.asan_alignb = 0;

      /* Reorder decls to be protected by iterating over the variables
	 array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
	 earlier, such that we naturally see these variables first,
	 and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
	{
	  /* Phase 1 contains only character arrays.  */
	  expand_stack_vars (stack_protect_decl_phase_1, &data);

	  /* Phase 2 contains other kinds of arrays.  */
	  if (flag_stack_protect == SPCT_FLAG_ALL
	      || flag_stack_protect == SPCT_FLAG_STRONG
	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
		  && lookup_attribute ("stack_protect",
				       DECL_ATTRIBUTES (current_function_decl))))
	    expand_stack_vars (stack_protect_decl_phase_2, &data);
	}

      if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
	/* Phase 3, any partitions that need asan protection
	   in addition to phase 1 and 2.  */
	expand_stack_vars (asan_decl_phase_3, &data);

      if (!data.asan_vec.is_empty ())
	{
	  HOST_WIDE_INT prev_offset = frame_offset;
	  HOST_WIDE_INT offset, sz, redzonesz;
	  redzonesz = ASAN_RED_ZONE_SIZE;
	  sz = data.asan_vec[0] - prev_offset;
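	  /* For example, assuming ASAN_RED_ZONE_SIZE is 32, with sz == 40
	     and data.asan_alignb == 64 the formula below yields
	     redzonesz == 88, so sz + redzonesz == 128 is a multiple of
	     the required 64-byte alignment.  */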
	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
	      && data.asan_alignb <= 4096
	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
	  offset
	    = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
	  data.asan_vec.safe_push (prev_offset);
	  data.asan_vec.safe_push (offset);
	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
	  if (STRICT_ALIGNMENT)
	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
				      << ASAN_SHADOW_SHIFT)
				     / BITS_PER_UNIT, 1);

	  var_end_seq
	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
					  data.asan_base,
					  data.asan_alignb,
					  data.asan_vec.address (),
					  data.asan_decl_vec.address (),
					  data.asan_vec.length ());
	}

      expand_stack_vars (NULL, &data);

      data.asan_vec.release ();
      data.asan_decl_vec.release ();
    }

  fini_vars_expansion ();
  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
	 chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
	add_local_decl (cfun, var);
    }
  maybe_local_decls.release ();

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
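      /* Masking a negative frame_offset with -align rounds it toward more
	 negative values, which is the rounding we want when the frame
	 grows downward; e.g. -20 & -16 == -32.  For an upward-growing
	 frame we first add align - 1 so the offset rounds up instead.  */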
      if (!FRAME_GROWS_DOWNWARD)
	frame_offset += align - 1;
      frame_offset &= -align;
    }

  return var_end_seq;
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
			 TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx_code_label *
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
  if (elt)
    return *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *lab_stmt;

      lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!lab_stmt)
	break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
	break;

      return label_rtx (lab);
    }

  rtx_code_label *l = gen_label_rtx ();
  lab_rtx_for_bb->put (bb, l);
  return l;
}
2081 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2082 of a basic block where we just expanded the conditional at the end,
2083 possibly clean up the CFG and instruction sequence. LAST is the
2084 last instruction before the just emitted jump sequence. */
2087 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2089 /* Special case: when jumpif decides that the condition is
2090 trivial it emits an unconditional jump (and the necessary
2091 barrier). But we still have two edges, the fallthru one is
2092 wrong. purge_dead_edges would clean this up later. Unfortunately
2093 we have to insert insns (and split edges) before
2094 find_many_sub_basic_blocks and hence before purge_dead_edges.
2095 But splitting edges might create new blocks which depend on the
2096 fact that if there are two edges there's no barrier. So the
2097 barrier would get lost and verify_flow_info would ICE. Instead
2098 of auditing all edge splitters to care for the barrier (which
2099 normally isn't there in a cleaned CFG), fix it here. */
2100 if (BARRIER_P (get_last_insn ()))
2104 /* Now, we have a single successor block, if we have insns to
2105 insert on the remaining edge we potentially will insert
2106 it at the end of this block (if the dest block isn't feasible)
2107 in order to avoid splitting the edge. This insertion will take
2108 place in front of the last jump. But we might have emitted
2109 multiple jumps (conditional and one unconditional) to the
2110 same destination. Inserting in front of the last one then
2111 is a problem. See PR 40021. We fix this by deleting all
2112 jumps except the last unconditional one. */
2113 insn = PREV_INSN (get_last_insn ());
2114 /* Make sure we have an unconditional jump. Otherwise we're
2115 confused. */
2116 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2117 for (insn = PREV_INSN (insn); insn != last;)
2119 insn = PREV_INSN (insn);
2120 if (JUMP_P (NEXT_INSN (insn)))
2122 if (!any_condjump_p (NEXT_INSN (insn)))
2124 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2125 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2127 delete_insn (NEXT_INSN (insn));
2133 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2134 Returns a new basic block if we've terminated the current basic
2135 block and created a new one. */
2137 static basic_block
2138 expand_gimple_cond (basic_block bb, gcond *stmt)
2139 {
2140 basic_block new_bb, dest;
2141 edge new_edge;
2142 edge true_edge;
2143 edge false_edge;
2144 rtx_insn *last2, *last;
2145 enum tree_code code;
2146 tree op0, op1;
2148 code = gimple_cond_code (stmt);
2149 op0 = gimple_cond_lhs (stmt);
2150 op1 = gimple_cond_rhs (stmt);
2151 /* We're sometimes presented with such code:
2152 D.123_1 = x < y;
2153 if (D.123_1 != 0)
2154 ...
2155 This would expand to two comparisons which then later might
2156 be cleaned up by combine. But some pattern matchers like if-conversion
2157 work better when there's only one compare, so make up for this
2158 here as a special exception if TER would have made the same change. */
2159 if (SA.values
2160 && TREE_CODE (op0) == SSA_NAME
2161 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2162 && TREE_CODE (op1) == INTEGER_CST
2163 && ((gimple_cond_code (stmt) == NE_EXPR
2164 && integer_zerop (op1))
2165 || (gimple_cond_code (stmt) == EQ_EXPR
2166 && integer_onep (op1)))
2167 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2169 gimple second = SSA_NAME_DEF_STMT (op0);
2170 if (gimple_code (second) == GIMPLE_ASSIGN)
2172 enum tree_code code2 = gimple_assign_rhs_code (second);
2173 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2176 op0 = gimple_assign_rhs1 (second);
2177 op1 = gimple_assign_rhs2 (second);
2179 /* If jumps are cheap and the target does not support conditional
2180 compare, turn some more codes into jumpy sequences. */
2181 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2182 && targetm.gen_ccmp_first == NULL)
2184 if ((code2 == BIT_AND_EXPR
2185 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2186 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2187 || code2 == TRUTH_AND_EXPR)
2189 code = TRUTH_ANDIF_EXPR;
2190 op0 = gimple_assign_rhs1 (second);
2191 op1 = gimple_assign_rhs2 (second);
2193 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2195 code = TRUTH_ORIF_EXPR;
2196 op0 = gimple_assign_rhs1 (second);
2197 op1 = gimple_assign_rhs2 (second);
2198 }
2199 }
2200 }
2201 }
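/* An illustrative sketch (hedged, not from the original sources): with
   cheap jumps, "tem_1 = a_2 & b_3; if (tem_1 != 0)" on 1-bit operands
   is instead expanded as the short-circuit "if (a_2) if (b_3) goto
   true;", avoiding materializing the BIT_AND_EXPR result.  */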
2203 last2 = last = get_last_insn ();
2205 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2206 set_curr_insn_location (gimple_location (stmt));
2208 /* These flags have no purpose in RTL land. */
2209 true_edge->flags &= ~EDGE_TRUE_VALUE;
2210 false_edge->flags &= ~EDGE_FALSE_VALUE;
2212 /* We can either have a pure conditional jump with one fallthru edge or
2213 two-way jump that needs to be decomposed into two basic blocks. */
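/* Illustration (hedged): for "if (x) goto A; else goto B;" where B is
   the very next block, only a conditional jump to A is emitted and B
   is reached by falling through; when neither successor is the next
   block, an unconditional jump to B follows and the block is split.  */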
2214 if (false_edge->dest == bb->next_bb)
2216 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2217 true_edge->probability);
2218 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2219 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2220 set_curr_insn_location (true_edge->goto_locus);
2221 false_edge->flags |= EDGE_FALLTHRU;
2222 maybe_cleanup_end_of_block (false_edge, last);
2223 return NULL;
2224 }
2225 if (true_edge->dest == bb->next_bb)
2227 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2228 false_edge->probability);
2229 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2230 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2231 set_curr_insn_location (false_edge->goto_locus);
2232 true_edge->flags |= EDGE_FALLTHRU;
2233 maybe_cleanup_end_of_block (true_edge, last);
2234 return NULL;
2235 }
2237 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2238 true_edge->probability);
2239 last = get_last_insn ();
2240 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2241 set_curr_insn_location (false_edge->goto_locus);
2242 emit_jump (label_rtx_for_bb (false_edge->dest));
2244 BB_END (bb) = last;
2245 if (BARRIER_P (BB_END (bb)))
2246 BB_END (bb) = PREV_INSN (BB_END (bb));
2247 update_bb_for_insn (bb);
2249 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2250 dest = false_edge->dest;
2251 redirect_edge_succ (false_edge, new_bb);
2252 false_edge->flags |= EDGE_FALLTHRU;
2253 new_bb->count = false_edge->count;
2254 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2255 add_bb_to_loop (new_bb, bb->loop_father);
2256 new_edge = make_edge (new_bb, dest, 0);
2257 new_edge->probability = REG_BR_PROB_BASE;
2258 new_edge->count = new_bb->count;
2259 if (BARRIER_P (BB_END (new_bb)))
2260 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2261 update_bb_for_insn (new_bb);
2263 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2265 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2266 {
2267 set_curr_insn_location (true_edge->goto_locus);
2268 true_edge->goto_locus = curr_insn_location ();
2269 }
2271 return new_bb;
2272 }
2274 /* Mark all calls that can have a transaction restart. */
2276 static void
2277 mark_transaction_restart_calls (gimple stmt)
2278 {
2279 struct tm_restart_node dummy;
2280 tm_restart_node **slot;
2282 if (!cfun->gimple_df->tm_restart)
2283 return;
2285 dummy.stmt = stmt;
2286 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2289 struct tm_restart_node *n = *slot;
2290 tree list = n->label_or_list;
2293 for (insn = next_real_insn (get_last_insn ());
2294 !CALL_P (insn);
2295 insn = next_real_insn (insn))
2296 continue;
2298 if (TREE_CODE (list) == LABEL_DECL)
2299 add_reg_note (insn, REG_TM, label_rtx (list));
2300 else
2301 for (; list ; list = TREE_CHAIN (list))
2302 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2306 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2307 statement STMT. */
2309 static void
2310 expand_call_stmt (gcall *stmt)
2311 {
2312 tree exp, decl, lhs;
2316 if (gimple_call_internal_p (stmt))
2317 {
2318 expand_internal_call (stmt);
2319 return;
2320 }
2322 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2324 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2325 decl = gimple_call_fndecl (stmt);
2326 builtin_p = decl && DECL_BUILT_IN (decl);
2328 /* If this is not a builtin function, the function type through which the
2329 call is made may be different from the type of the function. */
2330 if (!builtin_p)
2331 CALL_EXPR_FN (exp)
2332 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2333 CALL_EXPR_FN (exp));
2335 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2336 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2338 for (i = 0; i < gimple_call_num_args (stmt); i++)
2340 tree arg = gimple_call_arg (stmt, i);
2341 gimple def;
2342 /* TER addresses into arguments of builtin functions so we have a
2343 chance to infer more correct alignment information. See PR39954. */
2344 if (builtin_p
2345 && TREE_CODE (arg) == SSA_NAME
2346 && (def = get_gimple_for_ssa_name (arg))
2347 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2348 arg = gimple_assign_rhs1 (def);
2349 CALL_EXPR_ARG (exp, i) = arg;
2352 if (gimple_has_side_effects (stmt))
2353 TREE_SIDE_EFFECTS (exp) = 1;
2355 if (gimple_call_nothrow_p (stmt))
2356 TREE_NOTHROW (exp) = 1;
2358 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2359 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2360 if (decl
2361 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2362 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2363 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2364 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2366 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2367 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2368 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2369 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2371 /* Ensure RTL is created for debug args. */
2372 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2374 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2375 if (debug_args)
2376 {
2377 unsigned int ix;
2378 tree dtemp;
2379 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2381 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2382 expand_debug_expr (dtemp);
2386 lhs = gimple_call_lhs (stmt);
2387 if (lhs)
2388 expand_assignment (lhs, exp, false);
2389 else
2390 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2392 mark_transaction_restart_calls (stmt);
2396 /* Generate RTL for an asm statement (explicit assembler code).
2397 STRING is a STRING_CST node containing the assembler code text,
2398 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2399 insn is volatile; don't optimize it. */
2401 static void
2402 expand_asm_loc (tree string, int vol, location_t locus)
2403 {
2404 rtx body;
2406 if (TREE_CODE (string) == ADDR_EXPR)
2407 string = TREE_OPERAND (string, 0);
2409 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2410 ggc_strdup (TREE_STRING_POINTER (string)),
2411 locus);
2413 MEM_VOLATILE_P (body) = vol;
2415 emit_insn (body);
2416 }
2418 /* Return the number of times character C occurs in string S. */
2419 static int
2420 n_occurrences (int c, const char *s)
2421 {
2422 int n = 0;
2423 while (*s)
2424 n += (*s++ == c);
2425 return n;
2426 }
2428 /* A subroutine of expand_asm_operands. Check that all operands have
2429 the same number of alternatives. Return true if so. */
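/* E.g. the constraint strings "=r,m" and "r,r" each contain one comma
   and hence describe two alternatives apiece, so they are consistent;
   mixing "=r,m" with a plain "r" would be rejected.  (An illustrative
   example, not from the original sources.)  */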
2431 static bool
2432 check_operand_nalternatives (tree outputs, tree inputs)
2433 {
2434 if (outputs || inputs)
2435 {
2436 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2437 int nalternatives
2438 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2439 tree next = inputs;
2441 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2442 {
2443 error ("too many alternatives in %<asm%>");
2444 return false;
2445 }
2447 tmp = outputs;
2448 while (tmp)
2449 {
2450 const char *constraint
2451 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2453 if (n_occurrences (',', constraint) != nalternatives)
2455 error ("operand constraints for %<asm%> differ "
2456 "in number of alternatives");
2460 if (TREE_CHAIN (tmp))
2461 tmp = TREE_CHAIN (tmp);
2462 else
2463 tmp = next, next = 0;
2464 }
2465 }
2467 return true;
2468 }
2470 /* Check for overlap between registers marked in CLOBBERED_REGS and
2471 anything inappropriate in T. Emit error and return the register
2472 variable definition for error, NULL_TREE for ok. */
2474 static tree
2475 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2476 {
2477 /* Conflicts between asm-declared register variables and the clobber
2478 list are not allowed. */
2479 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2481 if (overlap)
2482 {
2483 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2484 DECL_NAME (overlap));
2486 /* Reset registerness to stop multiple errors emitted for a single
2487 variable. */
2488 DECL_REGISTER (overlap) = 0;
2489 }
2491 return overlap;
2492 }
2495 /* Generate RTL for an asm statement with arguments.
2496 STRING is the instruction template.
2497 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2498 Each output or input has an expression in the TREE_VALUE and
2499 a tree list in TREE_PURPOSE which in turn contains a constraint
2500 name in TREE_PURPOSE (or NULL_TREE) and a constraint string
2501 in TREE_VALUE.
2502 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2503 that is clobbered by this insn.
2505 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2506 should be the fallthru basic block of the asm goto.
2508 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2509 Some elements of OUTPUTS may be replaced with trees representing temporary
2510 values. The caller should copy those temporary values to the originally
2511 specified outputs.
2513 VOL nonzero means the insn is volatile; don't optimize it. */
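/* For illustration (a hedged user-level example, not from this file):

     asm volatile ("add %1,%0" : "=r" (x) : "r" (y), "0" (x));

   reaches this function with OUTPUTS a one-element list whose
   constraint string is "=r", INPUTS a two-element list with constraint
   strings "r" and the matching-operand constraint "0", and VOL set.  */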
2515 static void
2516 expand_asm_operands (tree string, tree outputs, tree inputs,
2517 tree clobbers, tree labels, basic_block fallthru_bb,
2518 int vol, location_t locus)
2519 {
2520 rtvec argvec, constraintvec, labelvec;
2522 int ninputs = list_length (inputs);
2523 int noutputs = list_length (outputs);
2524 int nlabels = list_length (labels);
2527 HARD_REG_SET clobbered_regs;
2528 int clobber_conflict_found = 0;
2532 /* Vector of RTX's of evaluated output operands. */
2533 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2534 int *inout_opnum = XALLOCAVEC (int, noutputs);
2535 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2536 machine_mode *inout_mode = XALLOCAVEC (machine_mode, noutputs);
2537 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2538 int old_generating_concat_p = generating_concat_p;
2539 rtx_code_label *fallthru_label = NULL;
2541 /* An ASM with no outputs needs to be treated as volatile, for now. */
2542 if (noutputs == 0)
2543 vol = 1;
2545 if (! check_operand_nalternatives (outputs, inputs))
2546 return;
2548 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2550 /* Collect constraints. */
2552 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2553 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2554 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2555 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2557 /* Sometimes we wish to automatically clobber registers across an asm.
2558 Case in point is when the i386 backend moved from cc0 to a hard reg --
2559 maintaining source-level compatibility means automatically clobbering
2560 the flags register. */
2561 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2563 /* Count the number of meaningful clobbered registers, ignoring what
2564 we would ignore later. */
2566 CLEAR_HARD_REG_SET (clobbered_regs);
2567 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2569 const char *regname;
2570 int nregs;
2572 if (TREE_VALUE (tail) == error_mark_node)
2573 return;
2574 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2576 i = decode_reg_name_and_count (regname, &nregs);
2577 if (i == -4)
2578 ++nclobbers;
2579 else if (i == -2)
2580 error ("unknown register name %qs in %<asm%>", regname);
2582 /* Mark clobbered registers. */
2587 for (reg = i; reg < i + nregs; reg++)
2591 /* Clobbering the PIC register is an error. */
2592 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2594 error ("PIC register clobbered by %qs in %<asm%>", regname);
2598 SET_HARD_REG_BIT (clobbered_regs, reg);
2603 /* First pass over inputs and outputs checks validity and sets
2604 mark_addressable if needed. */
2607 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2609 tree val = TREE_VALUE (tail);
2610 tree type = TREE_TYPE (val);
2611 const char *constraint;
2616 /* If there's an erroneous arg, emit no insn. */
2617 if (type == error_mark_node)
2618 return;
2620 /* Try to parse the output constraint. If that fails, there's
2621 no point in going further. */
2622 constraint = constraints[i];
2623 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2624 &allows_mem, &allows_reg, &is_inout))
2631 && REG_P (DECL_RTL (val))
2632 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2633 mark_addressable (val);
2640 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2641 {
2642 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2643 return;
2644 }
2646 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2648 bool allows_reg, allows_mem;
2649 const char *constraint;
2651 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2652 would get VOIDmode and that could cause a crash in reload. */
2653 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2654 return;
2656 constraint = constraints[i + noutputs];
2657 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2658 constraints, &allows_mem, &allows_reg))
2661 if (! allows_reg && allows_mem)
2662 mark_addressable (TREE_VALUE (tail));
2665 /* Second pass evaluates arguments. */
2667 /* Make sure stack is consistent for asm goto. */
2668 if (nlabels > 0)
2669 do_pending_stack_adjust ();
2672 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2674 tree val = TREE_VALUE (tail);
2675 tree type = TREE_TYPE (val);
2682 ok = parse_output_constraint (&constraints[i], i, ninputs,
2683 noutputs, &allows_mem, &allows_reg,
2684 &is_inout);
2685 gcc_assert (ok);
2687 /* If an output operand is not a decl or indirect ref and our constraint
2688 allows a register, make a temporary to act as an intermediate.
2689 Make the asm insn write into that, then our caller will copy it to
2690 the real output operand. Likewise for promoted variables. */
2692 generating_concat_p = 0;
2694 real_output_rtx[i] = NULL_RTX;
2695 if ((TREE_CODE (val) == INDIRECT_REF
2698 && (allows_mem || REG_P (DECL_RTL (val)))
2699 && ! (REG_P (DECL_RTL (val))
2700 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2704 op = expand_expr (val, NULL_RTX, VOIDmode,
2705 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2706 if (MEM_P (op))
2707 op = validize_mem (op);
2709 if (! allows_reg && !MEM_P (op))
2710 error ("output number %d not directly addressable", i);
2711 if ((! allows_mem && MEM_P (op))
2712 || GET_CODE (op) == CONCAT)
2714 real_output_rtx[i] = op;
2715 op = gen_reg_rtx (GET_MODE (op));
2717 emit_move_insn (op, real_output_rtx[i]);
2722 op = assign_temp (type, 0, 1);
2723 op = validize_mem (op);
2724 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2725 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2726 TREE_VALUE (tail) = make_tree (type, op);
2730 generating_concat_p = old_generating_concat_p;
2734 inout_mode[ninout] = TYPE_MODE (type);
2735 inout_opnum[ninout++] = i;
2738 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2739 clobber_conflict_found = 1;
2742 /* Make vectors for the expression-rtx, constraint strings,
2743 and named operands. */
2745 argvec = rtvec_alloc (ninputs);
2746 constraintvec = rtvec_alloc (ninputs);
2747 labelvec = rtvec_alloc (nlabels);
2749 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2750 : GET_MODE (output_rtx[0])),
2751 ggc_strdup (TREE_STRING_POINTER (string)),
2752 empty_string, 0, argvec, constraintvec,
2755 MEM_VOLATILE_P (body) = vol;
2757 /* Eval the inputs and put them into ARGVEC.
2758 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2760 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2762 bool allows_reg, allows_mem;
2763 const char *constraint;
2768 constraint = constraints[i + noutputs];
2769 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2770 constraints, &allows_mem, &allows_reg);
2773 generating_concat_p = 0;
2775 val = TREE_VALUE (tail);
2776 type = TREE_TYPE (val);
2777 /* EXPAND_INITIALIZER will not generate code for valid initializer
2778 constants, but will still generate code for other types of operand.
2779 This is the behavior we want for constant constraints. */
2780 op = expand_expr (val, NULL_RTX, VOIDmode,
2781 allows_reg ? EXPAND_NORMAL
2782 : allows_mem ? EXPAND_MEMORY
2783 : EXPAND_INITIALIZER);
2785 /* Never pass a CONCAT to an ASM. */
2786 if (GET_CODE (op) == CONCAT)
2787 op = force_reg (GET_MODE (op), op);
2788 else if (MEM_P (op))
2789 op = validize_mem (op);
2791 if (asm_operand_ok (op, constraint, NULL) <= 0)
2793 if (allows_reg && TYPE_MODE (type) != BLKmode)
2794 op = force_reg (TYPE_MODE (type), op);
2795 else if (!allows_mem)
2796 warning (0, "asm operand %d probably doesn%'t match constraints",
2798 else if (MEM_P (op))
2800 /* We won't recognize either volatile memory or memory
2801 with a queued address as an available memory_operand
2802 at this point. Ignore it: clearly this *is* a memory. */
2803 }
2804 else
2805 gcc_unreachable ();
2806 }
2808 generating_concat_p = old_generating_concat_p;
2809 ASM_OPERANDS_INPUT (body, i) = op;
2811 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2812 = gen_rtx_ASM_INPUT_loc (TYPE_MODE (type),
2813 ggc_strdup (constraints[i + noutputs]),
2816 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2817 clobber_conflict_found = 1;
2820 /* Protect all the operands from the queue now that they have all been
2821 evaluated. */
2823 generating_concat_p = 0;
2825 /* For in-out operands, copy output rtx to input rtx. */
2826 for (i = 0; i < ninout; i++)
2827 {
2828 int j = inout_opnum[i];
2829 char buffer[16];
2831 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2832 = output_rtx[j];
2834 sprintf (buffer, "%d", j);
2835 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2836 = gen_rtx_ASM_INPUT_loc (inout_mode[i], ggc_strdup (buffer), locus);
2837 }
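/* Illustration (hedged): an in-out operand such as "+r" (x) was split
   by the front end into output 0 with constraint "=r" plus a matching
   input whose constraint is the digit string "0"; the loop above
   synthesizes exactly that input from output_rtx[j].  */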
2839 /* Copy labels to the vector. */
2840 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2843 /* If asm goto has any labels in the fallthru basic block, use
2844 a label that we emit immediately after the asm goto. Expansion
2845 may insert further instructions into the same basic block after
2846 asm goto and if we don't do this, insertion of instructions on
2847 the fallthru edge might misbehave. See PR58670. */
2848 if (fallthru_bb
2849 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2851 if (fallthru_label == NULL_RTX)
2852 fallthru_label = gen_label_rtx ();
2853 r = fallthru_label;
2854 }
2855 else
2856 r = label_rtx (TREE_VALUE (tail));
2857 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2860 generating_concat_p = old_generating_concat_p;
2862 /* Now, for each output, construct an rtx
2863 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2864 ARGVEC CONSTRAINTS OPNAMES))
2865 If there is more than one, put them inside a PARALLEL. */
2867 if (nlabels > 0 && nclobbers == 0)
2869 gcc_assert (noutputs == 0);
2870 emit_jump_insn (body);
2872 else if (noutputs == 0 && nclobbers == 0)
2873 {
2874 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2875 emit_insn (body);
2876 }
2877 else if (noutputs == 1 && nclobbers == 0)
2879 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2880 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2881 }
2882 else
2883 {
2884 rtx obody = body;
2885 int num = noutputs;
2887 if (num == 0)
2888 num = 1;
2890 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2892 /* For each output operand, store a SET. */
2893 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2895 XVECEXP (body, 0, i)
2896 = gen_rtx_SET (VOIDmode,
2898 gen_rtx_ASM_OPERANDS
2899 (GET_MODE (output_rtx[i]),
2900 ggc_strdup (TREE_STRING_POINTER (string)),
2901 ggc_strdup (constraints[i]),
2902 i, argvec, constraintvec, labelvec, locus));
2904 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2907 /* If there are no outputs (but there are some clobbers)
2908 store the bare ASM_OPERANDS into the PARALLEL. */
2910 if (i == 0)
2911 XVECEXP (body, 0, i++) = obody;
2913 /* Store (clobber REG) for each clobbered register specified. */
2915 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2917 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2919 int j = decode_reg_name_and_count (regname, &nregs);
2924 if (j == -3) /* `cc', which is not a register */
2927 if (j == -4) /* `memory', don't cache memory across asm */
2929 XVECEXP (body, 0, i++)
2930 = gen_rtx_CLOBBER (VOIDmode,
2933 gen_rtx_SCRATCH (VOIDmode)));
2937 /* Ignore unknown register, error already signaled. */
2941 for (reg = j; reg < j + nregs; reg++)
2943 /* Use QImode since that's guaranteed to clobber just
2944 one reg. */
2945 clobbered_reg = gen_rtx_REG (QImode, reg);
2947 /* Do sanity check for overlap between clobbers and
2948 respectively input and outputs that hasn't been
2949 handled. Such overlap should have been detected and
2950 reported above. */
2951 if (!clobber_conflict_found)
2955 /* We test the old body (obody) contents to avoid
2956 tripping over the under-construction body. */
2957 for (opno = 0; opno < noutputs; opno++)
2958 if (reg_overlap_mentioned_p (clobbered_reg,
2961 ("asm clobber conflict with output operand");
2963 for (opno = 0; opno < ninputs - ninout; opno++)
2964 if (reg_overlap_mentioned_p (clobbered_reg,
2965 ASM_OPERANDS_INPUT (obody,
2968 ("asm clobber conflict with input operand");
2971 XVECEXP (body, 0, i++)
2972 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2977 emit_jump_insn (body);
2982 if (fallthru_label)
2983 emit_label (fallthru_label);
2985 /* For any outputs that needed reloading into registers, spill them
2986 back to where they belong. */
2987 for (i = 0; i < noutputs; ++i)
2988 if (real_output_rtx[i])
2989 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2991 crtl->has_asm_statement = 1;
2992 }
2996 static void
2997 expand_asm_stmt (gasm *stmt)
2998 {
2999 int noutputs;
3000 tree outputs, tail, t;
3001 tree *o;
3002 size_t i, n;
3003 const char *s;
3004 tree str, out, in, cl, labels;
3005 location_t locus = gimple_location (stmt);
3006 basic_block fallthru_bb = NULL;
3008 /* Meh... convert the gimple asm operands into real tree lists.
3009 Eventually we should make all routines work on the vectors instead
3010 of relying on TREE_CHAIN. */
3011 out = NULL_TREE;
3012 n = gimple_asm_noutputs (stmt);
3013 if (n > 0)
3014 {
3015 t = out = gimple_asm_output_op (stmt, 0);
3016 for (i = 1; i < n; i++)
3017 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
3021 n = gimple_asm_ninputs (stmt);
3024 t = in = gimple_asm_input_op (stmt, 0);
3025 for (i = 1; i < n; i++)
3026 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
3030 n = gimple_asm_nclobbers (stmt);
3033 t = cl = gimple_asm_clobber_op (stmt, 0);
3034 for (i = 1; i < n; i++)
3035 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
3039 n = gimple_asm_nlabels (stmt);
3042 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3043 if (fallthru)
3044 fallthru_bb = fallthru->dest;
3045 t = labels = gimple_asm_label_op (stmt, 0);
3046 for (i = 1; i < n; i++)
3047 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
3050 s = gimple_asm_string (stmt);
3051 str = build_string (strlen (s), s);
3053 if (gimple_asm_input_p (stmt))
3054 {
3055 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
3056 return;
3057 }
3060 noutputs = gimple_asm_noutputs (stmt);
3061 /* o[I] is the place that output number I should be written. */
3062 o = (tree *) alloca (noutputs * sizeof (tree));
3064 /* Record the contents of OUTPUTS before it is modified. */
3065 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3066 o[i] = TREE_VALUE (tail);
3068 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
3069 OUTPUTS some trees for where the values were actually stored. */
3070 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
3071 gimple_asm_volatile_p (stmt), locus);
3073 /* Copy all the intermediate outputs into the specified outputs. */
3074 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3076 if (o[i] != TREE_VALUE (tail))
3078 expand_assignment (o[i], TREE_VALUE (tail), false);
3081 /* Restore the original value so that it's correct the next
3082 time we expand this function. */
3083 TREE_VALUE (tail) = o[i];
3088 /* Emit code to jump to the address
3089 specified by the pointer expression EXP. */
3091 static void
3092 expand_computed_goto (tree exp)
3093 {
3094 rtx x = expand_normal (exp);
3096 do_pending_stack_adjust ();
3097 emit_indirect_jump (x);
3098 }
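/* For illustration: this expands the GNU C computed goto, as in
   "void *p = &&lab; ...; goto *p;", where EXP is the pointer
   expression "p".  (A hedged example, not from the original sources.)  */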
3100 /* Generate RTL code for a `goto' statement with target label LABEL.
3101 LABEL should be a LABEL_DECL tree node that was or will later be
3102 defined with `expand_label'. */
3104 void
3105 expand_goto (tree label)
3106 {
3107 #ifdef ENABLE_CHECKING
3108 /* Check for a nonlocal goto to a containing function. Should have
3109 gotten translated to __builtin_nonlocal_goto. */
3110 tree context = decl_function_context (label);
3111 gcc_assert (!context || context == current_function_decl);
3112 #endif
3114 emit_jump (label_rtx (label));
3117 /* Output a return with no value. */
3119 static void
3120 expand_null_return_1 (void)
3121 {
3122 clear_pending_stack_adjust ();
3123 do_pending_stack_adjust ();
3124 emit_jump (return_label);
3127 /* Generate RTL to return from the current function, with no value.
3128 (That is, we do not do anything about returning any value.) */
3130 void
3131 expand_null_return (void)
3132 {
3133 /* If this function was declared to return a value, but we
3134 didn't, clobber the return registers so that they are not
3135 propagated live to the rest of the function. */
3136 clobber_return_register ();
3138 expand_null_return_1 ();
3141 /* Generate RTL to return from the current function, with value VAL. */
3143 static void
3144 expand_value_return (rtx val)
3145 {
3146 /* Copy the value to the return location unless it's already there. */
3148 tree decl = DECL_RESULT (current_function_decl);
3149 rtx return_reg = DECL_RTL (decl);
3150 if (return_reg != val)
3152 tree funtype = TREE_TYPE (current_function_decl);
3153 tree type = TREE_TYPE (decl);
3154 int unsignedp = TYPE_UNSIGNED (type);
3155 machine_mode old_mode = DECL_MODE (decl);
3156 machine_mode mode;
3157 if (DECL_BY_REFERENCE (decl))
3158 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3159 else
3160 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3162 if (mode != old_mode)
3163 val = convert_modes (mode, old_mode, val, unsignedp);
3165 if (GET_CODE (return_reg) == PARALLEL)
3166 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3167 else
3168 emit_move_insn (return_reg, val);
3171 expand_null_return_1 ();
3174 /* Generate RTL to evaluate the expression RETVAL and return it
3175 from the current function. */
3177 static void
3178 expand_return (tree retval, tree bounds)
3179 {
3180 rtx result_rtl;
3181 rtx val = 0;
3182 tree retval_rhs;
3183 rtx bounds_rtl;
3185 /* If function wants no value, give it none. */
3186 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3187 {
3188 expand_normal (retval);
3189 expand_null_return ();
3190 return;
3191 }
3193 if (retval == error_mark_node)
3194 {
3195 /* Treat this like a return of no value from a function that
3196 returned a value. */
3197 expand_null_return ();
3198 return;
3199 }
3200 else if ((TREE_CODE (retval) == MODIFY_EXPR
3201 || TREE_CODE (retval) == INIT_EXPR)
3202 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3203 retval_rhs = TREE_OPERAND (retval, 1);
3205 retval_rhs = retval;
3207 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3209 /* Put returned bounds to the right place. */
3210 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3217 bnd = expand_normal (bounds);
3218 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3220 else if (REG_P (bounds_rtl))
3222 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3223 addr = gen_rtx_MEM (Pmode, addr);
3224 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3225 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3231 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3233 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3234 addr = gen_rtx_MEM (Pmode, addr);
3236 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3238 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3239 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3240 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3241 rtx bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3242 targetm.calls.store_returned_bounds (slot, bnd);
3246 else if (chkp_function_instrumented_p (current_function_decl)
3247 && !BOUNDED_P (retval_rhs)
3248 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3249 && TREE_CODE (retval_rhs) != RESULT_DECL)
3251 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3252 addr = gen_rtx_MEM (Pmode, addr);
3254 gcc_assert (MEM_P (result_rtl));
3256 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3259 /* If we are returning the RESULT_DECL, then the value has already
3260 been stored into it, so we don't have to do anything special. */
3261 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3262 expand_value_return (result_rtl);
3264 /* If the result is an aggregate that is being returned in one (or more)
3265 registers, load the registers here. */
3267 else if (retval_rhs != 0
3268 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3269 && REG_P (result_rtl))
3270 {
3271 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3272 if (val)
3273 {
3274 /* Use the mode of the result value on the return register. */
3275 PUT_MODE (result_rtl, GET_MODE (val));
3276 expand_value_return (val);
3277 }
3278 else
3279 expand_null_return ();
3280 }
3281 else if (retval_rhs != 0
3282 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3283 && (REG_P (result_rtl)
3284 || (GET_CODE (result_rtl) == PARALLEL)))
3285 {
3286 /* Compute the return value into a temporary (usually a pseudo reg). */
3287 val
3288 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3289 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3290 val = force_not_mem (val);
3291 expand_value_return (val);
3292 }
3293 else
3294 {
3295 /* No hard reg used; calculate value into hard return reg. */
3296 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3297 expand_value_return (result_rtl);
3298 }
3299 }
3301 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3302 STMT that doesn't require special handling for outgoing edges. That
3303 is no tailcalls and no GIMPLE_COND. */
3305 static void
3306 expand_gimple_stmt_1 (gimple stmt)
3307 {
3308 tree op0;
3310 set_curr_insn_location (gimple_location (stmt));
3312 switch (gimple_code (stmt))
3313 {
3314 case GIMPLE_GOTO:
3315 op0 = gimple_goto_dest (stmt);
3316 if (TREE_CODE (op0) == LABEL_DECL)
3317 expand_goto (op0);
3318 else
3319 expand_computed_goto (op0);
3320 break;
3321 case GIMPLE_LABEL:
3322 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3323 break;
3324 case GIMPLE_NOP:
3325 case GIMPLE_PREDICT:
3326 break;
3327 case GIMPLE_SWITCH:
3328 expand_case (as_a <gswitch *> (stmt));
3329 break;
3330 case GIMPLE_ASM:
3331 expand_asm_stmt (as_a <gasm *> (stmt));
3332 break;
3333 case GIMPLE_CALL:
3334 expand_call_stmt (as_a <gcall *> (stmt));
3335 break;
3337 case GIMPLE_RETURN:
3338 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3340 if (op0 && op0 != error_mark_node)
3341 {
3342 tree result = DECL_RESULT (current_function_decl);
3344 /* If we are not returning the current function's RESULT_DECL,
3345 build an assignment to it. */
3346 if (op0 != result)
3347 {
3348 /* I believe that a function's RESULT_DECL is unique. */
3349 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3351 /* ??? We'd like to use simply expand_assignment here,
3352 but this fails if the value is of BLKmode but the return
3353 decl is a register. expand_return has special handling
3354 for this combination, which eventually should move
3355 to common code. See comments there. Until then, let's
3356 build a modify expression :-/ */
3357 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3358 result, op0);
3359 }
3360 }
3361 if (!op0)
3362 expand_null_return ();
3363 else
3364 expand_return (op0, gimple_return_retbnd (stmt));
3365 break;
3367 case GIMPLE_ASSIGN:
3368 {
3369 gassign *assign_stmt = as_a <gassign *> (stmt);
3370 tree lhs = gimple_assign_lhs (assign_stmt);
3372 /* Tree expand used to fiddle with |= and &= of two bitfield
3373 COMPONENT_REFs here. This can't happen with gimple, the LHS
3374 of binary assigns must be a gimple reg. */
3376 if (TREE_CODE (lhs) != SSA_NAME
3377 || get_gimple_rhs_class (gimple_expr_code (stmt))
3378 == GIMPLE_SINGLE_RHS)
3380 tree rhs = gimple_assign_rhs1 (assign_stmt);
3381 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3382 == GIMPLE_SINGLE_RHS);
3383 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3384 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3385 if (TREE_CLOBBER_P (rhs))
3386 /* This is a clobber to mark the going out of scope for
3387 this LHS. */
3388 ;
3389 else
3390 expand_assignment (lhs, rhs,
3391 gimple_assign_nontemporal_move_p (
3392 assign_stmt));
3393 }
3394 else
3395 {
3396 rtx target, temp;
3397 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3398 struct separate_ops ops;
3399 bool promoted = false;
3401 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3402 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3403 promoted = true;
3405 ops.code = gimple_assign_rhs_code (assign_stmt);
3406 ops.type = TREE_TYPE (lhs);
3407 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3408 {
3409 case GIMPLE_TERNARY_RHS:
3410 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3411 /* Fallthru */
3412 case GIMPLE_BINARY_RHS:
3413 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3414 /* Fallthru */
3415 case GIMPLE_UNARY_RHS:
3416 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3417 break;
3418 default:
3419 gcc_unreachable ();
3420 }
3421 ops.location = gimple_location (stmt);
3423 /* If we want to use a nontemporal store, force the value to
3424 register first. If we store into a promoted register,
3425 don't directly expand to target. */
3426 temp = nontemporal || promoted ? NULL_RTX : target;
3427 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3434 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3435 /* If TEMP is a VOIDmode constant, use convert_modes to make
3436 sure that we properly convert it. */
3437 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3439 temp = convert_modes (GET_MODE (target),
3440 TYPE_MODE (ops.type),
3442 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3443 GET_MODE (target), temp, unsignedp);
3446 convert_move (SUBREG_REG (target), temp, unsignedp);
3447 }
3448 else if (nontemporal && emit_storent_insn (target, temp))
3449 ;
3450 else
3451 {
3452 temp = force_operand (temp, target);
3453 if (temp != target)
3454 emit_move_insn (target, temp);
3455 }
3465 /* Expand one gimple statement STMT and return the last RTL instruction
3466 before any of the newly generated ones.
3468 In addition to generating the necessary RTL instructions this also
3469 sets REG_EH_REGION notes if necessary and sets the current source
3470 location for diagnostics. */
3472 static rtx_insn *
3473 expand_gimple_stmt (gimple stmt)
3474 {
3475 location_t saved_location = input_location;
3476 rtx_insn *last = get_last_insn ();
3477 int lp_nr;
3481 /* We need to save and restore the current source location so that errors
3482 discovered during expansion are emitted with the right location. But
3483 it would be better if the diagnostic routines used the source location
3484 embedded in the tree nodes rather than globals. */
3485 if (gimple_has_location (stmt))
3486 input_location = gimple_location (stmt);
3488 expand_gimple_stmt_1 (stmt);
3490 /* Free any temporaries used to evaluate this statement. */
3491 free_temp_slots ();
3493 input_location = saved_location;
3495 /* Mark all insns that may trap. */
3496 lp_nr = lookup_stmt_eh_lp (stmt);
3497 if (lp_nr)
3498 {
3499 rtx_insn *insn;
3500 for (insn = next_real_insn (last); insn;
3501 insn = next_real_insn (insn))
3503 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3504 /* If we want exceptions for non-call insns, any
3505 may_trap_p instruction may throw. */
3506 && GET_CODE (PATTERN (insn)) != CLOBBER
3507 && GET_CODE (PATTERN (insn)) != USE
3508 && insn_could_throw_p (insn))
3509 make_reg_eh_region_note (insn, 0, lp_nr);
3510 }
3511 }
3513 return last;
3514 }
3516 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3517 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3518 generated a tail call (something that might be denied by the ABI
3519 rules governing the call; see calls.c).
3521 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3522 can still reach the rest of BB. The case here is __builtin_sqrt,
3523 where the NaN result goes through the external function (with a
3524 tailcall) and the normal result happens via a sqrt instruction. */
3526 static basic_block
3527 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3528 {
3529 rtx_insn *last2, *last;
3530 edge e;
3531 edge_iterator ei;
3532 int probability;
3533 gcov_type count;
3535 last2 = last = expand_gimple_stmt (stmt);
3537 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3538 if (CALL_P (last) && SIBLING_CALL_P (last))
3539 goto found;
3541 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3543 *can_fallthru = true;
3544 return NULL;
3546 found:
3547 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3548 Any instructions emitted here are about to be deleted. */
3549 do_pending_stack_adjust ();
3551 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3552 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3553 EH or abnormal edges, we shouldn't have created a tail call in
3554 the first place. So it seems to me we should just be removing
3555 all edges here, or redirecting the existing fallthru edge to
3556 the exit block. */
3558 probability = 0;
3559 count = 0;
3561 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3563 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3565 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3567 e->dest->count -= e->count;
3568 e->dest->frequency -= EDGE_FREQUENCY (e);
3569 if (e->dest->count < 0)
3571 if (e->dest->frequency < 0)
3572 e->dest->frequency = 0;
3573 }
3574 count += e->count;
3575 probability += e->probability;
3576 remove_edge (e);
3577 }
3578 else
3579 ei_next (&ei);
3580 }
3582 /* This is somewhat ugly: the call_expr expander often emits instructions
3583 after the sibcall (to perform the function return). These confuse the
3584 find_many_sub_basic_blocks code, so we need to get rid of these. */
3585 last = NEXT_INSN (last);
3586 gcc_assert (BARRIER_P (last));
3588 *can_fallthru = false;
3589 while (NEXT_INSN (last))
3590 {
3591 /* For instance an sqrt builtin expander may expand a conditional
3592 with a sibcall in the then-arm and a label for the else-arm. */
3593 if (LABEL_P (NEXT_INSN (last)))
3594 {
3595 *can_fallthru = true;
3596 break;
3597 }
3598 delete_insn (NEXT_INSN (last));
3599 }
3601 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3602 | EDGE_SIBCALL);
3603 e->probability += probability;
3604 e->count += count;
3605 BB_END (bb) = last;
3606 update_bb_for_insn (bb);
3608 if (NEXT_INSN (last))
3609 {
3610 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3612 last = BB_END (bb);
3613 if (BARRIER_P (last))
3614 BB_END (bb) = PREV_INSN (last);
3615 }
3617 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3619 return bb;
3620 }
3622 /* Return the difference between the floor and the truncated result of
3623 a signed division by OP1 with remainder MOD. */
3624 static rtx
3625 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3626 {
3627 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3628 return gen_rtx_IF_THEN_ELSE
3629 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3630 gen_rtx_IF_THEN_ELSE
3631 (mode, gen_rtx_LT (BImode,
3632 gen_rtx_DIV (mode, op1, mod),
3633 const0_rtx),
3634 constm1_rtx, const0_rtx),
3635 const0_rtx);
3636 }
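/* Worked example (illustrative): -7 / 3 truncates to -2 with MOD -1;
   MOD != 0 and OP1 / MOD = 3 / -1 < 0, so the adjustment is -1 and the
   floor result is -3.  */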
3638 /* Return the difference between the ceil and the truncated result of
3639 a signed division by OP1 with remainder MOD. */
3640 static rtx
3641 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3642 {
3643 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3644 return gen_rtx_IF_THEN_ELSE
3645 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3646 gen_rtx_IF_THEN_ELSE
3647 (mode, gen_rtx_GT (BImode,
3648 gen_rtx_DIV (mode, op1, mod),
3649 const0_rtx),
3650 const1_rtx, const0_rtx),
3651 const0_rtx);
3652 }
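/* Worked example (illustrative): 7 / 3 truncates to 2 with MOD 1;
   MOD != 0 and OP1 / MOD = 3 / 1 > 0, so the adjustment is +1 and the
   ceiling result is 3.  */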
3654 /* Return the difference between the ceil and the truncated result of
3655 an unsigned division by OP1 with remainder MOD. */
3656 static rtx
3657 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3658 {
3659 /* (mod != 0 ? 1 : 0) */
3660 return gen_rtx_IF_THEN_ELSE
3661 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3662 const1_rtx, const0_rtx);
3663 }
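/* Worked example (illustrative): 7u / 3u truncates to 2 with MOD 1;
   MOD != 0, so the adjustment is +1 and the ceiling result is 3.  */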
3665 /* Return the difference between the rounded and the truncated result
3666 of a signed division by OP1 with remainder MOD. Halfway cases are
3667 rounded away from zero, rather than to the nearest even number. */
3668 static rtx
3669 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3670 {
3671 /* (abs (mod) >= abs (op1) - abs (mod)
3672 ? (op1 / mod > 0 ? 1 : -1)
3673 : 0) */
3674 return gen_rtx_IF_THEN_ELSE
3675 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3676 gen_rtx_MINUS (mode,
3677 gen_rtx_ABS (mode, op1),
3678 gen_rtx_ABS (mode, mod))),
3679 gen_rtx_IF_THEN_ELSE
3680 (mode, gen_rtx_GT (BImode,
3681 gen_rtx_DIV (mode, op1, mod),
3682 const0_rtx),
3683 const1_rtx, constm1_rtx),
3684 const0_rtx);
3685 }
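/* Worked example (illustrative): 8 / 3 truncates to 2 with MOD 2;
   abs (2) >= abs (3) - abs (2), and OP1 / MOD = 3 / 2 > 0, so the
   adjustment is +1 and the rounded result is 3.  */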
3687 /* Return the difference between the rounded and the truncated result
3688 of an unsigned division by OP1 with remainder MOD. Halfway cases
3689 are rounded away from zero, rather than to the nearest even
3690 number. */
3691 static rtx
3692 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3693 {
3694 /* (mod >= op1 - mod ? 1 : 0) */
3695 return gen_rtx_IF_THEN_ELSE
3696 (mode, gen_rtx_GE (BImode, mod,
3697 gen_rtx_MINUS (mode, op1, mod)),
3698 const1_rtx, const0_rtx);
3699 }
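/* Worked example (illustrative): 7u / 3u has MOD 1 < OP1 - MOD = 2, so
   no adjustment and the rounded result stays 2; 8u / 3u has MOD 2 >= 1
   and rounds up to 3.  */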
3701 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
3702 any rtl. */
3704 static rtx
3705 convert_debug_memory_address (machine_mode mode, rtx x,
3706 addr_space_t as)
3707 {
3708 machine_mode xmode = GET_MODE (x);
3710 #ifndef POINTERS_EXTEND_UNSIGNED
3711 gcc_assert (mode == Pmode
3712 || mode == targetm.addr_space.address_mode (as));
3713 gcc_assert (xmode == mode || xmode == VOIDmode);
3714 #else
3715 rtx temp;
3717 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3719 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3720 return x;
3722 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3723 x = simplify_gen_subreg (mode, x, xmode,
3724 subreg_lowpart_offset
3725 (mode, xmode));
3726 else if (POINTERS_EXTEND_UNSIGNED > 0)
3727 x = gen_rtx_ZERO_EXTEND (mode, x);
3728 else if (!POINTERS_EXTEND_UNSIGNED)
3729 x = gen_rtx_SIGN_EXTEND (mode, x);
3732 switch (GET_CODE (x))
3735 if ((SUBREG_PROMOTED_VAR_P (x)
3736 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3737 || (GET_CODE (SUBREG_REG (x)) == PLUS
3738 && REG_P (XEXP (SUBREG_REG (x), 0))
3739 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3740 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3741 && GET_MODE (SUBREG_REG (x)) == mode)
3742 return SUBREG_REG (x);
3745 temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
3746 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3749 temp = shallow_copy_rtx (x);
3750 PUT_MODE (temp, mode);
3753 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3755 temp = gen_rtx_CONST (mode, temp);
3759 if (CONST_INT_P (XEXP (x, 1)))
3761 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3763 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3769 /* Don't know how to express ptr_extend as operation in debug info. */
3770 return NULL;
3771 }
3772 #endif /* POINTERS_EXTEND_UNSIGNED */
3774 return x;
3775 }
3777 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
3778 by avoid_deep_ter_for_debug. */
3780 static hash_map<tree, tree> *deep_ter_debug_map;
3782 /* Split too deep TER chains for debug stmts using debug temporaries. */
3784 static void
3785 avoid_deep_ter_for_debug (gimple stmt, int depth)
3786 {
3787 use_operand_p use_p;
3788 ssa_op_iter iter;
3789 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
3790 {
3791 tree use = USE_FROM_PTR (use_p);
3792 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
3793 continue;
3794 gimple g = get_gimple_for_ssa_name (use);
3795 if (g == NULL)
3796 continue;
3797 if (depth > 6 && !stmt_ends_bb_p (g))
3798 {
3799 if (deep_ter_debug_map == NULL)
3800 deep_ter_debug_map = new hash_map<tree, tree>;
3802 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
3803 if (vexpr != NULL)
3804 continue;
3805 vexpr = make_node (DEBUG_EXPR_DECL);
3806 gimple def_temp = gimple_build_debug_bind (vexpr, use, g);
3807 DECL_ARTIFICIAL (vexpr) = 1;
3808 TREE_TYPE (vexpr) = TREE_TYPE (use);
3809 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
3810 gimple_stmt_iterator gsi = gsi_for_stmt (g);
3811 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
3812 avoid_deep_ter_for_debug (def_temp, 0);
3813 }
3814 else
3815 avoid_deep_ter_for_debug (g, depth + 1);
3816 }
3817 }
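/* Illustration (hedged): if TER would substitute a definition chain
   such as "t1_1 = a_2 + b_3; t2_4 = t1_1 * c_5; ..." more than six
   levels deep into a debug use, a DEBUG_EXPR_DECL is bound to the
   intermediate SSA name and used in its place, keeping the resulting
   debug expressions shallow.  */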
3819 /* Return an RTX equivalent to the value of the parameter DECL. */
3821 static rtx
3822 expand_debug_parm_decl (tree decl)
3823 {
3824 rtx incoming = DECL_INCOMING_RTL (decl);
3826 if (incoming
3827 && GET_MODE (incoming) != BLKmode
3828 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3829 || (MEM_P (incoming)
3830 && REG_P (XEXP (incoming, 0))
3831 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3833 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3835 #ifdef HAVE_window_save
3836 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3837 If the target machine has an explicit window save instruction, the
3838 actual entry value is the corresponding OUTGOING_REGNO instead. */
3839 if (REG_P (incoming)
3840 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3842 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3843 OUTGOING_REGNO (REGNO (incoming)), 0);
3844 else if (MEM_P (incoming))
3846 rtx reg = XEXP (incoming, 0);
3847 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3849 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3850 incoming = replace_equiv_address_nv (incoming, reg);
3853 incoming = copy_rtx (incoming);
3857 ENTRY_VALUE_EXP (rtl) = incoming;
3858 return rtl;
3859 }
3861 if (incoming
3862 && GET_MODE (incoming) != BLKmode
3863 && !TREE_ADDRESSABLE (decl)
3865 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3866 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3867 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3868 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3869 return copy_rtx (incoming);
3871 return NULL_RTX;
3872 }
3874 /* Return an RTX equivalent to the value of the tree expression EXP. */
3876 static rtx
3877 expand_debug_expr (tree exp)
3878 {
3879 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3880 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3881 machine_mode inner_mode = VOIDmode;
3882 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3885 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3887 case tcc_expression:
3888 switch (TREE_CODE (exp))
3893 case WIDEN_MULT_PLUS_EXPR:
3894 case WIDEN_MULT_MINUS_EXPR:
3898 case TRUTH_ANDIF_EXPR:
3899 case TRUTH_ORIF_EXPR:
3900 case TRUTH_AND_EXPR:
3902 case TRUTH_XOR_EXPR:
3905 case TRUTH_NOT_EXPR:
3914 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3921 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3924 switch (TREE_CODE (exp))
3930 case WIDEN_LSHIFT_EXPR:
3931 /* Ensure second operand isn't wider than the first one. */
3932 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
3933 if (SCALAR_INT_MODE_P (inner_mode))
3935 machine_mode opmode = mode;
3936 if (VECTOR_MODE_P (mode))
3937 opmode = GET_MODE_INNER (mode);
3938 if (SCALAR_INT_MODE_P (opmode)
3939 && (GET_MODE_PRECISION (opmode)
3940 < GET_MODE_PRECISION (inner_mode)))
3941 op1 = simplify_gen_subreg (opmode, op1, inner_mode,
3942 subreg_lowpart_offset (opmode,
3953 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3954 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3959 case tcc_comparison:
3960 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
3968 case tcc_exceptional:
3969 case tcc_declaration:
3975 switch (TREE_CODE (exp))
3978 if (!lookup_constant_def (exp))
3980 if (strlen (TREE_STRING_POINTER (exp)) + 1
3981 != (size_t) TREE_STRING_LENGTH (exp))
3983 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3984 op0 = gen_rtx_MEM (BLKmode, op0);
3985 set_mem_attributes (op0, exp, 0);
3988 /* Fall through... */
3993 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3997 gcc_assert (COMPLEX_MODE_P (mode));
3998 op0 = expand_debug_expr (TREE_REALPART (exp));
3999 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4000 return gen_rtx_CONCAT (mode, op0, op1);
4002 case DEBUG_EXPR_DECL:
4003 op0 = DECL_RTL_IF_SET (exp);
4005 if (op0)
4006 return op0;
4008 op0 = gen_rtx_DEBUG_EXPR (mode);
4009 DEBUG_EXPR_TREE_DECL (op0) = exp;
4010 SET_DECL_RTL (exp, op0);
4020 op0 = DECL_RTL_IF_SET (exp);
4022 /* This decl was probably optimized away. */
4023 if (!op0)
4024 {
4025 if (TREE_CODE (exp) != VAR_DECL
4026 || DECL_EXTERNAL (exp)
4027 || !TREE_STATIC (exp)
4028 || !DECL_NAME (exp)
4029 || DECL_HARD_REGISTER (exp)
4030 || DECL_IN_CONSTANT_POOL (exp)
4031 || mode == VOIDmode)
4032 return NULL;
4034 op0 = make_decl_rtl_for_debug (exp);
4036 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4037 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4041 op0 = copy_rtx (op0);
4043 if (GET_MODE (op0) == BLKmode
4044 /* If op0 is not BLKmode, but mode is, adjust_mode
4045 below would ICE. While it is likely a FE bug,
4046 try to be robust here. See PR43166. */
4047 || mode == BLKmode
4048 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4050 gcc_assert (MEM_P (op0));
4051 op0 = adjust_address_nv (op0, mode, 0);
4061 inner_mode = GET_MODE (op0);
4063 if (mode == inner_mode)
4066 if (inner_mode == VOIDmode)
4068 if (TREE_CODE (exp) == SSA_NAME)
4069 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4071 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4072 if (mode == inner_mode)
4076 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4078 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4079 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4080 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4081 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4083 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4085 else if (FLOAT_MODE_P (mode))
4087 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4088 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4089 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4091 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4093 else if (FLOAT_MODE_P (inner_mode))
4096 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4098 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4100 else if (CONSTANT_P (op0)
4101 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
4102 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4103 subreg_lowpart_offset (mode,
4105 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
4106 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4108 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4110 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4116 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4118 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4119 TREE_OPERAND (exp, 0),
4120 TREE_OPERAND (exp, 1));
4121 if (newexp)
4122 return expand_debug_expr (newexp);
4123 }
4124 /* FALLTHROUGH */
4125 case INDIRECT_REF:
4126 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4127 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4131 if (TREE_CODE (exp) == MEM_REF)
4133 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4134 || (GET_CODE (op0) == PLUS
4135 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4136 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4137 Instead just use get_inner_reference. */
4140 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4141 if (!op1 || !CONST_INT_P (op1))
4142 return NULL;
4144 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4145 }
4147 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4149 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4151 if (op0 == NULL_RTX)
4154 op0 = gen_rtx_MEM (mode, op0);
4155 set_mem_attributes (op0, exp, 0);
4156 if (TREE_CODE (exp) == MEM_REF
4157 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4158 set_mem_expr (op0, NULL_TREE);
4159 set_mem_addr_space (op0, as);
4163 case TARGET_MEM_REF:
4164 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4165 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4168 op0 = expand_debug_expr
4169 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4173 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4174 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4176 if (op0 == NULL_RTX)
4179 op0 = gen_rtx_MEM (mode, op0);
4181 set_mem_attributes (op0, exp, 0);
4182 set_mem_addr_space (op0, as);
4188 case ARRAY_RANGE_REF:
4193 case VIEW_CONVERT_EXPR:
4196 HOST_WIDE_INT bitsize, bitpos;
4199 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4200 &mode1, &unsignedp, &volatilep, false);
4206 orig_op0 = op0 = expand_debug_expr (tem);
4213 machine_mode addrmode, offmode;
4218 op0 = XEXP (op0, 0);
4219 addrmode = GET_MODE (op0);
4220 if (addrmode == VOIDmode)
4223 op1 = expand_debug_expr (offset);
4227 offmode = GET_MODE (op1);
4228 if (offmode == VOIDmode)
4229 offmode = TYPE_MODE (TREE_TYPE (offset));
4231 if (addrmode != offmode)
4232 op1 = simplify_gen_subreg (addrmode, op1, offmode,
4233 subreg_lowpart_offset (addrmode,
4236 /* Don't use offset_address here, we don't need a
4237 recognizable address, and we don't want to generate
4239 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4245 if (mode1 == VOIDmode)
4247 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4248 if (bitpos >= BITS_PER_UNIT)
4250 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4251 bitpos %= BITS_PER_UNIT;
4253 else if (bitpos < 0)
4256 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4257 op0 = adjust_address_nv (op0, mode1, units);
4258 bitpos += units * BITS_PER_UNIT;
4260 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4261 op0 = adjust_address_nv (op0, mode, 0);
4262 else if (GET_MODE (op0) != mode1)
4263 op0 = adjust_address_nv (op0, mode1, 0);
4265 op0 = copy_rtx (op0);
4266 if (op0 == orig_op0)
4267 op0 = shallow_copy_rtx (op0);
4268 set_mem_attributes (op0, exp, 0);
4271 if (bitpos == 0 && mode == GET_MODE (op0))
4277 if (GET_MODE (op0) == BLKmode)
4280 if ((bitpos % BITS_PER_UNIT) == 0
4281 && bitsize == GET_MODE_BITSIZE (mode1))
4283 machine_mode opmode = GET_MODE (op0);
4285 if (opmode == VOIDmode)
4286 opmode = TYPE_MODE (TREE_TYPE (tem));
4288 /* This condition may hold if we're expanding the address
4289 right past the end of an array that turned out not to
4290 be addressable (i.e., the address was only computed in
4291 debug stmts). The gen_subreg below would rightfully
4292 crash, and the address doesn't really exist, so just
4294 if (bitpos >= GET_MODE_BITSIZE (opmode))
4297 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4298 return simplify_gen_subreg (mode, op0, opmode,
4299 bitpos / BITS_PER_UNIT);
4302 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4303 && TYPE_UNSIGNED (TREE_TYPE (exp))
4305 : ZERO_EXTRACT, mode,
4306 GET_MODE (op0) != VOIDmode
4308 : TYPE_MODE (TREE_TYPE (tem)),
4309 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4313 return simplify_gen_unary (ABS, mode, op0, mode);
4316 return simplify_gen_unary (NEG, mode, op0, mode);
4319 return simplify_gen_unary (NOT, mode, op0, mode);
4322 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4324 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4327 case FIX_TRUNC_EXPR:
4328 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4331 case POINTER_PLUS_EXPR:
4332 /* For the rare target where pointers are not the same size as
4333 size_t, we need to check for mis-matched modes and correct
4336 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4337 && GET_MODE (op0) != GET_MODE (op1))
4339 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4340 /* If OP0 is a partial mode, then we must truncate, even if it has
4341 the same bitsize as OP1 as GCC's representation of partial modes
4343 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4344 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4345 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4348 /* We always sign-extend, regardless of the signedness of
4349 the operand, because the operand is always unsigned
4350 here even if the original C expression is signed. */
4351 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4356 return simplify_gen_binary (PLUS, mode, op0, op1);
4359 return simplify_gen_binary (MINUS, mode, op0, op1);
4362 return simplify_gen_binary (MULT, mode, op0, op1);
4365 case TRUNC_DIV_EXPR:
4366 case EXACT_DIV_EXPR:
4368 return simplify_gen_binary (UDIV, mode, op0, op1);
4370 return simplify_gen_binary (DIV, mode, op0, op1);
4372 case TRUNC_MOD_EXPR:
4373 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4375 case FLOOR_DIV_EXPR:
4377 return simplify_gen_binary (UDIV, mode, op0, op1);
4380 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4381 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4382 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4383 return simplify_gen_binary (PLUS, mode, div, adj);
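/* A worked instance of the signed case above: for -7 / 2, DIV yields
   -3 and MOD yields -1; the remainder is nonzero and its sign differs
   from the divisor's, so floor_sdiv_adjust contributes -1, giving
   -3 + -1 = -4 = floor(-7/2).  */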
4386 case FLOOR_MOD_EXPR:
4388 return simplify_gen_binary (UMOD, mode, op0, op1);
4391 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4392 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4393 adj = simplify_gen_unary (NEG, mode,
4394 simplify_gen_binary (MULT, mode, adj, op1),
4396 return simplify_gen_binary (PLUS, mode, mod, adj);
4402 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4403 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4404 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4405 return simplify_gen_binary (PLUS, mode, div, adj);
4409 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4410 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4411 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4412 return simplify_gen_binary (PLUS, mode, div, adj);
4418 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4419 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4420 adj = simplify_gen_unary (NEG, mode,
4421 simplify_gen_binary (MULT, mode, adj, op1),
4423 return simplify_gen_binary (PLUS, mode, mod, adj);
4427 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4428 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4429 adj = simplify_gen_unary (NEG, mode,
4430 simplify_gen_binary (MULT, mode, adj, op1),
4432 return simplify_gen_binary (PLUS, mode, mod, adj);
4435 case ROUND_DIV_EXPR:
4438 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4439 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4440 rtx adj = round_udiv_adjust (mode, mod, op1);
4441 return simplify_gen_binary (PLUS, mode, div, adj);
4445 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4446 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4447 rtx adj = round_sdiv_adjust (mode, mod, op1);
4448 return simplify_gen_binary (PLUS, mode, div, adj);
4451 case ROUND_MOD_EXPR:
4454 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4455 rtx adj = round_udiv_adjust (mode, mod, op1);
4456 adj = simplify_gen_unary (NEG, mode,
4457 simplify_gen_binary (MULT, mode, adj, op1),
4459 return simplify_gen_binary (PLUS, mode, mod, adj);
4463 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4464 rtx adj = round_sdiv_adjust (mode, mod, op1);
4465 adj = simplify_gen_unary (NEG, mode,
4466 simplify_gen_binary (MULT, mode, adj, op1),
4468 return simplify_gen_binary (PLUS, mode, mod, adj);
4472 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4476 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4478 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4481 return simplify_gen_binary (ROTATE, mode, op0, op1);
4484 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4487 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4490 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4493 case TRUTH_AND_EXPR:
4494 return simplify_gen_binary (AND, mode, op0, op1);
4498 return simplify_gen_binary (IOR, mode, op0, op1);
4501 case TRUTH_XOR_EXPR:
4502 return simplify_gen_binary (XOR, mode, op0, op1);
4504 case TRUTH_ANDIF_EXPR:
4505 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4507 case TRUTH_ORIF_EXPR:
4508 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4510 case TRUTH_NOT_EXPR:
4511 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4514 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4518 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4522 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4526 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4530 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4533 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4535 case UNORDERED_EXPR:
4536 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4539 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4542 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4545 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4548 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4551 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4554 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4557 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4560 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4563 gcc_assert (COMPLEX_MODE_P (mode));
4564 if (GET_MODE (op0) == VOIDmode)
4565 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4566 if (GET_MODE (op1) == VOIDmode)
4567 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4568 return gen_rtx_CONCAT (mode, op0, op1);
4571 if (GET_CODE (op0) == CONCAT)
4572 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4573 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4575 GET_MODE_INNER (mode)));
4578 machine_mode imode = GET_MODE_INNER (mode);
4583 re = adjust_address_nv (op0, imode, 0);
4584 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4588 machine_mode ifmode = int_mode_for_mode (mode);
4589 machine_mode ihmode = int_mode_for_mode (imode);
4591 if (ifmode == BLKmode || ihmode == BLKmode)
4593 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4596 re = gen_rtx_SUBREG (ifmode, re, 0);
4597 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4598 if (imode != ihmode)
4599 re = gen_rtx_SUBREG (imode, re, 0);
4600 im = copy_rtx (op0);
4602 im = gen_rtx_SUBREG (ifmode, im, 0);
4603 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4604 if (imode != ihmode)
4605 im = gen_rtx_SUBREG (imode, im, 0);
4607 im = gen_rtx_NEG (imode, im);
4608 return gen_rtx_CONCAT (mode, re, im);
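/* For instance, for a complex-double value held in a single pseudo,
   RE and IM above are built as ZERO_EXTRACTs of the integer view of
   OP0 at bit offsets 0 and GET_MODE_BITSIZE (ihmode), and IM is then
   wrapped in a NEG to form the conjugate.  */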
4612 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4613 if (!op0 || !MEM_P (op0))
4615 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4616 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4617 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4618 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4619 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4620 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4622 if (handled_component_p (TREE_OPERAND (exp, 0)))
4624 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4626 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4627 &bitoffset, &bitsize, &maxsize);
4628 if ((TREE_CODE (decl) == VAR_DECL
4629 || TREE_CODE (decl) == PARM_DECL
4630 || TREE_CODE (decl) == RESULT_DECL)
4631 && (!TREE_ADDRESSABLE (decl)
4632 || target_for_debug_bind (decl))
4633 && (bitoffset % BITS_PER_UNIT) == 0
4635 && bitsize == maxsize)
4637 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4638 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4642 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4643 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4646 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4649 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4650 || (GET_CODE (op0) == PLUS
4651 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4652 && CONST_INT_P (XEXP (op0, 1)))))
4654 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4656 if (!op1 || !CONST_INT_P (op1))
4659 return plus_constant (mode, op0, INTVAL (op1));
4666 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4667 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4675 op0 = gen_rtx_CONCATN
4676 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4678 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4680 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4683 XVECEXP (op0, 0, i) = op1;
4690 if (TREE_CLOBBER_P (exp))
4692 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4697 op0 = gen_rtx_CONCATN
4698 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4700 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4702 op1 = expand_debug_expr (val);
4705 XVECEXP (op0, 0, i) = op1;
4708 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4710 op1 = expand_debug_expr
4711 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4716 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4717 XVECEXP (op0, 0, i) = op1;
4723 goto flag_unsupported;
4726 /* ??? Maybe handle some builtins? */
4731 gimple g = get_gimple_for_ssa_name (exp);
4735 if (deep_ter_debug_map)
4737 tree *slot = deep_ter_debug_map->get (exp);
4742 t = gimple_assign_rhs_to_tree (g);
4743 op0 = expand_debug_expr (t);
4749 int part = var_to_partition (SA.map, exp);
4751 if (part == NO_PARTITION)
4753 /* If this is a reference to an incoming value of a parameter
4754 that is never used in the code or where the incoming
4755 value is never used in the code, use PARM_DECL's DECL_RTL if set. */
4757 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4758 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4760 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4763 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4770 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4772 op0 = copy_rtx (SA.partition_to_pseudo[part]);
4780 /* Vector stuff. For most of the codes we don't have rtl codes. */
4781 case REALIGN_LOAD_EXPR:
4782 case REDUC_MAX_EXPR:
4783 case REDUC_MIN_EXPR:
4784 case REDUC_PLUS_EXPR:
4786 case VEC_PACK_FIX_TRUNC_EXPR:
4787 case VEC_PACK_SAT_EXPR:
4788 case VEC_PACK_TRUNC_EXPR:
4789 case VEC_UNPACK_FLOAT_HI_EXPR:
4790 case VEC_UNPACK_FLOAT_LO_EXPR:
4791 case VEC_UNPACK_HI_EXPR:
4792 case VEC_UNPACK_LO_EXPR:
4793 case VEC_WIDEN_MULT_HI_EXPR:
4794 case VEC_WIDEN_MULT_LO_EXPR:
4795 case VEC_WIDEN_MULT_EVEN_EXPR:
4796 case VEC_WIDEN_MULT_ODD_EXPR:
4797 case VEC_WIDEN_LSHIFT_HI_EXPR:
4798 case VEC_WIDEN_LSHIFT_LO_EXPR:
4803 case ADDR_SPACE_CONVERT_EXPR:
4804 case FIXED_CONVERT_EXPR:
4806 case WITH_SIZE_EXPR:
4810 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4811 && SCALAR_INT_MODE_P (mode))
4814 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4816 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4819 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4821 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4823 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4824 return simplify_gen_binary (PLUS, mode, op0, op2);
4828 case WIDEN_MULT_EXPR:
4829 case WIDEN_MULT_PLUS_EXPR:
4830 case WIDEN_MULT_MINUS_EXPR:
4831 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4832 && SCALAR_INT_MODE_P (mode))
4834 inner_mode = GET_MODE (op0);
4835 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4836 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4838 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4839 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4840 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4842 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4843 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4844 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4846 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4847 return simplify_gen_binary (PLUS, mode, op0, op2);
4849 return simplify_gen_binary (MINUS, mode, op2, op0);
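/* E.g. a WIDEN_MULT_PLUS_EXPR with HImode operands and an SImode
   result expands here to (plus:SI (mult:SI (sign_extend:SI op0)
   (sign_extend:SI op1)) op2), with ZERO_EXTEND substituted when an
   operand's type is unsigned.  */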
4853 case MULT_HIGHPART_EXPR:
4854 /* ??? Similar to the above. */
4857 case WIDEN_SUM_EXPR:
4858 case WIDEN_LSHIFT_EXPR:
4859 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4860 && SCALAR_INT_MODE_P (mode))
4863 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4865 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4867 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4868 ? ASHIFT : PLUS, mode, op0, op1);
4873 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4877 #ifdef ENABLE_CHECKING
4886 /* Return an RTX equivalent to the source bind value of the tree expression EXP. */
4890 expand_debug_source_expr (tree exp)
4893 machine_mode mode = VOIDmode, inner_mode;
4895 switch (TREE_CODE (exp))
4899 mode = DECL_MODE (exp);
4900 op0 = expand_debug_parm_decl (exp);
4903 /* See if this isn't an argument that has been completely optimized out. */
4905 if (!DECL_RTL_SET_P (exp)
4906 && !DECL_INCOMING_RTL (exp)
4907 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4909 tree aexp = DECL_ORIGIN (exp);
4910 if (DECL_CONTEXT (aexp)
4911 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4913 vec<tree, va_gc> **debug_args;
4916 debug_args = decl_debug_args_lookup (current_function_decl);
4917 if (debug_args != NULL)
4919 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
4922 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4932 if (op0 == NULL_RTX)
4935 inner_mode = GET_MODE (op0);
4936 if (mode == inner_mode)
4939 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4941 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4942 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4943 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4944 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4946 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4948 else if (FLOAT_MODE_P (mode))
4950 else if (FLOAT_MODE_P (inner_mode))
4952 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4953 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4955 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4957 else if (CONSTANT_P (op0)
4958 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4959 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4960 subreg_lowpart_offset (mode, inner_mode));
4961 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4962 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4964 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
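/* E.g. an SFmode incoming value bound to a DFmode parameter takes the
   FLOAT_EXTEND path above, a DImode value bound to an SImode parameter
   is reduced with a lowpart subreg, and widening integral conversions
   pick ZERO_EXTEND or SIGN_EXTEND from the signedness of EXP's type.  */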
4969 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
4970 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4971 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
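/* A sketch of the effect: given a bind location like
     (plus (mult (plus (mult (reg) (reg)) (reg)) (reg)) (reg))
   the subtree reached at depth 4 is bound to a fresh DEBUG_EXPR by an
   extra debug insn emitted before INSN, and the original location is
   rewritten to refer to that DEBUG_EXPR, keeping every rtx shallow.  */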
4974 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
4978 if (exp == NULL_RTX)
4981 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4986 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4987 rtx dval = make_debug_expr_from_rtl (exp);
4989 /* Emit a debug bind insn before INSN. */
4990 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4991 DEBUG_EXPR_TREE_DECL (dval), exp,
4992 VAR_INIT_STATUS_INITIALIZED);
4994 emit_debug_insn_before (bind, insn);
4999 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5001 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5002 switch (*format_ptr++)
5005 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5010 for (j = 0; j < XVECLEN (exp, i); j++)
5011 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5019 /* Expand the _LOCs in debug insns. We run this after expanding all
5020 regular insns, so that any variables referenced in the function
5021 will have their DECL_RTLs set. */
5024 expand_debug_locations (void)
5027 rtx_insn *last = get_last_insn ();
5028 int save_strict_alias = flag_strict_aliasing;
5030 /* New alias sets while setting up memory attributes cause
5031 -fcompare-debug failures, even though it doesn't bring about any codegen changes. */
5033 flag_strict_aliasing = 0;
5035 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5036 if (DEBUG_INSN_P (insn))
5038 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5040 rtx_insn *prev_insn, *insn2;
5043 if (value == NULL_TREE)
5047 if (INSN_VAR_LOCATION_STATUS (insn)
5048 == VAR_INIT_STATUS_UNINITIALIZED)
5049 val = expand_debug_source_expr (value);
5050 /* The avoid_deep_ter_for_debug function inserts
5051 debug bind stmts after SSA_NAME definition, with the
5052 SSA_NAME as the whole bind location. Temporarily disable
5053 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5054 being defined in this DEBUG_INSN. */
5055 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5057 tree *slot = deep_ter_debug_map->get (value);
5060 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5065 val = expand_debug_expr (value);
5067 *slot = INSN_VAR_LOCATION_DECL (insn);
5070 val = expand_debug_expr (value);
5071 gcc_assert (last == get_last_insn ());
5075 val = gen_rtx_UNKNOWN_VAR_LOC ();
5078 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5080 gcc_assert (mode == GET_MODE (val)
5081 || (GET_MODE (val) == VOIDmode
5082 && (CONST_SCALAR_INT_P (val)
5083 || GET_CODE (val) == CONST_FIXED
5084 || GET_CODE (val) == LABEL_REF)));
5087 INSN_VAR_LOCATION_LOC (insn) = val;
5088 prev_insn = PREV_INSN (insn);
5089 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5090 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5093 flag_strict_aliasing = save_strict_alias;
5096 /* Swap operands of commutative operations so that the more
5097 expensive one is expanded first. */
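/* For example, in t = a + b where B's defining statement is fed by a
   long chain of computations and A's by a single cheap one, B's
   lattice cost is higher, so the operands are swapped and the
   expensive subtree is expanded first.  */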
5100 reorder_operands (basic_block bb)
5102 unsigned int *lattice; /* Holds the cost of each statement. */
5103 unsigned int i = 0, n = 0;
5104 gimple_stmt_iterator gsi;
5110 use_operand_p use_p;
5113 /* Compute cost of each statement using estimate_num_insns. */
5114 stmts = bb_seq (bb);
5115 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5117 stmt = gsi_stmt (gsi);
5118 if (!is_gimple_debug (stmt))
5119 gimple_set_uid (stmt, n++);
5121 lattice = XNEWVEC (unsigned int, n);
5122 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5125 stmt = gsi_stmt (gsi);
5126 if (is_gimple_debug (stmt))
5128 cost = estimate_num_insns (stmt, &eni_size_weights);
5130 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5132 tree use = USE_FROM_PTR (use_p);
5134 if (TREE_CODE (use) != SSA_NAME)
5136 def_stmt = get_gimple_for_ssa_name (use);
5139 lattice[i] += lattice[gimple_uid (def_stmt)];
5142 if (!is_gimple_assign (stmt)
5143 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5145 op0 = gimple_op (stmt, 1);
5146 op1 = gimple_op (stmt, 2);
5147 if (TREE_CODE (op0) != SSA_NAME
5148 || TREE_CODE (op1) != SSA_NAME)
5150 /* Swap operands if the second one is more expensive. */
5151 def0 = get_gimple_for_ssa_name (op0);
5152 def1 = get_gimple_for_ssa_name (op1);
5156 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5160 if (dump_file && (dump_flags & TDF_DETAILS))
5162 fprintf (dump_file, "Swap operands in stmt:\n");
5163 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5164 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5165 def0 ? lattice[gimple_uid (def0)] : 0,
5166 lattice[gimple_uid (def1)]);
5168 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5169 gimple_assign_rhs2_ptr (stmt));
5175 /* Expand basic block BB from GIMPLE trees to RTL. */
5178 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5180 gimple_stmt_iterator gsi;
5189 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5192 /* Note that since we are now transitioning from GIMPLE to RTL, we
5193 cannot use the gsi_*_bb() routines because they expect the basic
5194 block to be in GIMPLE, instead of RTL. Therefore, we need to
5195 access the BB sequence directly. */
5197 reorder_operands (bb);
5198 stmts = bb_seq (bb);
5199 bb->il.gimple.seq = NULL;
5200 bb->il.gimple.phi_nodes = NULL;
5201 rtl_profile_for_bb (bb);
5202 init_rtl_bb_info (bb);
5203 bb->flags |= BB_RTL;
5205 /* Remove the RETURN_EXPR if we may fall through to the exit instead of jumping to it. */
5207 gsi = gsi_last (stmts);
5208 if (!gsi_end_p (gsi)
5209 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5211 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5213 gcc_assert (single_succ_p (bb));
5214 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5216 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5217 && !gimple_return_retval (ret_stmt))
5219 gsi_remove (&gsi, false);
5220 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5224 gsi = gsi_start (stmts);
5225 if (!gsi_end_p (gsi))
5227 stmt = gsi_stmt (gsi);
5228 if (gimple_code (stmt) != GIMPLE_LABEL)
5232 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5236 last = get_last_insn ();
5240 expand_gimple_stmt (stmt);
5247 /* Java emits line number notes at the top of labels.
5248 ??? Make this go away once line number notes are obsoleted. */
5249 BB_HEAD (bb) = NEXT_INSN (last);
5250 if (NOTE_P (BB_HEAD (bb)))
5251 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5252 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5254 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5257 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5259 NOTE_BASIC_BLOCK (note) = bb;
5261 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5265 stmt = gsi_stmt (gsi);
5267 /* If this statement is a non-debug one, and we generate debug
5268 insns, then this one might be the last real use of a TERed
5269 SSA_NAME, but where there are still some debug uses further
5270 down. Expanding the current SSA name in such further debug
5271 uses by their RHS might lead to wrong debug info, as coalescing
5272 might make the operands of such RHS be placed into the same
5273 pseudo as something else. Like so:
5274 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5275 use(a_1);
5276 a_2 = ...
5277 #DEBUG ... => a_1
5278 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5279 If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
5280 the write to a_2 would actually have clobbered the place which
5281 would be remembered.
5283 So, instead of that, we recognize the situation, and generate
5284 debug temporaries at the last real use of TERed SSA names:
5285 a_1 = a_0 + 1;
5286 #DEBUG #D1 => a_1
5287 use(a_1);
5288 a_2 = ...
5289 #DEBUG ... => #D1 */
5291 if (MAY_HAVE_DEBUG_INSNS
5293 && !is_gimple_debug (stmt))
5299 location_t sloc = curr_insn_location ();
5301 /* Look for SSA names that have their last use here (TERed
5302 names always have only one real use). */
5303 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5304 if ((def = get_gimple_for_ssa_name (op)))
5306 imm_use_iterator imm_iter;
5307 use_operand_p use_p;
5308 bool have_debug_uses = false;
5310 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5312 if (gimple_debug_bind_p (USE_STMT (use_p)))
5314 have_debug_uses = true;
5319 if (have_debug_uses)
5321 /* OP is a TERed SSA name, with DEF its defining
5322 statement, and where OP is used in further debug
5323 instructions. Generate a debug temporary, and
5324 replace all uses of OP in debug insns with that temporary. */
5327 tree value = gimple_assign_rhs_to_tree (def);
5328 tree vexpr = make_node (DEBUG_EXPR_DECL);
5332 set_curr_insn_location (gimple_location (def));
5334 DECL_ARTIFICIAL (vexpr) = 1;
5335 TREE_TYPE (vexpr) = TREE_TYPE (value);
5337 mode = DECL_MODE (value);
5339 mode = TYPE_MODE (TREE_TYPE (value));
5340 DECL_MODE (vexpr) = mode;
5342 val = gen_rtx_VAR_LOCATION
5343 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5345 emit_debug_insn (val);
5347 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5349 if (!gimple_debug_bind_p (debugstmt))
5352 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5353 SET_USE (use_p, vexpr);
5355 update_stmt (debugstmt);
5359 set_curr_insn_location (sloc);
5362 currently_expanding_gimple_stmt = stmt;
5364 /* Expand this statement, then evaluate the resulting RTL and
5365 fixup the CFG accordingly. */
5366 if (gimple_code (stmt) == GIMPLE_COND)
5368 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5372 else if (gimple_debug_bind_p (stmt))
5374 location_t sloc = curr_insn_location ();
5375 gimple_stmt_iterator nsi = gsi;
5379 tree var = gimple_debug_bind_get_var (stmt);
5384 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5385 && TREE_CODE (var) != LABEL_DECL
5386 && !target_for_debug_bind (var))
5387 goto delink_debug_stmt;
5389 if (gimple_debug_bind_has_value_p (stmt))
5390 value = gimple_debug_bind_get_value (stmt);
5394 last = get_last_insn ();
5396 set_curr_insn_location (gimple_location (stmt));
5399 mode = DECL_MODE (var);
5401 mode = TYPE_MODE (TREE_TYPE (var));
5403 val = gen_rtx_VAR_LOCATION
5404 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5406 emit_debug_insn (val);
5408 if (dump_file && (dump_flags & TDF_DETAILS))
5410 /* We can't dump the insn with a TREE where an RTX is expected. */
5412 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5413 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5414 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5418 /* In order not to generate too many debug temporaries,
5419 we delink all uses of debug statements we already expanded.
5420 Therefore debug statements between definition and real
5421 use of TERed SSA names will continue to use the SSA name,
5422 and not be replaced with debug temps. */
5423 delink_stmt_imm_use (stmt);
5427 if (gsi_end_p (nsi))
5429 stmt = gsi_stmt (nsi);
5430 if (!gimple_debug_bind_p (stmt))
5434 set_curr_insn_location (sloc);
5436 else if (gimple_debug_source_bind_p (stmt))
5438 location_t sloc = curr_insn_location ();
5439 tree var = gimple_debug_source_bind_get_var (stmt);
5440 tree value = gimple_debug_source_bind_get_value (stmt);
5444 last = get_last_insn ();
5446 set_curr_insn_location (gimple_location (stmt));
5448 mode = DECL_MODE (var);
5450 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5451 VAR_INIT_STATUS_UNINITIALIZED);
5453 emit_debug_insn (val);
5455 if (dump_file && (dump_flags & TDF_DETAILS))
5457 /* We can't dump the insn with a TREE where an RTX is expected. */
5459 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5460 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5461 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5464 set_curr_insn_location (sloc);
5468 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5470 && gimple_call_tail_p (call_stmt)
5471 && disable_tail_calls)
5472 gimple_call_set_tail (call_stmt, false);
5474 if (call_stmt && gimple_call_tail_p (call_stmt))
5477 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5488 def_operand_p def_p;
5489 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5493 /* Ignore this stmt if it is in the list of
5494 replaceable expressions. */
5496 && bitmap_bit_p (SA.values,
5497 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5500 last = expand_gimple_stmt (stmt);
5501 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5506 currently_expanding_gimple_stmt = NULL;
5508 /* Expand implicit goto and convert goto_locus. */
5509 FOR_EACH_EDGE (e, ei, bb->succs)
5511 if (e->goto_locus != UNKNOWN_LOCATION)
5512 set_curr_insn_location (e->goto_locus);
5513 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5515 emit_jump (label_rtx_for_bb (e->dest));
5516 e->flags &= ~EDGE_FALLTHRU;
5520 /* Expanded RTL can create a jump in the last instruction of a block.
5521 Later this might be assumed to be a jump to the successor and break edge insertion.
5522 We need to insert a dummy move to prevent this. PR41440. */
5523 if (single_succ_p (bb)
5524 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5525 && (last = get_last_insn ())
5528 rtx dummy = gen_reg_rtx (SImode);
5529 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5532 do_pending_stack_adjust ();
5534 /* Find the block tail. The last insn in the block is the insn
5535 before a barrier and/or table jump insn. */
5536 last = get_last_insn ();
5537 if (BARRIER_P (last))
5538 last = PREV_INSN (last);
5539 if (JUMP_TABLE_DATA_P (last))
5540 last = PREV_INSN (PREV_INSN (last));
5543 update_bb_for_insn (bb);
5549 /* Create a basic block for initialization code. */
5552 construct_init_block (void)
5554 basic_block init_block, first_block;
5558 /* Multiple entry points not supported yet. */
5559 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5560 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5561 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5562 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5563 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5565 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5567 /* When the entry edge points to the first basic block, we don't need a jump;
5568 otherwise we have to jump to the proper target. */
5569 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5571 tree label = gimple_block_label (e->dest);
5573 emit_jump (label_rtx (label));
5577 flags = EDGE_FALLTHRU;
5579 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5581 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5582 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5583 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5584 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5587 first_block = e->dest;
5588 redirect_edge_succ (e, init_block);
5589 e = make_edge (init_block, first_block, flags);
5592 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5593 e->probability = REG_BR_PROB_BASE;
5594 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5596 update_bb_for_insn (init_block);
5600 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5601 found in the block tree. */
5604 set_block_levels (tree block, int level)
5608 BLOCK_NUMBER (block) = level;
5609 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5610 block = BLOCK_CHAIN (block);
5614 /* Create a block containing landing pads and similar stuff. */
5617 construct_exit_block (void)
5619 rtx_insn *head = get_last_insn ();
5621 basic_block exit_block;
5625 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5626 rtx_insn *orig_end = BB_END (prev_bb);
5628 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5630 /* Make sure the locus is set to the end of the function, so that
5631 epilogue line numbers and warnings are set properly. */
5632 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5633 input_location = cfun->function_end_locus;
5635 /* Generate rtl for function exit. */
5636 expand_function_end ();
5638 end = get_last_insn ();
5641 /* While emitting the function end we could move the end of the last basic block. */
5643 BB_END (prev_bb) = orig_end;
5644 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5645 head = NEXT_INSN (head);
5646 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5647 bb frequency counting will be confused. Any instructions before that
5648 label are emitted for the case where PREV_BB falls through into the
5649 exit block, so append those instructions to prev_bb in that case. */
5650 if (NEXT_INSN (head) != return_label)
5652 while (NEXT_INSN (head) != return_label)
5654 if (!NOTE_P (NEXT_INSN (head)))
5655 BB_END (prev_bb) = NEXT_INSN (head);
5656 head = NEXT_INSN (head);
5659 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5660 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5661 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5662 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5665 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5667 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5668 if (!(e->flags & EDGE_ABNORMAL))
5669 redirect_edge_succ (e, exit_block);
5674 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5675 e->probability = REG_BR_PROB_BASE;
5676 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5677 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5680 e->count -= e2->count;
5681 exit_block->count -= e2->count;
5682 exit_block->frequency -= EDGE_FREQUENCY (e2);
5686 if (exit_block->count < 0)
5687 exit_block->count = 0;
5688 if (exit_block->frequency < 0)
5689 exit_block->frequency = 0;
5690 update_bb_for_insn (exit_block);
5693 /* Helper function for discover_nonconstant_array_refs.
5694 Look for ARRAY_REF nodes with non-constant indexes and mark them addressable. */
5698 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5699 void *data ATTRIBUTE_UNUSED)
5703 if (IS_TYPE_OR_DECL_P (t))
5705 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5707 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5708 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5709 && (!TREE_OPERAND (t, 2)
5710 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5711 || (TREE_CODE (t) == COMPONENT_REF
5712 && (!TREE_OPERAND (t,2)
5713 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5714 || TREE_CODE (t) == BIT_FIELD_REF
5715 || TREE_CODE (t) == REALPART_EXPR
5716 || TREE_CODE (t) == IMAGPART_EXPR
5717 || TREE_CODE (t) == VIEW_CONVERT_EXPR
5718 || CONVERT_EXPR_P (t))
5719 t = TREE_OPERAND (t, 0);
5721 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5723 t = get_base_address (t);
5725 && DECL_MODE (t) != BLKmode)
5726 TREE_ADDRESSABLE (t) = 1;
5735 /* RTL expansion is not able to compile array references with variable
5736 offsets for arrays stored in a single register. Discover such
5737 expressions and mark variables as addressable to avoid this scenario. */
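/* For example, given "short v[2];" that would otherwise be promoted
   to a single register, a read like v[i] with a non-constant index I
   cannot be expanded from a register, so V is marked TREE_ADDRESSABLE
   and kept in memory.  */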
5741 discover_nonconstant_array_refs (void)
5744 gimple_stmt_iterator gsi;
5746 FOR_EACH_BB_FN (bb, cfun)
5747 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5749 gimple stmt = gsi_stmt (gsi);
5750 if (!is_gimple_debug (stmt))
5751 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
5755 /* This function sets crtl->args.internal_arg_pointer to a virtual
5756 register if DRAP is needed. Local register allocator will replace
5757 virtual_incoming_args_rtx with the virtual register. */
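/* DRAP here is the dynamic realign argument pointer: a register that
   preserves the address of the incoming argument area while the stack
   pointer is being realigned, so that arguments remain reachable at
   their original offsets.  */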
5760 expand_stack_alignment (void)
5763 unsigned int preferred_stack_boundary;
5765 if (! SUPPORTS_STACK_ALIGNMENT)
5768 if (cfun->calls_alloca
5769 || cfun->has_nonlocal_label
5770 || crtl->has_nonlocal_goto)
5771 crtl->need_drap = true;
5773 /* Call update_stack_boundary here again to update incoming stack
5774 boundary. It may set incoming stack alignment to a different
5775 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5776 use the minimum incoming stack alignment to check if it is OK
5777 to perform sibcall optimization since sibcall optimization will
5778 only align the outgoing stack to incoming stack boundary. */
5779 if (targetm.calls.update_stack_boundary)
5780 targetm.calls.update_stack_boundary ();
5782 /* The incoming stack frame has to be aligned at least at
5783 parm_stack_boundary. */
5784 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5786 /* Update crtl->stack_alignment_estimated and use it later to align
5787 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5788 exceptions since callgraph doesn't collect incoming stack alignment in this case. */
5790 if (cfun->can_throw_non_call_exceptions
5791 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5792 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5794 preferred_stack_boundary = crtl->preferred_stack_boundary;
5795 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5796 crtl->stack_alignment_estimated = preferred_stack_boundary;
5797 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5798 crtl->stack_alignment_needed = preferred_stack_boundary;
5800 gcc_assert (crtl->stack_alignment_needed
5801 <= crtl->stack_alignment_estimated);
5803 crtl->stack_realign_needed
5804 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5805 crtl->stack_realign_tried = crtl->stack_realign_needed;
5807 crtl->stack_realign_processed = true;
5809 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack realignment. */
5811 gcc_assert (targetm.calls.get_drap_rtx != NULL);
5812 drap_rtx = targetm.calls.get_drap_rtx ();
5814 /* stack_realign_drap and drap_rtx must match. */
5815 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5817 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5818 if (NULL != drap_rtx)
5820 crtl->args.internal_arg_pointer = drap_rtx;
5822 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is needed. */
5824 fixup_tail_calls ();
5830 expand_main_function (void)
5832 #if (defined(INVOKE__main) \
5833 || (!defined(HAS_INIT_SECTION) \
5834 && !defined(INIT_SECTION_ASM_OP) \
5835 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5836 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5841 /* Expand code to initialize the stack_protect_guard. This is invoked at
5842 the beginning of a function to be protected. */
5844 #ifndef HAVE_stack_protect_set
5845 # define HAVE_stack_protect_set 0
5846 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5850 stack_protect_prologue (void)
5852 tree guard_decl = targetm.stack_protect_guard ();
5855 x = expand_normal (crtl->stack_protect_guard);
5856 y = expand_normal (guard_decl);
5858 /* Allow the target to copy from Y to X without leaking Y into a register. */
5860 if (HAVE_stack_protect_set)
5862 rtx insn = gen_stack_protect_set (x, y);
5870 /* Otherwise do a straight move. */
5871 emit_move_insn (x, y);
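/* The matching check is emitted by stack_protect_epilogue at function
   exit: it compares the guard slot against the guard value and, on a
   mismatch, jumps to the failure path that ultimately calls
   __stack_chk_fail.  */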
5874 /* Translate the intermediate representation contained in the CFG
5875 from GIMPLE trees to RTL.
5877 We do conversion per basic block and preserve/update the tree CFG.
5878 This implies we have to do some magic as the CFG can simultaneously
5879 consist of basic blocks containing RTL and GIMPLE trees. This can
5880 confuse the CFG hooks, so be careful not to manipulate the CFG during the expansion. */
5885 const pass_data pass_data_expand =
5887 RTL_PASS, /* type */
5888 "expand", /* name */
5889 OPTGROUP_NONE, /* optinfo_flags */
5890 TV_EXPAND, /* tv_id */
5891 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5893 | PROP_gimple_lvec ), /* properties_required */
5894 PROP_rtl, /* properties_provided */
5895 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5896 0, /* todo_flags_start */
5897 0, /* todo_flags_finish */
5900 class pass_expand : public rtl_opt_pass
5903 pass_expand (gcc::context *ctxt)
5904 : rtl_opt_pass (pass_data_expand, ctxt)
5907 /* opt_pass methods: */
5908 virtual unsigned int execute (function *);
5910 }; // class pass_expand
5913 pass_expand::execute (function *fun)
5915 basic_block bb, init_block;
5919 rtx_insn *var_seq, *var_ret_seq;
5922 timevar_push (TV_OUT_OF_SSA);
5923 rewrite_out_of_ssa (&SA);
5924 timevar_pop (TV_OUT_OF_SSA);
5925 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
5927 if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
5929 gimple_stmt_iterator gsi;
5930 FOR_EACH_BB_FN (bb, cfun)
5931 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5932 if (gimple_debug_bind_p (gsi_stmt (gsi)))
5933 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
5936 /* Make sure all values used by the optimization passes have sane defaults. */
5940 /* Some backends want to know that we are expanding to RTL. */
5941 currently_expanding_to_rtl = 1;
5942 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5943 free_dominance_info (CDI_DOMINATORS);
5945 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
5947 if (chkp_function_instrumented_p (current_function_decl))
5948 chkp_reset_rtl_bounds ();
5950 insn_locations_init ();
5951 if (!DECL_IS_BUILTIN (current_function_decl))
5953 /* Eventually, all FEs should explicitly set function_start_locus. */
5954 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
5955 set_curr_insn_location
5956 (DECL_SOURCE_LOCATION (current_function_decl));
5958 set_curr_insn_location (fun->function_start_locus);
5961 set_curr_insn_location (UNKNOWN_LOCATION);
5962 prologue_location = curr_insn_location ();
5964 #ifdef INSN_SCHEDULING
5965 init_sched_attrs ();
5968 /* Make sure first insn is a note even if we don't want linenums.
5969 This makes sure the first insn will never be deleted.
5970 Also, final expects a note to appear there. */
5971 emit_note (NOTE_INSN_DELETED);
5973 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5974 discover_nonconstant_array_refs ();
5976 targetm.expand_to_rtl_hook ();
5977 crtl->stack_alignment_needed = STACK_BOUNDARY;
5978 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
5979 crtl->stack_alignment_estimated = 0;
5980 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5981 fun->cfg->max_jumptable_ents = 0;
5983 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
5984 of the function section at expansion time to predict the distance of calls. */
5985 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5987 /* Expand the variables recorded during gimple lowering. */
5988 timevar_push (TV_VAR_EXPAND);
5991 var_ret_seq = expand_used_vars ();
5993 var_seq = get_insns ();
5995 timevar_pop (TV_VAR_EXPAND);
5997 /* Honor stack protection warnings. */
5998 if (warn_stack_protect)
6000 if (fun->calls_alloca)
6001 warning (OPT_Wstack_protector,
6002 "stack protector not protecting local variables: "
6003 "variable length buffer");
6004 if (has_short_buffer && !crtl->stack_protect_guard)
6005 warning (OPT_Wstack_protector,
6006 "stack protector not protecting function: "
6007 "all local arrays are less than %d bytes long",
6008 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6011 /* Set up parameters and prepare for return, for the function. */
6012 expand_function_start (current_function_decl);
6014 /* If we emitted any instructions for setting up the variables,
6015 emit them before the FUNCTION_START note. */
6018 emit_insn_before (var_seq, parm_birth_insn);
6020 /* In expand_function_end we'll insert the alloca save/restore
6021 before parm_birth_insn. We've just inserted an alloca call.
6022 Adjust the pointer to match. */
6023 parm_birth_insn = var_seq;
6026 /* Now that we also have the parameter RTXs, copy them over to our partition representation. */
6028 for (i = 0; i < SA.map->num_partitions; i++)
6030 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
6032 if (TREE_CODE (var) != VAR_DECL
6033 && !SA.partition_to_pseudo[i])
6034 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
6035 gcc_assert (SA.partition_to_pseudo[i]);
6037 /* If this decl was marked as living in multiple places, reset
6038 this now to NULL. */
6039 if (DECL_RTL_IF_SET (var) == pc_rtx)
6040 SET_DECL_RTL (var, NULL);
6042 /* Some RTL parts really want to look at DECL_RTL(x) when x
6043 was a decl marked in REG_ATTR or MEM_ATTR. We could use
6044 SET_DECL_RTL here making this available, but that would mean
6045 to select one of the potentially many RTLs for one DECL. Instead
6046 of doing that we simply reset the MEM_EXPR of the RTL in question,
6047 then nobody can get at it and hence nobody can call DECL_RTL on it. */
6048 if (!DECL_RTL_SET_P (var))
6050 if (MEM_P (SA.partition_to_pseudo[i]))
6051 set_mem_expr (SA.partition_to_pseudo[i], NULL);
6055 /* If we have a class containing differently aligned pointers
6056 we need to merge those into the corresponding RTL pointer alignment. */
6058 for (i = 1; i < num_ssa_names; i++)
6060 tree name = ssa_name (i);
6065 /* We might have generated new SSA names in
6066 update_alias_info_with_stack_vars. They will have NULL
6067 defining statements, and won't be part of the partitioning, so ignore those. */
6069 || !SSA_NAME_DEF_STMT (name))
6071 part = var_to_partition (SA.map, name);
6072 if (part == NO_PARTITION)
6075 /* Adjust all partition members to get the underlying decl of
6076 the representative which we might have created in expand_one_var. */
6077 if (SSA_NAME_VAR (name) == NULL_TREE)
6079 tree leader = partition_to_var (SA.map, part);
6080 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
6081 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
6083 if (!POINTER_TYPE_P (TREE_TYPE (name)))
6086 r = SA.partition_to_pseudo[part];
6088 mark_reg_pointer (r, get_pointer_alignment (name));
6091 /* If this function is `main', emit a call to `__main'
6092 to run global initializers, etc. */
6093 if (DECL_NAME (current_function_decl)
6094 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6095 && DECL_FILE_SCOPE_P (current_function_decl))
6096 expand_main_function ();
6098 /* Initialize the stack_protect_guard field. This must happen after the
6099 call to __main (if any) so that the external decl is initialized. */
6100 if (crtl->stack_protect_guard)
6101 stack_protect_prologue ();
6103 expand_phi_nodes (&SA);
6105 /* Register rtl specific functions for cfg. */
6106 rtl_register_cfg_hooks ();
6108 init_block = construct_init_block ();
6110 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
6111 remaining edges later. */
6112 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6113 e->flags &= ~EDGE_EXECUTABLE;
6115 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6116 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6118 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6120 if (MAY_HAVE_DEBUG_INSNS)
6121 expand_debug_locations ();
6123 if (deep_ter_debug_map)
6125 delete deep_ter_debug_map;
6126 deep_ter_debug_map = NULL;
6129 /* Free stuff we no longer need after GIMPLE optimizations. */
6130 free_dominance_info (CDI_DOMINATORS);
6131 free_dominance_info (CDI_POST_DOMINATORS);
6132 delete_tree_cfg_annotations ();
6134 timevar_push (TV_OUT_OF_SSA);
6135 finish_out_of_ssa (&SA);
6136 timevar_pop (TV_OUT_OF_SSA);
6138 timevar_push (TV_POST_EXPAND);
6139 /* We are no longer in SSA form. */
6140 fun->gimple_df->in_ssa_p = false;
6141 loops_state_clear (LOOP_CLOSED_SSA);
6143 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
6144 conservatively to true until they are all profile aware. */
6145 delete lab_rtx_for_bb;
6148 construct_exit_block ();
6149 insn_locations_finalize ();
6153 rtx_insn *after = return_label;
6154 rtx_insn *next = NEXT_INSN (after);
6155 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6157 emit_insn_after (var_ret_seq, after);
6160 /* Zap the tree EH table. */
6161 set_eh_throw_stmt_table (fun, NULL);
6163 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6164 to split edges, which edge insertions might do. */
6165 rebuild_jump_labels (get_insns ());
6167 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6168 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6172 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6176 rebuild_jump_labels_chain (e->insns.r);
6177 /* Put insns after parm birth, but before
6178 NOTE_INSN_FUNCTION_BEG. */
6179 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6180 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6182 rtx_insn *insns = e->insns.r;
6184 if (NOTE_P (parm_birth_insn)
6185 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6186 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6188 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6191 commit_one_edge_insertion (e);
6198 /* We're done expanding trees to RTL. */
6199 currently_expanding_to_rtl = 0;
6201 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6202 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6206 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6208 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6209 e->flags &= ~EDGE_EXECUTABLE;
6211 /* At the moment not all abnormal edges match the RTL
6212 representation. It is safe to remove them here as
6213 find_many_sub_basic_blocks will rediscover them.
6214 In the future we should get this fixed properly. */
6215 if ((e->flags & EDGE_ABNORMAL)
6216 && !(e->flags & EDGE_SIBCALL))
6223 blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
6224 bitmap_ones (blocks);
6225 find_many_sub_basic_blocks (blocks);
6226 sbitmap_free (blocks);
6227 purge_all_dead_edges ();
6229 expand_stack_alignment ();
6231 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this function. */
6233 if (crtl->tail_call_emit)
6234 fixup_tail_calls ();
6236 /* After initial rtl generation, call back to finish generating
6237 exception support code. We need to do this before cleaning up
6238 the CFG as the code does not expect dead landing pads. */
6239 if (fun->eh->region_tree != NULL)
6240 finish_eh_generation ();
6242 /* Remove unreachable blocks, otherwise we cannot compute dominators
6243 which are needed for loop state verification. As a side-effect
6244 this also compacts blocks.
6245 ??? We cannot remove trivially dead insns here as for example
6246 the DRAP reg on i?86 is not magically live at this point.
6247 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6248 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6250 #ifdef ENABLE_CHECKING
6251 verify_flow_info ();
6254 /* Initialize pseudos allocated for hard registers. */
6255 emit_initial_value_sets ();
6257 /* And finally unshare all RTL. */
6260 /* There's no need to defer outputting this function any more; we
6261 know we want to output it. */
6262 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6264 /* Now that we're done expanding trees to RTL, we shouldn't have any
6265 more CONCATs anywhere. */
6266 generating_concat_p = 0;
6271 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6272 /* And the pass manager will dump RTL for us. */
6275 /* If we're emitting a nested function, make sure its parent gets
6276 emitted as well. Doing otherwise confuses debug info. */
6279 for (parent = DECL_CONTEXT (current_function_decl);
6280 parent != NULL_TREE;
6281 parent = get_containing_scope (parent))
6282 if (TREE_CODE (parent) == FUNCTION_DECL)
6283 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6286 /* We are now committed to emitting code for this function. Do any
6287 preparation, such as emitting abstract debug info for the inline function
6288 before it gets mangled by optimization. */
6289 if (cgraph_function_possibly_inlined_p (current_function_decl))
6290 (*debug_hooks->outlining_inline_function) (current_function_decl);
6292 TREE_ASM_WRITTEN (current_function_decl) = 1;
6294 /* After expanding, the return labels are no longer needed. */
6295 return_label = NULL;
6296 naked_return_label = NULL;
6298 /* After expanding, the tm_restart map is no longer needed. */
6299 if (fun->gimple_df->tm_restart)
6300 fun->gimple_df->tm_restart = NULL;
6302 /* Tag the blocks with a depth number so that change_scope can find
6303 the common parent easily. */
6304 set_block_levels (DECL_INITIAL (fun->decl), 0);
6305 default_rtl_profile ();
6307 timevar_pop (TV_POST_EXPAND);
6315 make_pass_expand (gcc::context *ctxt)
6317 return new pass_expand (ctxt);