/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "integrate.h"
#include "langhooks.h"
#include "cfglayout.h"
#include "tree-pass.h"
/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
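
/* For illustration (hypothetical values, assuming ALIGN is a power of two):
   FLOOR_ROUND (13, 8) == 8 and CEIL_ROUND (13, 8) == 16, while
   FLOOR_ROUND (-13, 8) == -16, the next multiple of 8 below -13.
   Division-based rounding would be implementation-sensitive for negative
   values, which is why these macros mask bits instead.  */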
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;
/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;
/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in the function.  */
static VEC(int,heap) *sibcall_epilogue;

htab_t types_used_by_vars_hash = NULL;
tree types_used_by_cur_var_decl = NULL;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
static int contains (const_rtx, VEC(int,heap) **);
#ifdef HAVE_return
static void emit_return_into_block (basic_block);
#endif
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);
static VEC(function_p,heap) *function_context_stack;
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  VEC_safe_push (function_p, heap, function_context_stack, cfun);
  set_cfun (NULL);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = VEC_pop (function_p, function_context_stack);
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  VEC_free (int, heap, prologue);
  VEC_free (int, heap, epilogue);
  VEC_free (int, heap, sibcall_epilogue);
  if (crtl->emit.regno_pointer_align)
    free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
  insn_locators_free ();
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	     /* Leave room for the fixed part of the frame.  */
	     - 64 * UNITS_PER_WORD)
    {
      error ("%Jtotal size of local objects too large", func);
      return TRUE;
    }

  return FALSE;
}
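
/* For illustration (hypothetical target parameters): with a 32-bit Pmode
   and UNITS_PER_WORD == 4, the check above rejects any frame larger than
   2^31 - 256 bytes, so signed pointer arithmetic on frame offsets cannot
   wrap around.  */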
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   If REDUCE_ALIGNMENT_OK is true, it is OK to reduce alignment.

   We do not round to stack_boundary here.  */
static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
		      int align,
		      bool reduce_alignment_ok ATTRIBUTE_UNUSED)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment, alignment_in_bits;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  if (FRAME_GROWS_DOWNWARD)
    frame_offset -= size;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert (reduce_alignment_ok
			      || size == 0
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
    crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /*  We must be careful here, since FRAME_OFFSET might be negative and
	  division with a negative dividend isn't as well defined as we might
	  like.  So we instead assume that ALIGNMENT is a power of two and
	  use logical operations which are unambiguous.  */
      if (FRAME_GROWS_DOWNWARD)
	frame_offset
	  = (FLOOR_ROUND (frame_offset - frame_phase,
			  (unsigned HOST_WIDE_INT) alignment)
	     + frame_phase);
      else
	frame_offset
	  = (CEIL_ROUND (frame_offset - frame_phase,
			 (unsigned HOST_WIDE_INT) alignment)
	     + frame_phase);
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  trunc_int_for_mode
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (frame_offset + bigend_correction,
			   Pmode));

  if (!FRAME_GROWS_DOWNWARD)
    frame_offset += size;

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with last parameter as false.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, false);
}
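
/* A minimal usage sketch (hypothetical call, not from this file):

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   yields an SImode MEM whose address is based on virtual_stack_vars_rtx
   until virtual registers are instantiated, and on frame_pointer_rtx
   afterwards; ALIGN == 0 requests SImode's natural alignment, per the
   comment above assign_stack_local_1.  */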
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;

/* Entry for the above hash table.  */
struct temp_slot_address_entry GTY(())
{
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}
/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;
  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}
/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}
/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
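
/* Sketch of the level discipline, assuming the usual calling pattern of
   push_temp_slots / assign_stack_temp / free_temp_slots / pop_temp_slots:
   a slot allocated at level N sits on the used list for level N;
   preserve_temp_slots moves it to level N - 1 so that leaving level N does
   not recycle it; make_slot_available puts it back on avail_temp_slots for
   reuse by a later allocation.  */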
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}
/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}
/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = GGC_NEW (struct temp_slot_address_entry);
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    *slot = NULL;
  return 1;
}
/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  htab_traverse (temp_slot_address_table,
		 remove_unused_temp_slot_addresses_1,
		 NULL);
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  {
	    if (INTVAL (XEXP (x, 1)) >= p->base_offset
		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
	      return p;
	  }
    }

  return NULL;
}
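
/* For illustration, the three lookups above match, in order:
   (1) an address recorded verbatim in the hash table,
       e.g. (plus (reg virtual-stack-vars) (const_int -16));
   (2) a sum such as (plus (reg 130) (const_int 4)) one of whose register
       operands was itself recorded as a slot address; and
   (3) a raw (plus virtual-stack-vars (const_int N)) whose constant falls
       inside some slot's [base_offset, base_offset + full_size) range.  */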
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */
static rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
			    int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = GGC_NEW (struct temp_slot);
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = GGC_NEW (struct temp_slot);

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
				  || TREE_CODE (type) == COMPLEX_TYPE));
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
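
/* A minimal usage sketch (hypothetical call, not from this file):

     rtx tmp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);

   allocates, or reuses, a DImode slot at the current temp_slot_level; with
   KEEP == 0 it returns to the free list at the next free_temp_slots or
   pop_temp_slots.  */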
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate an object of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
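
/* Illustrative arithmetic (hypothetical offsets): if P describes
   [32, 48) (base_offset 32, full_size 16) and Q describes [48, 64),
   then p->base_offset + p->full_size == q->base_offset, so Q is merged
   into P, which afterwards describes the whole of [32, 64).  */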
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them.  If so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
	    {
	      next = q->next;

	      if (p != q && q->addr_taken)
		move_slot_to_level (q, temp_slot_level - 1);
	    }

	  move_slot_to_level (p, temp_slot_level - 1);
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	make_slot_available (p);
    }

  remove_unused_temp_slot_addresses ();
  combine_temp_slots ();
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  remove_unused_temp_slot_addresses ();
  combine_temp_slots ();

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
					       temp_slot_address_hash,
					       temp_slot_address_eq,
					       NULL);
  else
    htab_empty (temp_slot_address_table);
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET    0
#endif
/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
((ACCUMULATE_OUTGOING_ARGS						      \
  ? (crtl->outgoing_args_size						      \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: REG_PARM_STACK_SPACE (FNDECL)))				      \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0)		      \
 + (STACK_POINTER_OFFSET))
#endif
#endif
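
/* For illustration (hypothetical values): with ACCUMULATE_OUTGOING_ARGS,
   no REG_PARM_STACK_SPACE, crtl->outgoing_args_size == 16 and
   STACK_POINTER_OFFSET == 0, the fallback definition above gives
   STACK_DYNAMIC_OFFSET (FNDECL) == 16, i.e. dynamic allocations start just
   beyond the outgoing-argument block.  */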
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
	{
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
	  new_rtx = crtl->args.internal_arg_pointer;
	  offset = 0;
	}
      else
	new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
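
/* Summary of the mapping implemented above; the offsets are the file-static
   variables computed in instantiate_virtual_regs:

     virtual_incoming_args_rtx -> arg_pointer_rtx   + in_arg_offset
				  (or crtl->args.internal_arg_pointer
				  when stack_realign_drap is set)
     virtual_stack_vars_rtx    -> frame_pointer_rtx + var_offset
     virtual_stack_dynamic_rtx -> stack_pointer_rtx + dynamic_offset
     virtual_outgoing_args_rtx -> stack_pointer_rtx + out_arg_offset
     virtual_cfa_rtx           -> frame or arg pointer + cfa_offset  */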
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
	{
	  *loc = plus_constant (new_rtx, offset);
	  if (changed)
	    *changed = true;
	}
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
	{
	  new_rtx = plus_constant (new_rtx, offset);
	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
	  if (changed)
	    *changed = true;
	  return -1;
	}

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	 we can commute the PLUS and SUBREG because pointers into the
	 frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  const struct insn_operand_data *op_data;

  if (code < 0)
    return true;

  op_data = &insn_data[code].operand[operand];
  if (op_data->predicate == NULL)
    return true;

  return op_data->predicate (x, op_data->mode);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
	{
	  start_sequence ();

	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
				   GEN_INT (-offset));
	  x = force_operand (x, new_rtx);
	  if (x != new_rtx)
	    emit_move_insn (new_rtx, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
				   new_rtx, GEN_INT (offset), SET_DEST (set),
				   1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && GET_CODE (recog_data.operand[2]) == CONST_INT
	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new_rtx);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new_rtx)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);
	    bool changed = false;

	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
	    if (!changed)
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offsetted, but the base reg isn't virtual-stack-reg
	       anymore.  Below we would do a force_reg on the whole operand,
	       but this insn might actually only accept memory.  Hence,
	       before doing that last resort, try to reload the address into
	       a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new_rtx = instantiate_new_reg (x, &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset == 0)
	    x = new_rtx;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is a valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
				       GEN_INT (offset), NULL_RTX,
				       1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
					     GEN_INT (offset), NULL_RTX,
					     1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
				   GET_MODE (new_rtx), SUBREG_BYTE (x));
	  gcc_assert (x);
	  break;

	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  if (REG_P (x))
	    {
	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
	      x = copy_to_reg (x);
	    }
	  else
	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  delete_insn (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t) && DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl, t, next;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      next = TREE_CHAIN (t);
      decl = TREE_VALUE (t);
      if (DECL_RTL_SET_P (decl))
	instantiate_decl_rtl (DECL_RTL (decl));
      ggc_free (t);
    }
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	/* These patterns in the instruction stream can never be recognized.
	   Fortunately, they shouldn't contain virtual registers either.  */
	if (GET_CODE (PATTERN (insn)) == USE
	    || GET_CODE (PATTERN (insn)) == CLOBBER
	    || GET_CODE (PATTERN (insn)) == ADDR_VEC
	    || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
	  continue;

	instantiate_virtual_regs_in_insn (insn);

	if (INSN_DELETED_P (insn))
	  continue;

	for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
	if (GET_CODE (insn) == CALL_INSN)
	  for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
			instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
  return 0;
}
struct rtl_opt_pass pass_instantiate_virtual_regs =
{
 {
  RTL_PASS,
  "vregs",                              /* name */
  NULL,                                 /* gate */
  instantiate_virtual_regs,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  int i, regno, nregs;
  rtx reg;

  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);

  /* DECL node associated with FNTYPE when relevant, which we might need to
     check for by-invisible-reference returns, typically for CALL_EXPR input
     EXPressions.  */
  const_tree fndecl = NULL_TREE;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
	fndecl = get_callee_fndecl (fntype);
	fntype = (fndecl
		  ? TREE_TYPE (fndecl)
		  : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
	break;
      case FUNCTION_DECL:
	fndecl = fntype;
	fntype = TREE_TYPE (fndecl);
	break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
	break;
      case IDENTIFIER_NODE:
	fntype = 0;
	break;
      default:
	/* We don't expect other rtl types here.  */
	gcc_unreachable ();
      }

  if (TREE_CODE (type) == VOID_TYPE)
    return 0;

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
     called function RESULT_DECL, meaning the function returns in memory by
     invisible reference.  This check lets front-ends not set TREE_ADDRESSABLE
     on the function type, which used to be the way to request such a return
     mechanism but might now be causing troubles at gimplification time if
     temporaries with the function type need to be created.  */
  if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;
  return 0;
}
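
/* A usage sketch (hypothetical declaration): for
   'struct S { int a[64]; } f (void);', aggregate_value_p
   (DECL_RESULT (f_decl), f_decl) is nonzero on typical targets, so callers
   pass a hidden address for the return value; assign_parms_augmented_arg_list
   below materializes that address as an artificial PARM_DECL.  */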
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (!targetm.calls.allocate_stack_slots_for_args())
    return true;

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  switch (TREE_CODE (TREE_TYPE (decl)))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When not optimizing, disregard register keyword for variables with
	 types containing methods, otherwise the methods won't be callable
	 from the debugger.  */
      if (TYPE_METHODS (TREE_TYPE (decl)))
	return false;
      break;
    default:
      break;
    }

  return true;
}
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;
    }

  return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
}
/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (ca, mode, type, named_arg);
}
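
/* For illustration: a C++ class type with a non-trivial copy constructor
   has TREE_ADDRESSABLE set, so pass_by_reference returns true for it and
   reference_callee_copied returns false; the caller makes and owns the
   copy, and the callee must not introduce another.  */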
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  CUMULATIVE_ARGS args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx first_conversion_insn;
  rtx last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
			current_function_decl, -1);
#endif

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static tree
split_complex_args (tree args)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	goto found;
    }
  return args;

 found:
  args = copy_list (args);

  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);
	  bool addressable = TREE_ADDRESSABLE (p);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  DECL_MODE (p) = VOIDmode;
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  /* If this arg must go in memory, put it in a pseudo here.
	     We can't allow it to go in memory as per normal parms,
	     because the usual place might not have the imag part
	     adjacent to the real part.  */
	  DECL_ARTIFICIAL (p) = addressable;
	  DECL_IGNORED_P (p) = addressable;
	  TREE_ADDRESSABLE (p) = 0;
	  layout_decl (p, 0);

	  /* Build a second synthetic decl.  */
	  decl = build_decl (PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  DECL_ARTIFICIAL (decl) = addressable;
	  DECL_IGNORED_P (decl) = addressable;
	  layout_decl (decl, 0);

	  /* Splice it in; skip the new decl.  */
	  TREE_CHAIN (decl) = TREE_CHAIN (p);
	  TREE_CHAIN (p) = decl;
	  p = decl;
	}
    }

  return args;
}
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static tree
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (PARM_DECL, NULL_TREE, type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;

      TREE_CHAIN (decl) = fnargs;
      fnargs = decl;
      all->function_result_decl = decl;
    }

  all->orig_fnargs = fnargs;

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    fnargs = split_complex_args (fnargs);

  return fnargs;
}
2171 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2172 data for the parameter. Incorporate ABI specifics such as pass-by-
2173 reference and type promotion. */
2176 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2177 struct assign_parm_data_one *data)
2179 tree nominal_type, passed_type;
2180 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2182 memset (data, 0, sizeof (*data));
2184   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
2185   if (!cfun->stdarg)
2186     data->named_arg = 1;  /* No variadic parms.  */
2187   else if (TREE_CHAIN (parm))
2188     data->named_arg = 1;  /* Not the last non-variadic parm.  */
2189   else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2190     data->named_arg = 1;  /* Only variadic ones are unnamed.  */
2191   else
2192     data->named_arg = 0;  /* Treat as variadic.  */
2194 nominal_type = TREE_TYPE (parm);
2195 passed_type = DECL_ARG_TYPE (parm);
2197 /* Look out for errors propagating this far. Also, if the parameter's
2198 type is void then its value doesn't matter. */
2199 if (TREE_TYPE (parm) == error_mark_node
2200 /* This can happen after weird syntax errors
2201 or if an enum type is defined among the parms. */
2202 || TREE_CODE (parm) != PARM_DECL
2203 || passed_type == NULL
2204 || VOID_TYPE_P (nominal_type))
2206 nominal_type = passed_type = void_type_node;
2207 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2211 /* Find mode of arg as it is passed, and mode of arg as it should be
2212 during execution of this function. */
2213 passed_mode = TYPE_MODE (passed_type);
2214 nominal_mode = TYPE_MODE (nominal_type);
2216 /* If the parm is to be passed as a transparent union, use the type of
2217 the first field for the tests below. We have already verified that
2218 the modes are the same. */
2219 if (TREE_CODE (passed_type) == UNION_TYPE
2220 && TYPE_TRANSPARENT_UNION (passed_type))
2221 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2223 /* See if this arg was passed by invisible reference. */
2224 if (pass_by_reference (&all->args_so_far, passed_mode,
2225 passed_type, data->named_arg))
2227 passed_type = nominal_type = build_pointer_type (passed_type);
2228 data->passed_pointer = true;
2229 passed_mode = nominal_mode = Pmode;
2232 /* Find mode as it is passed by the ABI. */
2233 promoted_mode = passed_mode;
2234 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2236 int unsignedp = TYPE_UNSIGNED (passed_type);
2237       promoted_mode = promote_mode (passed_type, promoted_mode,
2238 				    &unsignedp, 1);
2242 data->nominal_type = nominal_type;
2243 data->passed_type = passed_type;
2244 data->nominal_mode = nominal_mode;
2245 data->passed_mode = passed_mode;
2246 data->promoted_mode = promoted_mode;
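/* Editor's sketch (standalone program, hypothetical helper): the
   named-argument classification above, restated for a declaration such
   as int f (int a, int b, ...).  Both a and b are "named"; only the
   anonymous variadic arguments are not.  */

#include <stdio.h>

struct parm { const char *name; int has_successor; };

static int
is_named (const struct parm *p, int fn_is_stdarg, int strict_naming)
{
  if (!fn_is_stdarg)
    return 1;		/* No variadic parms at all.  */
  if (p->has_successor)
    return 1;		/* Not the last non-variadic parm.  */
  if (strict_naming)
    return 1;		/* Only the variadic ones are unnamed.  */
  return 0;		/* Treat the last named parm as variadic.  */
}

int
main (void)
{
  struct parm a = { "a", 1 }, b = { "b", 0 };
  /* With strict argument naming, both a and b are named.  */
  printf ("a: %d, b: %d\n", is_named (&a, 1, 1), is_named (&b, 1, 1));
  return 0;
}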
2249 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2252 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2253 struct assign_parm_data_one *data, bool no_rtl)
2255 int varargs_pretend_bytes = 0;
2257 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2258 data->promoted_mode,
2259 					data->passed_type,
2260 					&varargs_pretend_bytes, no_rtl);
2262 /* If the back-end has requested extra stack space, record how much is
2263 needed. Do not change pretend_args_size otherwise since it may be
2264 nonzero from an earlier partial argument. */
2265 if (varargs_pretend_bytes > 0)
2266 all->pretend_args_size = varargs_pretend_bytes;
2269 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2270 the incoming location of the current parameter. */
2273 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2274 struct assign_parm_data_one *data)
2276   HOST_WIDE_INT pretend_bytes = 0;
2277   rtx entry_parm;
2278   bool in_regs;
2280 if (data->promoted_mode == VOIDmode)
2282 data->entry_parm = data->stack_parm = const0_rtx;
2286 #ifdef FUNCTION_INCOMING_ARG
2287 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2288 data->passed_type, data->named_arg);
2289 #else
2290   entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2291 data->passed_type, data->named_arg);
2292 #endif
2294   if (entry_parm == 0)
2295 data->promoted_mode = data->passed_mode;
2297 /* Determine parm's home in the stack, in case it arrives in the stack
2298 or we should pretend it did. Compute the stack position and rtx where
2299 the argument arrives and its size.
2301 There is one complexity here: If this was a parameter that would
2302 have been passed in registers, but wasn't only because it is
2303 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2304 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2305 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2306 as it was the previous time. */
2307 in_regs = entry_parm != 0;
2308 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2309   in_regs = true;
2310 #endif
2311 if (!in_regs && !data->named_arg)
2313 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2316 #ifdef FUNCTION_INCOMING_ARG
2317 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2318 data->passed_type, true);
2319 #else
2320       tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2321 data->passed_type, true);
2322 #endif
2323       in_regs = tem != NULL;
2327 /* If this parameter was passed both in registers and in the stack, use
2328 the copy on the stack. */
2329   if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2330 					data->passed_type))
2331     entry_parm = 0;
2337 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2338 						 data->promoted_mode,
2339 						 data->passed_type,
2340 						 data->named_arg);
2341 data->partial = partial;
2343 /* The caller might already have allocated stack space for the
2344 register parameters. */
2345 if (partial != 0 && all->reg_parm_stack_space == 0)
2347 /* Part of this argument is passed in registers and part
2348 is passed on the stack. Ask the prologue code to extend
2349 the stack part so that we can recreate the full value.
2351 PRETEND_BYTES is the size of the registers we need to store.
2352 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2353 stack space that the prologue should allocate.
2355 Internally, gcc assumes that the argument pointer is aligned
2356 to STACK_BOUNDARY bits. This is used both for alignment
2357 optimizations (see init_emit) and to locate arguments that are
2358 aligned to more than PARM_BOUNDARY bits. We must preserve this
2359 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2360 a stack boundary. */
2362 /* We assume at most one partial arg, and it must be the first
2363 argument on the stack. */
2364 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2366 pretend_bytes = partial;
2367 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2369 /* We want to align relative to the actual stack pointer, so
2370 don't include this in the stack size until later. */
2371 all->extra_pretend_bytes = all->pretend_args_size;
2375 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2376 entry_parm ? data->partial : 0, current_function_decl,
2377 &all->stack_args_size, &data->locate);
2379   /* Update parm_stack_boundary if this parameter is passed in the
2380      stack.  */
2381 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2382 crtl->parm_stack_boundary = data->locate.boundary;
2384 /* Adjust offsets to include the pretend args. */
2385 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2386 data->locate.slot_offset.constant += pretend_bytes;
2387 data->locate.offset.constant += pretend_bytes;
2389 data->entry_parm = entry_parm;
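/* Editor's sketch (standalone program, hypothetical values): the rounding
   applied to PRETEND_BYTES above.  With an 8-byte STACK_BYTES, a 12-byte
   register part yields 16 bytes of pretend args; the macro mirrors the
   CEIL_ROUND definition earlier in this file.  */

#include <stdio.h>

#define EX_CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

int
main (void)
{
  int stack_bytes = 8;	/* STACK_BOUNDARY / BITS_PER_UNIT on this target */
  int partial = 12;	/* bytes of the argument passed in registers */
  printf ("%d\n", EX_CEIL_ROUND (partial, stack_bytes));  /* prints 16 */
  return 0;
}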
2392 /* A subroutine of assign_parms. If there is actually space on the stack
2393 for this parm, count it in stack_args_size and return true. */
2396 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2397 struct assign_parm_data_one *data)
2399 /* Trivially true if we've no incoming register. */
2400 if (data->entry_parm == NULL)
2402 /* Also true if we're partially in registers and partially not,
2403 since we've arranged to drop the entire argument on the stack. */
2404 else if (data->partial != 0)
2406 /* Also true if the target says that it's passed in both registers
2407 and on the stack. */
2408 else if (GET_CODE (data->entry_parm) == PARALLEL
2409 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2411 /* Also true if the target says that there's stack allocated for
2412 all register parameters. */
2413 else if (all->reg_parm_stack_space > 0)
2415   /* Otherwise, no, this parameter has no ABI defined stack slot.  */
2416   else
2417     return false;
2419 all->stack_args_size.constant += data->locate.size.constant;
2420 if (data->locate.size.var)
2421     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2423   return true;
2426 /* A subroutine of assign_parms. Given that this parameter is allocated
2427 stack space by the ABI, find it. */
2430 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2432 rtx offset_rtx, stack_parm;
2433 unsigned int align, boundary;
2435 /* If we're passing this arg using a reg, make its stack home the
2436 aligned stack slot. */
2437 if (data->entry_parm)
2438 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2440 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2442 stack_parm = crtl->args.internal_arg_pointer;
2443 if (offset_rtx != const0_rtx)
2444 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2445 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2447 set_mem_attributes (stack_parm, parm, 1);
2448 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2449 while promoted mode's size is needed. */
2450 if (data->promoted_mode != BLKmode
2451 && data->promoted_mode != DECL_MODE (parm))
2453 set_mem_size (stack_parm, GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
2454 if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
2456 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2457 data->promoted_mode);
2459 set_mem_offset (stack_parm,
2460 plus_constant (MEM_OFFSET (stack_parm), -offset));
2464 boundary = data->locate.boundary;
2465 align = BITS_PER_UNIT;
2467 /* If we're padding upward, we know that the alignment of the slot
2468 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2469 intentionally forcing upward padding. Otherwise we have to come
2470 up with a guess at the alignment based on OFFSET_RTX. */
2471   if (data->locate.where_pad != downward || data->entry_parm)
2472     align = boundary;
2473 else if (GET_CODE (offset_rtx) == CONST_INT)
2475 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2476 align = align & -align;
2478 set_mem_align (stack_parm, align);
2480 if (data->entry_parm)
2481 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2483 data->stack_parm = stack_parm;
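/* Editor's sketch (standalone program): the alignment guess above.  OR-ing
   the slot's byte offset (converted to bits) with the slot boundary and
   isolating the least significant set bit yields the largest power of two
   dividing both, i.e. an alignment the slot is guaranteed to have.  */

#include <stdio.h>

int
main (void)
{
  unsigned int boundary = 64;		/* slot boundary, in bits */
  unsigned int offset_bytes = 4;	/* CONST_INT offset from the arg pointer */
  unsigned int align = offset_bytes * 8 | boundary;
  align = align & -align;		/* least significant set bit: 32 */
  printf ("%u\n", align);
  return 0;
}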
2486 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2487 always valid and contiguous. */
2490 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2492 rtx entry_parm = data->entry_parm;
2493 rtx stack_parm = data->stack_parm;
2495 /* If this parm was passed part in regs and part in memory, pretend it
2496 arrived entirely in memory by pushing the register-part onto the stack.
2497 In the special case of a DImode or DFmode that is split, we could put
2498      it together in a pseudoreg directly, but for now that's not worth
2499      bothering with.  */
2500 if (data->partial != 0)
2502 /* Handle calls that pass values in multiple non-contiguous
2503 locations. The Irix 6 ABI has examples of this. */
2504 if (GET_CODE (entry_parm) == PARALLEL)
2505 emit_group_store (validize_mem (stack_parm), entry_parm,
2506 			  data->passed_type,
2507 			  int_size_in_bytes (data->passed_type));
2510 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2511 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2512 data->partial / UNITS_PER_WORD);
2515 entry_parm = stack_parm;
2518   /* If we didn't decide this parm came in a register, by default it came
2519      on the stack.  */
2520 else if (entry_parm == NULL)
2521 entry_parm = stack_parm;
2523 /* When an argument is passed in multiple locations, we can't make use
2524 of this information, but we can save some copying if the whole argument
2525 is passed in a single register. */
2526 else if (GET_CODE (entry_parm) == PARALLEL
2527 && data->nominal_mode != BLKmode
2528 && data->passed_mode != BLKmode)
2530 size_t i, len = XVECLEN (entry_parm, 0);
2532 for (i = 0; i < len; i++)
2533 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2534 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2535 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2536 == data->passed_mode)
2537 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2539 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2544 data->entry_parm = entry_parm;
2547 /* A subroutine of assign_parms. Reconstitute any values which were
2548 passed in multiple registers and would fit in a single register. */
2551 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2553 rtx entry_parm = data->entry_parm;
2555 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2556 This can be done with register operations rather than on the
2557      stack, even if we will store the reconstituted parameter on the
2558      stack.  */
2559 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2561 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2562 emit_group_store (parmreg, entry_parm, data->passed_type,
2563 GET_MODE_SIZE (GET_MODE (entry_parm)));
2564 entry_parm = parmreg;
2567 data->entry_parm = entry_parm;
2570 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2571 always valid and properly aligned. */
2574 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2576 rtx stack_parm = data->stack_parm;
2578 /* If we can't trust the parm stack slot to be aligned enough for its
2579 ultimate type, don't use that slot after entry. We'll make another
2580 stack slot, if we need one. */
2581   if (stack_parm
2582       && ((STRICT_ALIGNMENT
2583 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2584 || (data->nominal_type
2585 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2586 	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2587     stack_parm = NULL;
2589 /* If parm was passed in memory, and we need to convert it on entry,
2590 don't store it back in that same slot. */
2591 else if (data->entry_parm == stack_parm
2592 && data->nominal_mode != BLKmode
2593 	   && data->nominal_mode != data->passed_mode)
2594     stack_parm = NULL;
2596 /* If stack protection is in effect for this function, don't leave any
2597 pointers in their passed stack slots. */
2598 else if (crtl->stack_protect_guard
2599 && (flag_stack_protect == 2
2600 || data->passed_pointer
2601 	       || POINTER_TYPE_P (data->nominal_type)))
2602     stack_parm = NULL;
2604 data->stack_parm = stack_parm;
2607 /* A subroutine of assign_parms. Return true if the current parameter
2608 should be stored as a BLKmode in the current frame. */
2611 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2613   if (data->nominal_mode == BLKmode)
2614     return true;
2615   if (GET_MODE (data->entry_parm) == BLKmode)
2616     return true;
2618 #ifdef BLOCK_REG_PADDING
2619 /* Only assign_parm_setup_block knows how to deal with register arguments
2620 that are padded at the least significant end. */
2621 if (REG_P (data->entry_parm)
2622 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2623 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2624 	  == (BYTES_BIG_ENDIAN ? upward : downward)))
2625     return true;
2626 #endif
2628   return false;
2631 /* A subroutine of assign_parms. Arrange for the parameter to be
2632 present and valid in DATA->STACK_RTL. */
2635 assign_parm_setup_block (struct assign_parm_data_all *all,
2636 tree parm, struct assign_parm_data_one *data)
2638 rtx entry_parm = data->entry_parm;
2639 rtx stack_parm = data->stack_parm;
2641 HOST_WIDE_INT size_stored;
2643 if (GET_CODE (entry_parm) == PARALLEL)
2644 entry_parm = emit_group_move_into_temps (entry_parm);
2646 size = int_size_in_bytes (data->passed_type);
2647 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2648 if (stack_parm == 0)
2650 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2651 stack_parm = assign_stack_local (BLKmode, size_stored,
2653 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2654 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2655 set_mem_attributes (stack_parm, parm, 1);
2658 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2659 calls that pass values in multiple non-contiguous locations. */
2660 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2664 /* Note that we will be storing an integral number of words.
2665 So we have to be careful to ensure that we allocate an
2666 integral number of words. We do this above when we call
2667 assign_stack_local if space was not allocated in the argument
2668 list. If it was, this will not work if PARM_BOUNDARY is not
2669 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2670 if it becomes a problem. Exception is when BLKmode arrives
2671 with arguments not conforming to word_mode. */
2673 if (data->stack_parm == 0)
2675 else if (GET_CODE (entry_parm) == PARALLEL)
2678 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2680 mem = validize_mem (stack_parm);
2682 /* Handle values in multiple non-contiguous locations. */
2683 if (GET_CODE (entry_parm) == PARALLEL)
2685 push_to_sequence2 (all->first_conversion_insn,
2686 all->last_conversion_insn);
2687 emit_group_store (mem, entry_parm, data->passed_type, size);
2688 all->first_conversion_insn = get_insns ();
2689 all->last_conversion_insn = get_last_insn ();
2696 /* If SIZE is that of a mode no bigger than a word, just use
2697 that mode's store operation. */
2698 else if (size <= UNITS_PER_WORD)
2700 enum machine_mode mode
2701 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2703 	  if (mode != BLKmode
2704 #ifdef BLOCK_REG_PADDING
2705 && (size == UNITS_PER_WORD
2706 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2707 		      != (BYTES_BIG_ENDIAN ? upward : downward)))
2708 #endif
2709 	      )
2713 /* We are really truncating a word_mode value containing
2714 SIZE bytes into a value of mode MODE. If such an
2715 operation requires no actual instructions, we can refer
2716 to the value directly in mode MODE, otherwise we must
2717 		 start with the register in word_mode and explicitly
2718 		 convert it.  */
2719 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2720 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2723 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2724 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2726 emit_move_insn (change_address (mem, mode, 0), reg);
2729 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2730 machine must be aligned to the left before storing
2731 to memory. Note that the previous test doesn't
2732 handle all cases (e.g. SIZE == 3). */
2733 else if (size != UNITS_PER_WORD
2734 #ifdef BLOCK_REG_PADDING
2735 		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2736 		       == downward)
2737 #else
2738 		   && BYTES_BIG_ENDIAN
2739 #endif
2740 		   )
2742 	      rtx tem, x;
2743 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2744 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2746 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2747 build_int_cst (NULL_TREE, by),
2749 tem = change_address (mem, word_mode, 0);
2750 emit_move_insn (tem, x);
2753 move_block_from_reg (REGNO (entry_parm), mem,
2754 size_stored / UNITS_PER_WORD);
2757 move_block_from_reg (REGNO (entry_parm), mem,
2758 size_stored / UNITS_PER_WORD);
2760 else if (data->stack_parm == 0)
2762 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2763 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2765 all->first_conversion_insn = get_insns ();
2766 all->last_conversion_insn = get_last_insn ();
2770 data->stack_parm = stack_parm;
2771 SET_DECL_RTL (parm, stack_parm);
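/* Editor's sketch (standalone program, hypothetical 32-bit big-endian
   word): the left-justification performed above for a BLKmode value
   smaller than a word.  A 3-byte payload must occupy the most significant
   bytes of the word before the word is stored into the stack slot.  */

#include <stdio.h>
#include <stdint.h>

int
main (void)
{
  int units_per_word = 4, size = 3;
  int by = (units_per_word - size) * 8;	/* shift count: 8 bits */
  uint32_t reg = 0x00123456;		/* payload in the low 3 bytes */
  printf ("0x%08x\n", (unsigned int) (reg << by));  /* 0x12345600 */
  return 0;
}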
2774 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2775 parameter. Get it there. Perform all ABI specified conversions. */
2778 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2779 struct assign_parm_data_one *data)
2782 enum machine_mode promoted_nominal_mode;
2783 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2784 bool did_conversion = false;
2786 /* Store the parm in a pseudoregister during the function, but we may
2787 need to do it in a wider mode. */
2789 /* This is not really promoting for a call. However we need to be
2790 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2791 promoted_nominal_mode
2792 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2794 parmreg = gen_reg_rtx (promoted_nominal_mode);
2796 if (!DECL_ARTIFICIAL (parm))
2797 mark_user_reg (parmreg);
2799 /* If this was an item that we received a pointer to,
2800 set DECL_RTL appropriately. */
2801 if (data->passed_pointer)
2803 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2804 set_mem_attributes (x, parm, 1);
2805 SET_DECL_RTL (parm, x);
2808 SET_DECL_RTL (parm, parmreg);
2810 assign_parm_remove_parallels (data);
2812 /* Copy the value into the register. */
2813 if (data->nominal_mode != data->passed_mode
2814 || promoted_nominal_mode != data->promoted_mode)
2818 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2819 mode, by the caller. We now have to convert it to
2820 NOMINAL_MODE, if different. However, PARMREG may be in
2821 a different mode than NOMINAL_MODE if it is being stored
2824 If ENTRY_PARM is a hard register, it might be in a register
2825 not valid for operating in its mode (e.g., an odd-numbered
2826 register for a DFmode). In that case, moves are the only
2827 thing valid, so we can't do a convert from there. This
2828 	 occurs when the calling sequence allows such misaligned
2829 	 usages.
2831 In addition, the conversion may involve a call, which could
2832 clobber parameters which haven't been copied to pseudo
2833 registers yet. Therefore, we must first copy the parm to
2834 a pseudo reg here, and save the conversion until after all
2835 parameters have been moved. */
2837 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2839 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2841 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2842 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2844 if (GET_CODE (tempreg) == SUBREG
2845 && GET_MODE (tempreg) == data->nominal_mode
2846 && REG_P (SUBREG_REG (tempreg))
2847 && data->nominal_mode == data->passed_mode
2848 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2849 && GET_MODE_SIZE (GET_MODE (tempreg))
2850 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2852 	  /* The argument is already sign/zero extended, so note it
2853 	     into the copy.  */
2854 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2855 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2858 /* TREE_USED gets set erroneously during expand_assignment. */
2859 save_tree_used = TREE_USED (parm);
2860 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
2861 TREE_USED (parm) = save_tree_used;
2862 all->first_conversion_insn = get_insns ();
2863 all->last_conversion_insn = get_last_insn ();
2866 did_conversion = true;
2869 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2871 /* If we were passed a pointer but the actual value can safely live
2872 in a register, put it in one. */
2873 if (data->passed_pointer
2874 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2875 /* If by-reference argument was promoted, demote it. */
2876 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2877 || use_register_for_decl (parm)))
2879 /* We can't use nominal_mode, because it will have been set to
2880 Pmode above. We must use the actual mode of the parm. */
2881 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2882 mark_user_reg (parmreg);
2884 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2886 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2887 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2889 push_to_sequence2 (all->first_conversion_insn,
2890 all->last_conversion_insn);
2891 emit_move_insn (tempreg, DECL_RTL (parm));
2892 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2893 emit_move_insn (parmreg, tempreg);
2894 all->first_conversion_insn = get_insns ();
2895 all->last_conversion_insn = get_last_insn ();
2898 did_conversion = true;
2901 emit_move_insn (parmreg, DECL_RTL (parm));
2903 SET_DECL_RTL (parm, parmreg);
2905       /* STACK_PARM is the pointer, not the parm, and PARMREG is
2906 	 now the parm.  */
2907 data->stack_parm = NULL;
2910 /* Mark the register as eliminable if we did no conversion and it was
2911 copied from memory at a fixed offset, and the arg pointer was not
2912 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2913 offset formed an invalid address, such memory-equivalences as we
2914 make here would screw up life analysis for it. */
2915   if (data->nominal_mode == data->passed_mode
2916       && !did_conversion
2917 && data->stack_parm != 0
2918 && MEM_P (data->stack_parm)
2919 && data->locate.offset.var == 0
2920 && reg_mentioned_p (virtual_incoming_args_rtx,
2921 XEXP (data->stack_parm, 0)))
2923 rtx linsn = get_last_insn ();
2926 /* Mark complex types separately. */
2927 if (GET_CODE (parmreg) == CONCAT)
2929 enum machine_mode submode
2930 = GET_MODE_INNER (GET_MODE (parmreg));
2931 int regnor = REGNO (XEXP (parmreg, 0));
2932 int regnoi = REGNO (XEXP (parmreg, 1));
2933 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2934 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2935 GET_MODE_SIZE (submode));
2937 	  /* Scan backwards for the set of the real and
2938 	     imaginary parts.  */
2939 for (sinsn = linsn; sinsn != 0;
2940 sinsn = prev_nonnote_insn (sinsn))
2942 set = single_set (sinsn);
2946 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2947 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
2948 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2949 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
2952 else if ((set = single_set (linsn)) != 0
2953 && SET_DEST (set) == parmreg)
2954 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
2957 /* For pointer data type, suggest pointer register. */
2958 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2959 mark_reg_pointer (parmreg,
2960 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2963 /* A subroutine of assign_parms. Allocate stack space to hold the current
2964 parameter. Get it there. Perform all ABI specified conversions. */
2967 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2968 struct assign_parm_data_one *data)
2970   /* Value must be stored in the stack slot STACK_PARM during function
2971      execution.  */
2972 bool to_conversion = false;
2974 assign_parm_remove_parallels (data);
2976 if (data->promoted_mode != data->nominal_mode)
2978 /* Conversion is required. */
2979 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2981 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2983 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2984 to_conversion = true;
2986 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2987 TYPE_UNSIGNED (TREE_TYPE (parm)));
2989 if (data->stack_parm)
2991 int offset = subreg_lowpart_offset (data->nominal_mode,
2992 GET_MODE (data->stack_parm));
2993 /* ??? This may need a big-endian conversion on sparc64. */
2994 	  data->stack_parm
2995 	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
2996 if (offset && MEM_OFFSET (data->stack_parm))
2997 set_mem_offset (data->stack_parm,
2998 plus_constant (MEM_OFFSET (data->stack_parm),
3003 if (data->entry_parm != data->stack_parm)
3007 if (data->stack_parm == 0)
3009 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3010 GET_MODE (data->entry_parm),
3011 TYPE_ALIGN (data->passed_type));
3013 = assign_stack_local (GET_MODE (data->entry_parm),
3014 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3016 set_mem_attributes (data->stack_parm, parm, 1);
3019 dest = validize_mem (data->stack_parm);
3020 src = validize_mem (data->entry_parm);
3024 /* Use a block move to handle potentially misaligned entry_parm. */
3026 push_to_sequence2 (all->first_conversion_insn,
3027 all->last_conversion_insn);
3028 to_conversion = true;
3030 emit_block_move (dest, src,
3031 GEN_INT (int_size_in_bytes (data->passed_type)),
3035 emit_move_insn (dest, src);
3040 all->first_conversion_insn = get_insns ();
3041 all->last_conversion_insn = get_last_insn ();
3045 SET_DECL_RTL (parm, data->stack_parm);
3048 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3049 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3052 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
3055 tree orig_fnargs = all->orig_fnargs;
3057 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
3059 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3060 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3062 rtx tmp, real, imag;
3063 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3065 real = DECL_RTL (fnargs);
3066 imag = DECL_RTL (TREE_CHAIN (fnargs));
3067 if (inner != GET_MODE (real))
3069 real = gen_lowpart_SUBREG (inner, real);
3070 imag = gen_lowpart_SUBREG (inner, imag);
3073 if (TREE_ADDRESSABLE (parm))
3076 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3077 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3079 TYPE_ALIGN (TREE_TYPE (parm)));
3081 /* split_complex_arg put the real and imag parts in
3082 pseudos. Move them to memory. */
3083 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3084 set_mem_attributes (tmp, parm, 1);
3085 rmem = adjust_address_nv (tmp, inner, 0);
3086 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3087 push_to_sequence2 (all->first_conversion_insn,
3088 all->last_conversion_insn);
3089 emit_move_insn (rmem, real);
3090 emit_move_insn (imem, imag);
3091 all->first_conversion_insn = get_insns ();
3092 all->last_conversion_insn = get_last_insn ();
3096 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3097 SET_DECL_RTL (parm, tmp);
3099 real = DECL_INCOMING_RTL (fnargs);
3100 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
3101 if (inner != GET_MODE (real))
3103 real = gen_lowpart_SUBREG (inner, real);
3104 imag = gen_lowpart_SUBREG (inner, imag);
3106 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3107 set_decl_incoming_rtl (parm, tmp, false);
3108 fnargs = TREE_CHAIN (fnargs);
3110       else
3111 	{
3112 	  SET_DECL_RTL (parm, DECL_RTL (fnargs));
3113 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false);
3115 /* Set MEM_EXPR to the original decl, i.e. to PARM,
3116 instead of the copy of decl, i.e. FNARGS. */
3117 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
3118 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
3121 fnargs = TREE_CHAIN (fnargs);
3125 /* Assign RTL expressions to the function's parameters. This may involve
3126 copying them into registers and using those registers as the DECL_RTL. */
3129 assign_parms (tree fndecl)
3131   struct assign_parm_data_all all;
3132   tree fnargs, parm;
3134 crtl->args.internal_arg_pointer
3135 = targetm.calls.internal_arg_pointer ();
3137 assign_parms_initialize_all (&all);
3138 fnargs = assign_parms_augmented_arg_list (&all);
3140 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3142 struct assign_parm_data_one data;
3144 /* Extract the type of PARM; adjust it according to ABI. */
3145 assign_parm_find_data_types (&all, parm, &data);
3147 /* Early out for errors and void parameters. */
3148 if (data.passed_mode == VOIDmode)
3150 SET_DECL_RTL (parm, const0_rtx);
3151 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3155 /* Estimate stack alignment from parameter alignment. */
3156 if (SUPPORTS_STACK_ALIGNMENT)
3158 	  unsigned int align = FUNCTION_ARG_BOUNDARY (data.promoted_mode,
3159 						      data.passed_type);
3160 	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3161 				     align);
3162 if (TYPE_ALIGN (data.nominal_type) > align)
3163 align = MINIMUM_ALIGNMENT (data.nominal_type,
3164 TYPE_MODE (data.nominal_type),
3165 TYPE_ALIGN (data.nominal_type));
3166 if (crtl->stack_alignment_estimated < align)
3168 gcc_assert (!crtl->stack_realign_processed);
3169 crtl->stack_alignment_estimated = align;
3173 if (cfun->stdarg && !TREE_CHAIN (parm))
3174 assign_parms_setup_varargs (&all, &data, false);
3176 /* Find out where the parameter arrives in this function. */
3177 assign_parm_find_entry_rtl (&all, &data);
3179 /* Find out where stack space for this parameter might be. */
3180 if (assign_parm_is_stack_parm (&all, &data))
3182 assign_parm_find_stack_rtl (parm, &data);
3183 assign_parm_adjust_entry_rtl (&data);
3186 /* Record permanently how this parm was passed. */
3187 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
3189 /* Update info on where next arg arrives in registers. */
3190 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3191 data.passed_type, data.named_arg);
3193 assign_parm_adjust_stack_rtl (&data);
3195 if (assign_parm_setup_block_p (&data))
3196 assign_parm_setup_block (&all, parm, &data);
3197 else if (data.passed_pointer || use_register_for_decl (parm))
3198 assign_parm_setup_reg (&all, parm, &data);
3200 assign_parm_setup_stack (&all, parm, &data);
3203 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3204 assign_parms_unsplit_complex (&all, fnargs);
3206 /* Output all parameter conversion instructions (possibly including calls)
3207 now that all parameters have been copied out of hard registers. */
3208 emit_insn (all.first_conversion_insn);
3210 /* Estimate reload stack alignment from scalar return mode. */
3211 if (SUPPORTS_STACK_ALIGNMENT)
3213 if (DECL_RESULT (fndecl))
3215 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3216 enum machine_mode mode = TYPE_MODE (type);
3218 	  if (mode != VOIDmode
3219 	      && mode != BLKmode
3220 	      && !AGGREGATE_TYPE_P (type))
3222 unsigned int align = GET_MODE_ALIGNMENT (mode);
3223 if (crtl->stack_alignment_estimated < align)
3225 gcc_assert (!crtl->stack_realign_processed);
3226 crtl->stack_alignment_estimated = align;
3232 /* If we are receiving a struct value address as the first argument, set up
3233 the RTL for the function result. As this might require code to convert
3234 the transmitted address to Pmode, we do this here to ensure that possible
3235 preliminary conversions of the address have been emitted already. */
3236 if (all.function_result_decl)
3238 tree result = DECL_RESULT (current_function_decl);
3239 rtx addr = DECL_RTL (all.function_result_decl);
3242 if (DECL_BY_REFERENCE (result))
3246 addr = convert_memory_address (Pmode, addr);
3247 x = gen_rtx_MEM (DECL_MODE (result), addr);
3248 set_mem_attributes (x, result, 1);
3250 SET_DECL_RTL (result, x);
3253 /* We have aligned all the args, so add space for the pretend args. */
3254 crtl->args.pretend_args_size = all.pretend_args_size;
3255 all.stack_args_size.constant += all.extra_pretend_bytes;
3256 crtl->args.size = all.stack_args_size.constant;
3258   /* Adjust function incoming argument size for alignment and
3259      minimum size.  */
3261 #ifdef REG_PARM_STACK_SPACE
3262 crtl->args.size = MAX (crtl->args.size,
3263 REG_PARM_STACK_SPACE (fndecl));
3266 crtl->args.size = CEIL_ROUND (crtl->args.size,
3267 PARM_BOUNDARY / BITS_PER_UNIT);
3269 #ifdef ARGS_GROW_DOWNWARD
3270 crtl->args.arg_offset_rtx
3271 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3272 : expand_expr (size_diffop (all.stack_args_size.var,
3273 size_int (-all.stack_args_size.constant)),
3274 NULL_RTX, VOIDmode, 0));
3275 #else
3276   crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3277 #endif
3279   /* See how many bytes, if any, of its args a function should try to pop
3280      on return.  */
3282   crtl->args.pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3283 					   crtl->args.size);
3285   /* For a stdarg.h function, save info about
3286 regs and stack space used by the named args. */
3288 crtl->args.info = all.args_so_far;
3290 /* Set the rtx used for the function return value. Put this in its
3291 own variable so any optimizers that need this information don't have
3292 to include tree.h. Do this here so it gets done when an inlined
3293 function gets output. */
3295   crtl->return_rtx
3296     = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3297 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3299 /* If scalar return value was computed in a pseudo-reg, or was a named
3300      return value that got dumped to the stack, copy that to the hard
3301      return register.  */
3302 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3304 tree decl_result = DECL_RESULT (fndecl);
3305 rtx decl_rtl = DECL_RTL (decl_result);
3307 if (REG_P (decl_rtl)
3308 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3309 : DECL_REGISTER (decl_result))
3313 	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3314 							fndecl, true);
3315 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3316 /* The delay slot scheduler assumes that crtl->return_rtx
3317 holds the hard register containing the return value, not a
3318 temporary pseudo. */
3319 crtl->return_rtx = real_decl_rtl;
3324 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3325 For all seen types, gimplify their sizes. */
3328 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3335 if (POINTER_TYPE_P (t))
3337 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3338 && !TYPE_SIZES_GIMPLIFIED (t))
3340 gimplify_type_sizes (t, (gimple_seq *) data);
3348 /* Gimplify the parameter list for current_function_decl. This involves
3349 evaluating SAVE_EXPRs of variable sized parameters and generating code
3350 to implement callee-copies reference parameters. Returns a sequence of
3351 statements to add to the beginning of the function. */
3354 gimplify_parameters (void)
3356 struct assign_parm_data_all all;
3357   tree fnargs, parm;
3358   gimple_seq stmts = NULL;
3360 assign_parms_initialize_all (&all);
3361 fnargs = assign_parms_augmented_arg_list (&all);
3363 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3365 struct assign_parm_data_one data;
3367 /* Extract the type of PARM; adjust it according to ABI. */
3368 assign_parm_find_data_types (&all, parm, &data);
3370 /* Early out for errors and void parameters. */
3371 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3374 /* Update info on where next arg arrives in registers. */
3375 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3376 data.passed_type, data.named_arg);
3378 /* ??? Once upon a time variable_size stuffed parameter list
3379 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3380 turned out to be less than manageable in the gimple world.
3381 Now we have to hunt them down ourselves. */
3382 walk_tree_without_duplicates (&data.passed_type,
3383 gimplify_parm_type, &stmts);
3385 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3387 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3388 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3391 if (data.passed_pointer)
3393 tree type = TREE_TYPE (data.passed_type);
3394 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3395 type, data.named_arg))
3399 /* For constant-sized objects, this is trivial; for
3400 variable-sized objects, we have to play games. */
3401 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3402 && !(flag_stack_check == GENERIC_STACK_CHECK
3403 && compare_tree_int (DECL_SIZE_UNIT (parm),
3404 STACK_CHECK_MAX_VAR_SIZE) > 0))
3406 local = create_tmp_var (type, get_name (parm));
3407 DECL_IGNORED_P (local) = 0;
3408 		  /* If PARM was addressable, move that flag over
3409 		     to the local copy, as its address will be taken,
3410 		     not the PARM's.  Keep the PARM's address-taken flag set,
3411 		     as we'll query that flag during gimplification.  */
3412 if (TREE_ADDRESSABLE (parm))
3413 TREE_ADDRESSABLE (local) = 1;
3417 tree ptr_type, addr;
3419 ptr_type = build_pointer_type (type);
3420 addr = create_tmp_var (ptr_type, get_name (parm));
3421 DECL_IGNORED_P (addr) = 0;
3422 local = build_fold_indirect_ref (addr);
3424 t = built_in_decls[BUILT_IN_ALLOCA];
3425 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3426 t = fold_convert (ptr_type, t);
3427 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3428 gimplify_and_add (t, &stmts);
3431 gimplify_assign (local, parm, &stmts);
3433 SET_DECL_VALUE_EXPR (parm, local);
3434 DECL_HAS_VALUE_EXPR_P (parm) = 1;
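/* Illustration (editor's sketch): for a variable-sized, callee-copied
   reference parameter P, the code above emits, in effect,

       addr = __builtin_alloca (DECL_SIZE_UNIT (P));
       *addr = *P;

   and then redirects uses of P to the local copy through
   DECL_VALUE_EXPR, so the callee operates on storage it owns.  */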
3442 /* Compute the size and offset from the start of the stacked arguments for a
3443 parm passed in mode PASSED_MODE and with type TYPE.
3445    INITIAL_OFFSET_PTR points to the current offset into the stacked
3446    arguments.
3448 The starting offset and size for this parm are returned in
3449 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3450    nonzero, the offset is that of the stack slot, which is returned in
3451 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3452 padding required from the initial offset ptr to the stack slot.
3454 IN_REGS is nonzero if the argument will be passed in registers. It will
3455 never be set if REG_PARM_STACK_SPACE is not defined.
3457 FNDECL is the function in which the argument was defined.
3459 There are two types of rounding that are done. The first, controlled by
3460 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3461 list to be aligned to the specific boundary (in bits). This rounding
3462 affects the initial and starting offsets, but not the argument size.
3464 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3465 optionally rounds the size of the parm to PARM_BOUNDARY. The
3466 initial offset is not affected by this rounding, while the size always
3467 is and the starting offset may be. */
3469 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3470 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3471 callers pass in the total size of args so far as
3472 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3475 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3476 int partial, tree fndecl ATTRIBUTE_UNUSED,
3477 struct args_size *initial_offset_ptr,
3478 struct locate_and_pad_arg_data *locate)
3480   tree sizetree;
3481   enum direction where_pad;
3482 unsigned int boundary;
3483 int reg_parm_stack_space = 0;
3484 int part_size_in_regs;
3486 #ifdef REG_PARM_STACK_SPACE
3487 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3489 /* If we have found a stack parm before we reach the end of the
3490 area reserved for registers, skip that area. */
3491   if (! in_regs)
3492     {
3493       if (reg_parm_stack_space > 0)
3495 if (initial_offset_ptr->var)
3497 initial_offset_ptr->var
3498 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3499 ssize_int (reg_parm_stack_space));
3500 initial_offset_ptr->constant = 0;
3502 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3503 initial_offset_ptr->constant = reg_parm_stack_space;
3506 #endif /* REG_PARM_STACK_SPACE */
3508 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3510   sizetree
3511     = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3512 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3513 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3514 locate->where_pad = where_pad;
3516 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3517 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3518 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3520 locate->boundary = boundary;
3522 if (SUPPORTS_STACK_ALIGNMENT)
3524       /* stack_alignment_estimated can't change after stack has been
3525 	 realigned.  */
3526 if (crtl->stack_alignment_estimated < boundary)
3528 if (!crtl->stack_realign_processed)
3529 crtl->stack_alignment_estimated = boundary;
3532 /* If stack is realigned and stack alignment value
3533 hasn't been finalized, it is OK not to increase
3534 stack_alignment_estimated. The bigger alignment
3535 	      requirement is recorded in stack_alignment_needed
3536 	      below.  */
3537 gcc_assert (!crtl->stack_realign_finalized
3538 && crtl->stack_realign_needed);
3543 /* Remember if the outgoing parameter requires extra alignment on the
3544 calling function side. */
3545 if (crtl->stack_alignment_needed < boundary)
3546 crtl->stack_alignment_needed = boundary;
3547 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
3548 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
3549 if (crtl->preferred_stack_boundary < boundary)
3550 crtl->preferred_stack_boundary = boundary;
3552 #ifdef ARGS_GROW_DOWNWARD
3553 locate->slot_offset.constant = -initial_offset_ptr->constant;
3554 if (initial_offset_ptr->var)
3555 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3556 initial_offset_ptr->var);
3558   {
3559     tree s2 = sizetree;
3560     if (where_pad != none
3561 && (!host_integerp (sizetree, 1)
3562 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3563 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3564 SUB_PARM_SIZE (locate->slot_offset, s2);
3567 locate->slot_offset.constant += part_size_in_regs;
3569   if (!in_regs
3570 #ifdef REG_PARM_STACK_SPACE
3571 || REG_PARM_STACK_SPACE (fndecl) > 0
3572 #endif
3573       )
3574     pad_to_arg_alignment (&locate->slot_offset, boundary,
3575 &locate->alignment_pad);
3577 locate->size.constant = (-initial_offset_ptr->constant
3578 - locate->slot_offset.constant);
3579 if (initial_offset_ptr->var)
3580 locate->size.var = size_binop (MINUS_EXPR,
3581 size_binop (MINUS_EXPR,
3582 					       ssize_int (0),
3583 					       initial_offset_ptr->var),
3584 locate->slot_offset.var);
3586   /* Pad_below needs the pre-rounded size to know how much to pad
3587      below.  */
3588 locate->offset = locate->slot_offset;
3589 if (where_pad == downward)
3590 pad_below (&locate->offset, passed_mode, sizetree);
3592 #else /* !ARGS_GROW_DOWNWARD */
3593   if (!in_regs
3594 #ifdef REG_PARM_STACK_SPACE
3595 || REG_PARM_STACK_SPACE (fndecl) > 0
3596 #endif
3597       )
3598     pad_to_arg_alignment (initial_offset_ptr, boundary,
3599 &locate->alignment_pad);
3600 locate->slot_offset = *initial_offset_ptr;
3602 #ifdef PUSH_ROUNDING
3603 if (passed_mode != BLKmode)
3604 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3607 /* Pad_below needs the pre-rounded size to know how much to pad below
3608 so this must be done before rounding up. */
3609 locate->offset = locate->slot_offset;
3610 if (where_pad == downward)
3611 pad_below (&locate->offset, passed_mode, sizetree);
3613 if (where_pad != none
3614 && (!host_integerp (sizetree, 1)
3615 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3616 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3618 ADD_PARM_SIZE (locate->size, sizetree);
3620 locate->size.constant -= part_size_in_regs;
3621 #endif /* ARGS_GROW_DOWNWARD */
3623 #ifdef FUNCTION_ARG_OFFSET
3624   locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3625 #endif
3628 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3629 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3632 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3633 struct args_size *alignment_pad)
3635 tree save_var = NULL_TREE;
3636 HOST_WIDE_INT save_constant = 0;
3637 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3638 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3640 #ifdef SPARC_STACK_BOUNDARY_HACK
3641 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3642 the real alignment of %sp. However, when it does this, the
3643 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3644   if (SPARC_STACK_BOUNDARY_HACK)
3645     sp_offset = 0;
3648 if (boundary > PARM_BOUNDARY)
3650 save_var = offset_ptr->var;
3651 save_constant = offset_ptr->constant;
3654 alignment_pad->var = NULL_TREE;
3655 alignment_pad->constant = 0;
3657 if (boundary > BITS_PER_UNIT)
3659 if (offset_ptr->var)
3661 tree sp_offset_tree = ssize_int (sp_offset);
3662 tree offset = size_binop (PLUS_EXPR,
3663 				    ARGS_SIZE_TREE (*offset_ptr),
3664 				    sp_offset_tree);
3665 #ifdef ARGS_GROW_DOWNWARD
3666 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3667 #else
3668 	  tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3669 #endif
3671 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3672 /* ARGS_SIZE_TREE includes constant term. */
3673 offset_ptr->constant = 0;
3674 if (boundary > PARM_BOUNDARY)
3675 	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3676 					     save_var);
3680 offset_ptr->constant = -sp_offset +
3681 #ifdef ARGS_GROW_DOWNWARD
3682 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3683 #else
3684 	CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3685 #endif
3686 if (boundary > PARM_BOUNDARY)
3687 alignment_pad->constant = offset_ptr->constant - save_constant;
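/* Editor's sketch (standalone program, hypothetical values): the
   constant-offset case above.  With STACK_POINTER_OFFSET == 4 and a
   16-byte boundary, an offset of 20 is aligned relative to %sp + 4,
   not %sp, giving 28 (since 28 + 4 is a multiple of 16).  The macro
   mirrors the CEIL_ROUND definition earlier in this file.  */

#include <stdio.h>

#define EX_CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

int
main (void)
{
  int sp_offset = 4, boundary_in_bytes = 16, offset = 20;
  printf ("%d\n", -sp_offset
	  + EX_CEIL_ROUND (offset + sp_offset, boundary_in_bytes));
  return 0;
}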
3693 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3695 if (passed_mode != BLKmode)
3697 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3698 offset_ptr->constant
3699 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3700 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3701 - GET_MODE_SIZE (passed_mode));
3705 if (TREE_CODE (sizetree) != INTEGER_CST
3706 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3708 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3709 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3711 ADD_PARM_SIZE (*offset_ptr, s2);
3712 SUB_PARM_SIZE (*offset_ptr, sizetree);
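/* Editor's sketch (standalone program): the non-BLKmode branch above.  A
   16-bit argument padded downward with PARM_BOUNDARY == 32 has 2 bytes of
   padding sitting below it, so its offset is bumped by 2.  */

#include <stdio.h>

int
main (void)
{
  int bitsize = 16, parm_boundary = 32, bits_per_unit = 8;
  int size = bitsize / bits_per_unit;
  int pad = ((bitsize + parm_boundary - 1)
	     / parm_boundary * parm_boundary / bits_per_unit) - size;
  printf ("%d\n", pad);	/* prints 2 */
  return 0;
}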
3718 /* True if register REGNO was alive at a place where `setjmp' was
3719 called and was set more than once or is an argument. Such regs may
3720 be clobbered by `longjmp'. */
3723 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3725 /* There appear to be cases where some local vars never reach the
3726 backend but have bogus regnos. */
3727   if (regno >= max_reg_num ())
3728     return 0;
3730 return ((REG_N_SETS (regno) > 1
3731 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3732 && REGNO_REG_SET_P (setjmp_crosses, regno));
3735 /* Walk the tree of blocks describing the binding levels within a
3736    function and warn about variables that might be killed by setjmp or
3737    vfork.  This is done after calling flow_analysis and before register
3738    allocation, since that will clobber the pseudo-regs to hard
3739    regs.  */
3742 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3746 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3748 if (TREE_CODE (decl) == VAR_DECL
3749 && DECL_RTL_SET_P (decl)
3750 && REG_P (DECL_RTL (decl))
3751 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3752 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3753 " %<longjmp%> or %<vfork%>", decl);
3756 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3757 setjmp_vars_warning (setjmp_crosses, sub);
3760 /* Do the appropriate part of setjmp_vars_warning
3761 but for arguments instead of local variables. */
3764 setjmp_args_warning (bitmap setjmp_crosses)
3767 for (decl = DECL_ARGUMENTS (current_function_decl);
3768 decl; decl = TREE_CHAIN (decl))
3769 if (DECL_RTL (decl) != 0
3770 && REG_P (DECL_RTL (decl))
3771 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3772 warning (OPT_Wclobbered,
3773 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3777 /* Generate warning messages for variables live across setjmp. */
3780 generate_setjmp_warnings (void)
3782 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3784 if (n_basic_blocks == NUM_FIXED_BLOCKS
3785       || bitmap_empty_p (setjmp_crosses))
3786     return;
3788 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3789 setjmp_args_warning (setjmp_crosses);
3793 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3794 and create duplicate blocks. */
3795 /* ??? Need an option to either create block fragments or to create
3796 abstract origin duplicates of a source block. It really depends
3797 on what optimization has been performed. */
3800 reorder_blocks (void)
3802 tree block = DECL_INITIAL (current_function_decl);
3803 VEC(tree,heap) *block_stack;
3805 if (block == NULL_TREE)
3808 block_stack = VEC_alloc (tree, heap, 10);
3810 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3811 clear_block_marks (block);
3813 /* Prune the old trees away, so that they don't get in the way. */
3814 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3815 BLOCK_CHAIN (block) = NULL_TREE;
3817 /* Recreate the block tree from the note nesting. */
3818 reorder_blocks_1 (get_insns (), block, &block_stack);
3819 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3821 VEC_free (tree, heap, block_stack);
3824 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3827 clear_block_marks (tree block)
3829   while (block)
3830     {
3831       TREE_ASM_WRITTEN (block) = 0;
3832 clear_block_marks (BLOCK_SUBBLOCKS (block));
3833 block = BLOCK_CHAIN (block);
3838 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3842 for (insn = insns; insn; insn = NEXT_INSN (insn))
3846 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
3848 tree block = NOTE_BLOCK (insn);
3851 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3852 		    ? BLOCK_FRAGMENT_ORIGIN (block)
3853 		    : block);
3855 /* If we have seen this block before, that means it now
3856 spans multiple address regions. Create a new fragment. */
3857 if (TREE_ASM_WRITTEN (block))
3859 tree new_block = copy_node (block);
3861 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3862 BLOCK_FRAGMENT_CHAIN (new_block)
3863 = BLOCK_FRAGMENT_CHAIN (origin);
3864 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3866 NOTE_BLOCK (insn) = new_block;
3870 BLOCK_SUBBLOCKS (block) = 0;
3871 TREE_ASM_WRITTEN (block) = 1;
3872 /* When there's only one block for the entire function,
3873 	     current_block == block and we mustn't do this; it
3874 	     will cause infinite recursion.  */
3875 if (block != current_block)
3877 if (block != origin)
3878 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3880 BLOCK_SUPERCONTEXT (block) = current_block;
3881 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3882 BLOCK_SUBBLOCKS (current_block) = block;
3883 current_block = origin;
3885 VEC_safe_push (tree, heap, *p_block_stack, block);
3887 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3889 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3890 BLOCK_SUBBLOCKS (current_block)
3891 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3892 current_block = BLOCK_SUPERCONTEXT (current_block);
3898 /* Reverse the order of elements in the chain T of blocks,
3899 and return the new head of the chain (old last element). */
3902 blocks_nreverse (tree t)
3904 tree prev = 0, decl, next;
3905 for (decl = t; decl; decl = next)
3907 next = BLOCK_CHAIN (decl);
3908       BLOCK_CHAIN (decl) = prev;
3909       prev = decl;
3911   return prev;
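/* Editor's sketch (standalone program): the same in-place reversal on an
   ordinary singly linked list, for reference.  */

#include <stdio.h>
#include <stddef.h>

struct node { int v; struct node *next; };

static struct node *
nreverse (struct node *t)
{
  struct node *prev = NULL, *next;
  for (; t; t = next)
    {
      next = t->next;
      t->next = prev;
      prev = t;
    }
  return prev;
}

int
main (void)
{
  struct node c = { 3, NULL }, b = { 2, &c }, a = { 1, &b };
  struct node *p;
  for (p = nreverse (&a); p; p = p->next)
    printf ("%d ", p->v);	/* prints 3 2 1 */
  return 0;
}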
3914 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3915 non-NULL, list them all into VECTOR, in a depth-first preorder
3916    traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
3917    blocks.  */
3920 all_blocks (tree block, tree *vector)
3926 TREE_ASM_WRITTEN (block) = 0;
3928 /* Record this block. */
3929       if (vector)
3930 	vector[n_blocks] = block;
3932       ++n_blocks;
3934 /* Record the subblocks, and their subblocks... */
3935 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3936 vector ? vector + n_blocks : 0);
3937       block = BLOCK_CHAIN (block);
3939   return n_blocks;
3943 /* Return a vector containing all the blocks rooted at BLOCK. The
3944 number of elements in the vector is stored in N_BLOCKS_P. The
3945 vector is dynamically allocated; it is the caller's responsibility
3946 to call `free' on the pointer returned. */
3949 get_block_vector (tree block, int *n_blocks_p)
3953 *n_blocks_p = all_blocks (block, NULL);
3954 block_vector = XNEWVEC (tree, *n_blocks_p);
3955 all_blocks (block, block_vector);
3957 return block_vector;
3960 static GTY(()) int next_block_index = 2;
3962 /* Set BLOCK_NUMBER for all the blocks in FN. */
3965 number_blocks (tree fn)
3971 /* For SDB and XCOFF debugging output, we start numbering the blocks
3972      from 1 within each function, rather than keeping a running
3973      count.  */
3974 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3975 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3976     next_block_index = 1;
3977 #endif
3979 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3981 /* The top-level BLOCK isn't numbered at all. */
3982 for (i = 1; i < n_blocks; ++i)
3983 /* We number the blocks from two. */
3984 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3986 free (block_vector);
3991 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3994 debug_find_var_in_block_tree (tree var, tree block)
3998   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3999     if (t == var)
4000       return block;
4002 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4004       tree ret = debug_find_var_in_block_tree (var, t);
4005       if (ret)
4006 	return ret;
4012 /* Keep track of whether we're in a dummy function context. If we are,
4013 we don't want to invoke the set_current_function hook, because we'll
4014 get into trouble if the hook calls target_reinit () recursively or
4015 when the initial initialization is not yet complete. */
4017 static bool in_dummy_function;
4019 /* Invoke the target hook when setting cfun. Update the optimization options
4020 if the function uses different options than the default. */
4023 invoke_set_current_function_hook (tree fndecl)
4025 if (!in_dummy_function)
4027 tree opts = ((fndecl)
4028 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4029 : optimization_default_node);
4031       if (!opts)
4032 	opts = optimization_default_node;
4034 /* Change optimization options if needed. */
4035 if (optimization_current_node != opts)
4037 optimization_current_node = opts;
4038 cl_optimization_restore (TREE_OPTIMIZATION (opts));
4041 targetm.set_current_function (fndecl);
4045 /* cfun should never be set directly; use this function. */
4048 set_cfun (struct function *new_cfun)
4050 if (cfun != new_cfun)
4052       cfun = new_cfun;
4053       invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4057 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4059 static VEC(function_p,heap) *cfun_stack;
4061 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
4064 push_cfun (struct function *new_cfun)
4066 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4067 set_cfun (new_cfun);
4070 /* Pop cfun from the stack. */
4073 pop_cfun (void)
4075   struct function *new_cfun = VEC_pop (function_p, cfun_stack);
4076 set_cfun (new_cfun);
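/* Typical usage (editor's note, not from the original sources): code that
   must temporarily operate on another function switches context with a
   balanced pair, e.g.

       push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
       ... examine or emit into the other function ...
       pop_cfun ();
*/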
/* Return the current value of funcdef_no and increment it.  */
int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}
4086 /* Allocate a function structure for FNDECL and set its contents
4087 to the defaults. Set cfun to the newly-allocated object.
4088 Some of the helper functions invoked during initialization assume
4089 that cfun has already been set. Therefore, assign the new object
4090 directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.
4094 ABSTRACT_P is true if this is a function that will never be seen by
4095 the middle-end. Such functions are front-end concepts (like C++
4096 function templates) that do not correspond directly to functions
4097 placed in object files. */
void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree result;
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = GGC_CNEW (struct function);

  cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();

      result = DECL_RESULT (fndecl);
      if (!abstract_p && aggregate_value_p (result, fndecl))
        {
#ifdef PCC_STATIC_STRUCT_RETURN
          cfun->returns_pcc_struct = 1;
#endif
          cfun->returns_struct = 1;
        }

      cfun->stdarg
        = (fntype
           && TYPE_ARG_TYPES (fntype) != 0
           && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
               != void_type_node));

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  VEC_safe_push (function_p, heap, cfun_stack, cfun);
  allocate_struct_function (fndecl, false);
}
/* Reset cfun, and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!crtl->emit.x_last_insn);
  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  gcc_assert (!in_dummy_function);
  in_dummy_function = true;
  push_struct_function (NULL_TREE);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  if (subr && DECL_STRUCT_FUNCTION (subr))
    set_cfun (DECL_STRUCT_FUNCTION (subr));
  else
    allocate_struct_function (subr, false);
  prepare_function_start ();

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Make sure all values used by the optimization passes have sane
   defaults.  */
static unsigned int
init_function_for_compilation (void)
{
  reg_renumber = 0;

  /* No prologue/epilogue insns yet.  Make sure that these vectors are
     empty.  */
  gcc_assert (VEC_length (int, prologue) == 0);
  gcc_assert (VEC_length (int, epilogue) == 0);
  gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
  return 0;
}
4236 struct rtl_opt_pass pass_init_function =
4242 init_function_for_compilation, /* execute */
4245 0, /* static_pass_number */
4247 0, /* properties_required */
4248 0, /* properties_provided */
4249 0, /* properties_destroyed */
4250 0, /* todo_flags_start */
4251 0 /* todo_flags_finish */
/* Expand a call to __main at the beginning of a possible main function.  */

void
expand_main_function (void)
{
#if (defined(INVOKE__main) \
     || (!defined(HAS_INIT_SECTION) \
         && !defined(INIT_SECTION_ASM_OP) \
         && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

#ifndef HAVE_stack_protect_set
# define HAVE_stack_protect_set		0
# define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (HAVE_stack_protect_set)
    {
      rtx insn = gen_stack_protect_set (x, y);
      if (insn)
        {
          emit_insn (insn);
          return;
        }
    }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

#ifndef HAVE_stack_protect_test
# define HAVE_stack_protect_test		0
# define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx label = gen_label_rtx ();
  rtx x, y, tmp;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  switch (HAVE_stack_protect_test != 0)
    {
    case 1:
      tmp = gen_stack_protect_test (x, y, label);
      if (tmp)
        {
          emit_insn (tmp);
          break;
        }
      /* FALLTHRU */

    default:
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
      break;
    }

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing aught
     except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_expr_stmt (targetm.stack_protect_fail ());
  emit_label (label);
}
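
/* Conceptual model of the RTL emitted above (a sketch, not GCC API; the
   __guard and __stack_protect_fail names are hypothetical).  The prologue
   copies the guard into the frame; the epilogue compares and diverts to
   the fail hook on mismatch.  */
#if 0
void protected_function (void)
{
  unsigned long __canary = __guard;  /* stack_protect_prologue */
  /* ... function body ... */
  if (__canary != __guard)           /* stack_protect_epilogue */
    __stack_protect_fail ();         /* noreturn, predicted not taken */
}
#endif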
/* Start the RTL for a new function, and set variables used for
   emitting RTL.  SUBR is the FUNCTION_DECL node.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4369 /* Make the label for return statements to jump to. Do not special
4370 case machines with special return instructions -- they will be
4371 handled later during jump, ifcvt, or epilogue creation. */
4372 return_label = gen_label_rtx ();
4374 /* Initialize rtx used to return the value. */
4375 /* Do this before assign_parms so that we copy the struct value address
4376 before any library calls that assign parms might generate. */
4378 /* Decide whether to return the value in memory or in a register. */
4379 if (aggregate_value_p (DECL_RESULT (subr), subr))
4381 /* Returning something that won't go in a register. */
4382 rtx value_address = 0;
#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (sv)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, sv);
            }
        }
      if (value_address)
        {
          rtx x = value_address;
          if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
            {
              x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
              set_mem_attributes (x, DECL_RESULT (subr), 1);
            }
          SET_DECL_RTL (DECL_RESULT (subr), x);
        }
4414 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4415 /* If return mode is void, this decl rtl should not be used. */
4416 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4419 /* Compute the return values into a pseudo reg, which we will copy
4420 into the true return register after the cleanups are done. */
4421 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4422 if (TYPE_MODE (return_type) != BLKmode
4423 && targetm.calls.return_in_msb (return_type))
4424 /* expand_function_end will insert the appropriate padding in
4425 this case. Use the return value's natural (unpadded) mode
4426 within the function proper. */
4427 SET_DECL_RTL (DECL_RESULT (subr),
4428 gen_reg_rtx (TYPE_MODE (return_type)));
4431 /* In order to figure out what mode to use for the pseudo, we
4432 figure out what the mode of the eventual return register will
4433 actually be, and use that. */
4434 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4436 /* Structures that are returned in registers are not
4437 aggregate_value_p, so we may see a PARALLEL or a REG. */
4438 if (REG_P (hard_reg))
4439 SET_DECL_RTL (DECL_RESULT (subr),
4440 gen_reg_rtx (GET_MODE (hard_reg)));
4443 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4444 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4448 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4449 result to the real return register(s). */
4450 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4453 /* Initialize rtx for parameters and local variables.
4454 In some cases this requires emitting insns. */
4455 assign_parms (subr);
4457 /* If function gets a static chain arg, store it. */
4458 if (cfun->static_chain_decl)
4460 tree parm = cfun->static_chain_decl;
4461 rtx local = gen_reg_rtx (Pmode);
4463 set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
4464 SET_DECL_RTL (parm, local);
4465 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4467 emit_move_insn (local, static_chain_incoming_rtx);
4470 /* If the function receives a non-local goto, then store the
4471 bits we need to restore the frame pointer. */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      /* ??? We need to do this save early.  Unfortunately, this point
         is reached before the frame variable gets declared.  Help out...  */
      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      if (!DECL_RTL_SET_P (var))
        expand_decl (var);

      t_save = build4 (ARRAY_REF, ptr_type_node,
                       cfun->nonlocal_goto_save_area,
                       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      r_save = convert_memory_address (Pmode, r_save);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }
4493 /* The following was moved from init_function_start.
4494 The move is supposed to make sdb output more accurate. */
4495 /* Indicate the beginning of the function body,
4496 as opposed to parm setup. */
4497 emit_note (NOTE_INSN_FUNCTION_BEG);
4499 gcc_assert (NOTE_P (get_last_insn ()));
4501 parm_birth_insn = get_last_insn ();
4506 PROFILE_HOOK (current_function_funcdef_no);
  /* After the display initializations is where the stack checking
     probe should go.  */
  if (flag_stack_check)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4515 /* Make sure there is a line number after the function entry setup code. */
4516 force_next_line_note ();
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_cfun ();
  in_dummy_function = false;
}
/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = crtl->return_rtx;

  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
        {
          rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

          if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
            (*doit) (x, arg);
        }
    }
}
static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
        do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Possibly warn about unused parameters.  */
void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = TREE_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
        && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
        && !TREE_NO_WARNING (decl))
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}
4614 static GTY(()) rtx initial_trampoline;
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;
4623 /* If arg_pointer_save_area was referenced only from a nested
4624 function, we will not have initialized it yet. Do that now. */
4625 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4626 get_arg_pointer_save_area ();
4628 /* If we are doing generic stack checking and this function makes calls,
4629 do a stack probe at the start of the function to ensure we have enough
4630 space for another stack frame. */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (CALL_P (insn))
          {
            start_sequence ();
            probe_stack_range (STACK_OLD_CHECK_PROTECT,
                               GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
            seq = get_insns ();
            end_sequence ();
            emit_insn_before (seq, stack_check_probe_note);
            break;
          }
    }
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();
4652 clear_pending_stack_adjust ();
4653 do_pending_stack_adjust ();
  /* Output a line number for the end of the function.
     SDB depends on this.  */
4657 force_next_line_note ();
4658 set_curr_insn_source_location (input_location);
4660 /* Before the return label (if any), clobber the return
4661 registers so that they are not propagated live to the rest of
4662 the function. This can only happen with functions that drop
4663 through; if there had been a return statement, there would
4664 have either been a return rtx, or a jump to the return label.
     We delay actual code generation until after the current_function_value_rtx
     is computed.  */
4668 clobber_after = get_last_insn ();
4670 /* Output the label for the actual return from the function. */
4671 emit_label (return_label);
  if (USING_SJLJ_EXCEPTIONS)
    {
      /* Let except.c know where it should emit the call to unregister
         the function context for sjlj exceptions.  */
      if (flag_exceptions)
        sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
         moved into the epilogue by scheduling, because we don't
         always emit unwind information for the epilogue.  */
      if (flag_non_call_exceptions)
        emit_insn (gen_blockage ());
    }
4689 /* If this is an implementation of throw, do what's necessary to
4690 communicate between __builtin_eh_return and the epilogue. */
4691 expand_eh_return ();
4693 /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
4696 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4698 tree decl_result = DECL_RESULT (current_function_decl);
4699 rtx decl_rtl = DECL_RTL (decl_result);
4701 if (REG_P (decl_rtl)
4702 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4703 : DECL_REGISTER (decl_result))
4705 rtx real_decl_rtl = crtl->return_rtx;
4707 /* This should be set in assign_parms. */
4708 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4710 /* If this is a BLKmode structure being returned in registers,
4711 then use the mode computed in expand_return. Note that if
4712 decl_rtl is memory, then its mode may have been changed,
4713 but that crtl->return_rtx has not. */
4714 if (GET_MODE (real_decl_rtl) == BLKmode)
4715 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
          /* If a non-BLKmode return value should be padded at the least
             significant end of the register, shift it left by the appropriate
             amount.  BLKmode results are handled using the group load/store
             machinery.  */
          if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
              && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
            {
              emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
                                           REGNO (real_decl_rtl)),
                              decl_rtl);
              shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
            }
          /* If a named return value dumped decl_return to memory, then
             we may need to re-do the PROMOTE_MODE signed/unsigned
             coercion.  */
          else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
            {
              int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));

              if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
                promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
                              &unsignedp, 1);

              convert_move (real_decl_rtl, decl_rtl, unsignedp);
            }
4742 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4744 /* If expand_function_start has created a PARALLEL for decl_rtl,
4745 move the result to the real return registers. Otherwise, do
4746 a group load from decl_rtl for a named return. */
4747 if (GET_CODE (decl_rtl) == PARALLEL)
4748 emit_group_move (real_decl_rtl, decl_rtl);
              else
                emit_group_load (real_decl_rtl, decl_rtl,
4751 TREE_TYPE (decl_result),
4752 int_size_in_bytes (TREE_TYPE (decl_result)));
4754 /* In the case of complex integer modes smaller than a word, we'll
4755 need to generate some non-trivial bitfield insertions. Do that
4756 on a pseudo and not the hard register. */
4757 else if (GET_CODE (decl_rtl) == CONCAT
4758 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4759 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
              int old_generating_concat_p;
              rtx tmp;
4764 old_generating_concat_p = generating_concat_p;
4765 generating_concat_p = 0;
4766 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4767 generating_concat_p = old_generating_concat_p;
4769 emit_move_insn (tmp, decl_rtl);
4770 emit_move_insn (real_decl_rtl, tmp);
          else
            emit_move_insn (real_decl_rtl, decl_rtl);
4777 /* If returning a structure, arrange to return the address of the value
4778 in a place where debuggers expect to find it.
4780 If returning a structure PCC style,
4781 the caller also depends on this value.
4782 And cfun->returns_pcc_struct is not necessarily set. */
4783 if (cfun->returns_struct
4784 || cfun->returns_pcc_struct)
4786 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;
4790 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4791 type = TREE_TYPE (type);
      else
        value_address = XEXP (value_address, 0);
4795 outgoing = targetm.calls.function_value (build_pointer_type (type),
4796 current_function_decl, true);
4798 /* Mark this as a function return value so integrate will delete the
4799 assignment and USE below when inlining this function. */
4800 REG_FUNCTION_VALUE_P (outgoing) = 1;
4802 /* The address may be ptr_mode and OUTGOING may be Pmode. */
      value_address = convert_memory_address (GET_MODE (outgoing),
                                              value_address);
4806 emit_move_insn (outgoing, value_address);
      /* Show return register used to hold result (in this case the address
         of the value).  */
4810 crtl->return_rtx = outgoing;
  /* Emit the actual code to clobber the return register.  */
  {
    rtx seq;

    start_sequence ();
    clobber_return_register ();
    expand_naked_return ();
    seq = get_insns ();
    end_sequence ();

    emit_insn_after (seq, clobber_after);
  }
4826 /* Output the label for the naked return from the function. */
4827 emit_label (naked_return_label);
4829 /* @@@ This is a kludge. We want to ensure that instructions that
4830 may trap are not moved into the epilogue by scheduling, because
4831 we don't always emit unwind information for the epilogue. */
4832 if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
4833 emit_insn (gen_blockage ());
4835 /* If stack protection is enabled for this function, check the guard. */
4836 if (crtl->stack_protect_guard)
4837 stack_protect_epilogue ();
  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0;

      emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
      emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
    }
  /* ??? This should no longer be necessary since the "stupid" register
     allocator is no longer with us, but there are some parts of the
     compiler (e.g. reload_combine, and sh mach_dep_reorg) that still try
     to compute their own lifetime info instead of using the general
     framework.  */
4855 use_return_register ();
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
         generated stack slot may not be a valid memory address, so we
         have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret),
                      crtl->args.internal_arg_pointer);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();
    }

  return ret;
}
/* Extend a vector that records the INSN_UIDs of INSNS
   (a list of one or more insns).  */

static void
record_insns (rtx insns, VEC(int,heap) **vecp)
{
  rtx tmp;

  for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
    VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
}
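
/* Sketch (not GCC code): how record_insns pairs with the membership
   queries defined below.  After a prologue sequence SEQ is generated,
   its UIDs are recorded, and later passes ask prologue_epilogue_contains
   before moving an insn across the boundary.  */
#if 0
  record_insns (seq, &prologue);               /* remember INSN_UIDs */
  if (prologue_epilogue_contains (some_insn))  /* hypothetical query site */
    ;  /* leave the insn where it is */
#endif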
/* Set the locator of the insn chain starting at INSN to LOC.  */
static void
set_insn_locators (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
        INSN_LOCATOR (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
   be running after reorg, SEQUENCE rtl is possible.  */

static int
contains (const_rtx insn, VEC(int,heap) **vec)
{
  int i, j;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
              == VEC_index (int, *vec, j))
            count++;
      return count;
    }
  else
    {
      for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
        if (INSN_UID (insn) == VEC_index (int, *vec, j))
          return 1;
    }
  return 0;
}
int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, &prologue))
    return 1;
  if (contains (insn, &epilogue))
    return 1;
  return 0;
}

int
sibcall_epilogue_contains (const_rtx insn)
{
  if (sibcall_epilogue)
    return contains (insn, &sibcall_epilogue);
  return 0;
}
#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (basic_block bb)
{
  emit_jump_insn_after (gen_return (), BB_END (bb));
}
#endif /* HAVE_return */
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

static void
thread_prologue_and_epilogue_insns (void)
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#if defined (HAVE_epilogue) || defined(HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif
  edge_iterator ei;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Insert an explicit USE for the frame pointer
         if the profiling is on and the frame pointer is required.  */
      if (crtl->profile && frame_pointer_needed)
        emit_use (hard_frame_pointer_rtx);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, &prologue);
      emit_note (NOTE_INSN_PROLOGUE_END);

#ifndef PROFILE_BEFORE_PROLOGUE
      /* Ensure that instructions are not moved into the prologue when
         profiling is on.  The call to the profiling routine can be
         emitted within the live range of a call-clobbered register.  */
      if (crtl->profile)
        emit_insn (gen_blockage ());
#endif

      seq = get_insns ();
      end_sequence ();
      set_insn_locators (seq, prologue_locator);

      /* Can't deal with multiple successors of the entry block
         at the moment.  Function should always have at least one
         entry point.  */
      gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));

      insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
      inserted = 1;
    }
#endif
  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

  rtl_profile_for_bb (EXIT_BLOCK_PTR);
5037 if (optimize && HAVE_return)
5039 /* If we're allowed to generate a simple return instruction,
5040 then by definition we don't need a full epilogue. Examine
5041 the block that falls through to EXIT. If it does not
5042 contain any code, examine its predecessors and try to
5043 emit (conditional) return instructions. */
5048 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5049 if (e->flags & EDGE_FALLTHRU)
          /* Verify that there are no active instructions in the last block.  */
          label = BB_END (last);
          while (label && !LABEL_P (label))
            {
              if (active_insn_p (label))
                break;
              label = PREV_INSN (label);
            }
5064 if (BB_HEAD (last) == label && LABEL_P (label))
5068 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5070 basic_block bb = e->src;
5073 if (bb == ENTRY_BLOCK_PTR)
5080 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5086 /* If we have an unconditional jump, we can replace that
5087 with a simple return instruction. */
5088 if (simplejump_p (jump))
5090 emit_return_into_block (bb);
5094 /* If we have a conditional jump, we can try to replace
5095 that with a conditional return instruction. */
5096 else if (condjump_p (jump))
5098 if (! redirect_jump (jump, 0, 0))
5104 /* If this block has only one successor, it both jumps
                     and falls through to the fallthru block, so we can't
                     delete it.  */
5107 if (single_succ_p (bb))
5119 /* Fix up the CFG for the successful change we just made. */
5120 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5123 /* Emit a return insn for the exit fallthru block. Whether
5124 this is still reachable will be determined later. */
5126 emit_barrier_after (BB_END (last));
5127 emit_return_into_block (last);
5128 epilogue_end = BB_END (last);
5129 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5134 /* Find the edge that falls through to EXIT. Other edges may exist
5135 due to RETURN instructions, but those don't need epilogues.
5136 There really shouldn't be a mixture -- either all should have
5137 been converted or none, however... */
5139 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5140 if (e->flags & EDGE_FALLTHRU)
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
      seq = gen_epilogue ();
      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, &epilogue);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (e->src)))
        goto epilogue_done;
5170 /* We have a fall-through edge to the exit block, the source is not
5171 at the end of the function, and there will be an assembler epilogue
5172 at the end of the function.
5173 We can't use force_nonfallthru here, because that would try to
5174 use return. Inserting a jump 'by hand' is extremely messy, so
5175 we take advantage of cfg_layout_finalize using
5176 fixup_fallthru_exit_predecessor. */
5177 cfg_layout_initialize (0);
5178 FOR_EACH_BB (cur_bb)
5179 if (cur_bb->index >= NUM_FIXED_BLOCKS
5180 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5181 cur_bb->aux = cur_bb->next_bb;
5182 cfg_layout_finalize ();
    }
epilogue_done:
  default_rtl_profile ();

  if (inserted)
    commit_edge_insertions ();
  /* The epilogue insns we inserted may cause the exit edge to no longer
     be fallthru.  */
5193 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5195 if (((e->flags & EDGE_FALLTHRU) != 0)
5196 && returnjump_p (BB_END (e->src)))
5197 e->flags &= ~EDGE_FALLTHRU;
5201 #ifdef HAVE_sibcall_epilogue
5202 /* Emit sibling epilogues before any sibling call sites. */
5203 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5205 basic_block bb = e->src;
5206 rtx insn = BB_END (bb);
      if (!CALL_P (insn)
          || ! SIBLING_CALL_P (insn))
5216 emit_insn (gen_sibcall_epilogue ());
5220 /* Retain a map of the epilogue insns. Used in life analysis to
5221 avoid getting rid of sibcall epilogue insns. Do this before we
5222 actually emit the sequence. */
5223 record_insns (seq, &sibcall_epilogue);
5224 set_insn_locators (seq, epilogue_locator);
5226 emit_insn_before (seq, insn);
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so strict about the
         existence of such a note.  Also possibly move
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
         info generation.  */
      for (insn = epilogue_end; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
        }
    }
#endif
5251 /* Threading the prologue and epilogue changes the artificial refs
5252 in the entry and exit blocks. */
5253 epilogue_completed = 1;
5254 df_update_entry_exit_and_calls ();
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
5263 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;
5267 if ((len = VEC_length (int, prologue)) > 0)
5271 /* Scan from the beginning until we reach the last prologue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
5274 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5278 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5281 else if (contains (insn, &prologue))
5291 /* Find the prologue-end note if we haven't already, and
5292 move it to just after the last prologue insn. */
              for (note = last; (note = NEXT_INSN (note));)
                if (NOTE_P (note)
                    && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
                  break;
          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
5304 reorder_insns (note, note, last);
5308 if ((len = VEC_length (int, epilogue)) > 0)
5312 /* Scan from the end until we reach the first epilogue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
5315 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5319 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5322 else if (contains (insn, &epilogue))
5332 /* Find the epilogue-begin note if we haven't already, and
5333 move it to just before the first epilogue insn. */
              for (note = insn; (note = PREV_INSN (note));)
                if (NOTE_P (note)
                    && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
                  break;
5342 if (PREV_INSN (last) != note)
5343 reorder_insns (note, note, PREV_INSN (last));
5346 #endif /* HAVE_prologue or HAVE_epilogue */
/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}

/* Returns the raw (mangled) name of the current function.  */
const char *
current_function_assembler_name (void)
{
  return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
}
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  current_function_uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
        func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
                                                 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
        *slot = type;
    }
}
/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    t = TREE_TYPE (t);
  t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
        used_types_insert_helper (t, cfun);
      else
        /* So this might be a type referenced by a global variable.
           Record that type so that we can later decide to emit its debug
           information.  */
        types_used_by_cur_var_decl =
          tree_cons (t, NULL, types_used_by_cur_var_decl);
    }
}
/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
                                iterative_hash_object (entry->var_decl, 0));
}
/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
types_used_by_vars_do_hash (const void *x)
{
  const struct types_used_by_vars_entry *entry =
    (const struct types_used_by_vars_entry *) x;

  return hash_types_used_by_vars_entry (entry);
}
/* Equality function of the types_used_by_vars_entry hash table.  */

int
types_used_by_vars_eq (const void *x1, const void *x2)
{
  const struct types_used_by_vars_entry *e1 =
    (const struct types_used_by_vars_entry *) x1;
  const struct types_used_by_vars_entry *e2 =
    (const struct types_used_by_vars_entry *) x2;

  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}
/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      void **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
        types_used_by_vars_hash =
          htab_create_ggc (37, types_used_by_vars_do_hash,
                           types_used_by_vars_eq, NULL);
      slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
                                       hash_types_used_by_vars_entry (&e),
                                       INSERT);
      if (*slot == NULL)
        {
          struct types_used_by_vars_entry *entry;
          entry = (struct types_used_by_vars_entry *) ggc_alloc
                    (sizeof (struct types_used_by_vars_entry));
          entry->type = type;
          entry->var_decl = var_decl;
          *slot = entry;
        }
    }
}
5477 struct rtl_opt_pass pass_leaf_regs =
5483 rest_of_handle_check_leaf_regs, /* execute */
5486 0, /* static_pass_number */
5488 0, /* properties_required */
5489 0, /* properties_provided */
5490 0, /* properties_destroyed */
5491 0, /* todo_flags_start */
5492 0 /* todo_flags_finish */
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();
  return 0;
}
5510 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5514 "pro_and_epilogue", /* name */
5516 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5519 0, /* static_pass_number */
5520 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5521 0, /* properties_required */
5522 0, /* properties_provided */
5523 0, /* properties_destroyed */
5524 TODO_verify_flow, /* todo_flags_start */
5527 TODO_df_finish | TODO_verify_rtl_sharing |
5528 TODO_ggc_collect /* todo_flags_finish */
5533 /* This mini-pass fixes fall-out from SSA in asm statements that have
5534 in-out constraints. Say you start with
5537 asm ("": "+mr" (inout));
5540 which is transformed very early to use explicit output and match operands:
5543 asm ("": "=mr" (inout) : "0" (inout));
5546 Or, after SSA and copyprop,
5548 asm ("": "=mr" (inout_2) : "0" (inout_1));
5551 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
5552 they represent two separate values, so they will get different pseudo
5553 registers during expansion. Then, since the two operands need to match
5554 per the constraints, but use different pseudo registers, reload can
5555 only register a reload for these operands. But reloads can only be
5556 satisfied by hardregs, not by memory, so we need a register for this
5557 reload, just because we are presented with non-matching operands.
5558 So, even though we allow memory for this operand, no memory can be
5559 used for it, just because the two operands don't match. This can
5560 cause reload failures on register-starved targets.
   So this is a symptom of reload not being able to use memory for reloads
   or, alternatively, a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   into

     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
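
/* User-level illustration (a sketch, not part of this file) of the in-out
   asm operand this pass is concerned with; a "+" constraint is split very
   early into an explicit output plus a matching "0" input, as described
   above.  */
#if 0
int
twice (int x)
{
  asm ("" : "+mr" (x));  /* becomes "=mr" (x_out) : "0" (x_in) */
  return x + x;
}
#endif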
static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
        continue;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
        continue;

      gcc_assert (match < noutputs);
5601 output = SET_DEST (p_sets[match]);
5602 input = RTVEC_ELT (inputs, i);
5603 /* Only do the transformation for pseudos. */
5604 if (! REG_P (output)
5605 || rtx_equal_p (output, input)
5606 || (GET_MODE (input) != VOIDmode
5607 && GET_MODE (input) != GET_MODE (output)))
      /* We can't do anything if the output is also used as input,
         as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
          break;
      if (j != ninputs)
        continue;
      /* Avoid changing the same input several times.  For
         asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
         only change in once (to out1), rather than changing it
         first to out1 and afterwards to out2.  */
      if (i > 0)
        {
          for (j = 0; j < noutputs; j++)
            if (output_matched[j] && input == SET_DEST (p_sets[j]))
              break;
          if (j != noutputs)
            continue;
        }
      output_matched[match] = true;
      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);
5638 /* Now replace all mentions of the input with output. We can't
5639 just replace the occurrence in inputs[i], as the register might
5640 also be used in some other input (or even in an address of an
5641 output), which would mean possibly increasing the number of
5642 inputs by one (namely 'output' in addition), which might pose
5643 a too complicated problem for reload to solve. E.g. this situation:
5645 asm ("" : "=r" (output), "=m" (input) : "0" (input))
5647 Here 'input' is used in two occurrences as input (once for the
5648 input operand, once for the address in the second output operand).
5649 If we would replace only the occurrence of the input operand (to
5650 make the matching) we would be left with this:
5653 asm ("" : "=r" (output), "=m" (input) : "0" (output))
5655 Now we suddenly have two different input values (containing the same
5656 value, but different pseudos) where we formerly had only one.
5657 With more complicated asms this might lead to reload failures
         which wouldn't have happened without this pass.  So, iterate over
5659 all operands and replace all occurrences of the register used. */
      for (j = 0; j < noutputs; j++)
        if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
            && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
          SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
                                              input, output);
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
          RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
                                               input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
        {
          if (!INSN_P (insn))
            continue;

          pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL)
            p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
          else if (GET_CODE (pat) == SET)
            p_sets = &PATTERN (insn), noutputs = 1;
          else
            continue;

          if (GET_CODE (*p_sets) == SET
              && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
            match_asm_constraints_1 (insn, p_sets, noutputs);
        }
    }

  return TODO_df_finish;
}
5712 struct rtl_opt_pass pass_match_asm_constraints =
5716 "asmcons", /* name */
5718 rest_of_match_asm_constraints, /* execute */
5721 0, /* static_pass_number */
5723 0, /* properties_required */
5724 0, /* properties_provided */
5725 0, /* properties_destroyed */
5726 0, /* todo_flags_start */
5727 TODO_dump_func /* todo_flags_finish */
5732 #include "gt-function.h"