/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* $FreeBSD: src/contrib/gcc/function.c,v 1.6.2.3 2002/06/20 23:12:27 obrien Exp $ */
/* $DragonFly: src/contrib/gcc/Attic/function.c,v 1.2 2003/06/17 04:23:59 dillon Exp $ */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
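#if 0
/* Illustrative sketch only, not part of the original source: the shape of
   the front-end calling sequence described above (argument lists are
   simplified and vary between GCC versions).  */
  expand_function_start (fndecl, 0);     /* before the body is parsed */
  /* ... expand each statement of the function body, calling
     assign_stack_local for locals that need stack slots ... */
  expand_function_end (input_filename, lineno, 0);  /* after parsing the body */
#endif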
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
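/* Illustrative examples (not in the original source), with ALIGN == 8:
   FLOOR_ROUND (-3, 8) == -8 and CEIL_ROUND (-3, 8) == 0 in two's
   complement, whereas (-3 / 8) * 8 could yield 0 under truncating
   division, which is why these macros avoid division.  */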
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */

int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */

int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */

int current_function_uses_only_leaf_regs;

/* Nonzero if the function being compiled issues a computed jump.  */

int current_function_has_computed_jump;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;
/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* Language-specific reason why the current function cannot be made inline.  */
char *current_function_cannot_inline;

/* Nonzero if instrumentation calls for function entry and exit should be
   generated.  */
int current_function_instrument_entry_exit;

/* Nonzero if memory access checking should be enabled in the current
   function.  */
int current_function_check_memory_usage;
/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* List (chain of EXPR_LIST) of stack slots that hold the current handlers
   for nonlocal gotos.  There is one for every nonlocal label in the function;
   this list matches the one in nonlocal_labels.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slots;

/* List (chain of EXPR_LIST) of labels heading the current handlers for
   nonlocal gotos.  */

rtx nonlocal_goto_handler_labels;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;
/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */

rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */

rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */

tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;
/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;
/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;
/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */

int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;
/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inline function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */

int target_temp_slot_level;
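#if 0
/* Illustrative sketch only, not part of the original source: how the
   nesting levels are typically used by callers elsewhere in the compiler.  */
  push_temp_slots ();                      /* enter a new level */
  t = assign_stack_temp (DImode, 8, 0);    /* slot lives at this level */
  /* ... emit code that uses T ... */
  free_temp_slots ();                      /* unkept slots become reusable */
  pop_temp_slots ();                       /* return to enclosing level */
#endif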
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry {
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
                                            int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
                                              int, tree));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack  PROTO((struct function *, rtx, tree,
                                       enum machine_mode, enum machine_mode,
                                       int, int, int, struct hash_table *));
static void fixup_var_refs      PROTO((rtx, enum machine_mode, int,
                                       struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement       PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int, struct hash_table *));
static void fixup_var_refs_1    PROTO((rtx, enum machine_mode, rtx *, rtx,
                                       struct fixup_replacement **));
static rtx fixup_memory_subreg  PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1        PROTO((rtx, rtx));
static void optimize_bit_field  PROTO((rtx, rtx, rtx *));
static void instantiate_decls   PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl   PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers     PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below           PROTO((struct args_size *, enum machine_mode,
                                       tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down          PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse     PROTO((tree));
static int all_blocks           PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns        PROTO((rtx));
static int contains             PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
static boolean purge_addressof_1 PROTO((rtx *, rtx, int, int,
                                        struct hash_table *));
static int is_addressof         PROTO ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
                                                       struct hash_table *,
                                                       hash_table_key));
static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
static int insns_for_mem_walk   PROTO ((rtx *, void *));
static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->has_computed_jump = current_function_has_computed_jump;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->cannot_inline = current_function_cannot_inline;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slots = nonlocal_goto_handler_slots;
  p->nonlocal_goto_handler_labels = nonlocal_goto_handler_labels;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;
  p->check_memory_usage = current_function_check_memory_usage;
  p->instrument_entry_exit = current_function_instrument_entry_exit;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_has_computed_jump = p->has_computed_jump;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  current_function_cannot_inline = p->cannot_inline;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slots = p->nonlocal_goto_handler_slots;
  nonlocal_goto_handler_labels = p->nonlocal_goto_handler_labels;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;

  current_function_args_info = p->args_info;
  current_function_check_memory_usage = p->check_memory_usage;
  current_function_instrument_entry_exit = p->instrument_entry_exit;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode,
                    queue->unsignedp, 0);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
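#if 0
/* Illustrative sketch only, not part of the original source: a language
   front end compiling a nested function saves and restores the outer
   function's state around it.  */
  push_function_context ();    /* save state of the containing function */
  /* ... generate RTL for the nested function ... */
  pop_function_context ();     /* restore state; queued fixups are applied */
#endif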
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
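#if 0
/* Illustrative only, not part of the original source: allocate a
   mode-aligned word-sized slot (ALIGN == 0 means "according to MODE").  */
  rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
#endif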
/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  int alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  align = GET_MODE_ALIGNMENT (mode);
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;

  if (! type)
    type = type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && (!flag_strict_aliasing
            || (alias_set && p->alias_set == alias_set))
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {
        if (p->align == align && p->size == size)
          {
            best_p = 0;
            break;
          }
        best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode
          /* We can't split slots if -fstrict-aliasing because the
             information about the alias set for the new slot will be
             lost.  */
          && !flag_strict_aliasing)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        abort ();
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;

  return p->slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
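#if 0
/* Illustrative only, not part of the original source: a 16-byte BLKmode
   scratch temporary freed by the next free_temp_slots (KEEP == 0).  */
  rtx scratch = assign_stack_temp (BLKmode, 16, 0);
#endif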
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
        /* If this slot is below the current TEMP_SLOT_LEVEL, then it
           needs to be preserved.  This can happen if a temporary in
           the RTL_EXPR was addressed; preserve_temp_slots will move
           the temporary into a higher level.  */
        if (temp_slot_level <= p->level)
          p->in_use = 0;
        else
          p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                            0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
         Use the lower parts address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), Pmode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
     struct hash_table *ht;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
        new = function->parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (regno < max_parm_reg)
        new = parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
                       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
                        stack == 0, ht);
  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    return;

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0, 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
                                0);
          end_sequence ();
        }
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
                        0, 0);
  end_sequence ();
}
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
1832 /* Scan the insn-chain starting with INSN for refs to VAR
1833 and fix them up. TOPLEVEL is nonzero if this chain is the
1834 main chain of insns for the current function. */
1837 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
1839 enum machine_mode promoted_mode;
1843 struct hash_table *ht;
1846 rtx insn_list = NULL_RTX;
1848 /* If we already know which INSNs reference VAR there's no need
1849 to walk the entire instruction chain. */
1852 insn_list = ((struct insns_for_mem_entry *)
1853 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1854 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1855 insn_list = XEXP (insn_list, 1);
1860 rtx next = NEXT_INSN (insn);
1861 rtx set, prev, prev_set;
1864 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1866 /* If this is a CLOBBER of VAR, delete it.
1868 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1869 and REG_RETVAL notes too. */
1870 if (GET_CODE (PATTERN (insn)) == CLOBBER
1871 && (XEXP (PATTERN (insn), 0) == var
1872 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1873 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1874 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1876 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1877 /* The REG_LIBCALL note will go away since we are going to
1878 turn INSN into a NOTE, so just delete the
1879 corresponding REG_RETVAL note. */
1880 remove_note (XEXP (note, 0),
1881 find_reg_note (XEXP (note, 0), REG_RETVAL,
1884 /* In unoptimized compilation, we shouldn't call delete_insn
1885 except in jump.c doing warnings. */
1886 PUT_CODE (insn, NOTE);
1887 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1888 NOTE_SOURCE_FILE (insn) = 0;
1891 /* The insn to load VAR from a home in the arglist
1892 is now a no-op. When we see it, just delete it.
1893 Similarly if this is storing VAR from a register from which
1894 it was loaded in the previous insn. This will occur
1895 when an ADDRESSOF was made for an arglist slot. */
1897 && (set = single_set (insn)) != 0
1898 && SET_DEST (set) == var
1899 /* If this represents the result of an insn group,
1900 don't delete the insn. */
1901 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1902 && (rtx_equal_p (SET_SRC (set), var)
1903 || (GET_CODE (SET_SRC (set)) == REG
1904 && (prev = prev_nonnote_insn (insn)) != 0
1905 && (prev_set = single_set (prev)) != 0
1906 && SET_DEST (prev_set) == SET_SRC (set)
1907 && rtx_equal_p (SET_SRC (prev_set), var))))
1909 /* In unoptimized compilation, we shouldn't call delete_insn
1910 except in jump.c doing warnings. */
1911 PUT_CODE (insn, NOTE);
1912 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1913 NOTE_SOURCE_FILE (insn) = 0;
1914 if (insn == last_parm_insn)
1915             last_parm_insn = PREV_INSN (next);
1916         }
1917       else
1918         {
1919           struct fixup_replacement *replacements = 0;
1920 rtx next_insn = NEXT_INSN (insn);
1922 if (SMALL_REGISTER_CLASSES)
1924 /* If the insn that copies the results of a CALL_INSN
1925 into a pseudo now references VAR, we have to use an
1926 intermediate pseudo since we want the life of the
1927 return value register to be only a single insn.
1929 If we don't use an intermediate pseudo, such things as
1930                  address computations to make the address of VAR valid
1931                  (if it is not already) can be placed between the CALL_INSN and INSN.
1933 To make sure this doesn't happen, we record the destination
1934                  of the CALL_INSN and see if the next insn uses both that
1935                  and VAR.  */
1937 if (call_dest != 0 && GET_CODE (insn) == INSN
1938 && reg_mentioned_p (var, PATTERN (insn))
1939 && reg_mentioned_p (call_dest, PATTERN (insn)))
1941 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1943 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1945                   PATTERN (insn) = replace_rtx (PATTERN (insn),
1946                                                 call_dest, temp);
1949 if (GET_CODE (insn) == CALL_INSN
1950 && GET_CODE (PATTERN (insn)) == SET)
1951 call_dest = SET_DEST (PATTERN (insn));
1952 else if (GET_CODE (insn) == CALL_INSN
1953 && GET_CODE (PATTERN (insn)) == PARALLEL
1954 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1955                 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1956               else
1957                 call_dest = 0;
1958             }
1960 /* See if we have to do anything to INSN now that VAR is in
1961 memory. If it needs to be loaded into a pseudo, use a single
1962 pseudo for the entire insn in case there is a MATCH_DUP
1963 between two operands. We pass a pointer to the head of
1964 a list of struct fixup_replacements. If fixup_var_refs_1
1965 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1966 it will record them in this list.
1968              If it allocated a pseudo for any replacement, we copy into
1969              it here.  */
1971           fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1972                             &replacements);
1974 /* If this is last_parm_insn, and any instructions were output
1975 after it to fix it up, then we must set last_parm_insn to
1976 the last such instruction emitted. */
1977 if (insn == last_parm_insn)
1978 last_parm_insn = PREV_INSN (next_insn);
1980 while (replacements)
1982               if (GET_CODE (replacements->new) == REG)
1983                 {
1984                   rtx insert_before;
1985                   rtx seq;
1987 /* OLD might be a (subreg (mem)). */
1988                   if (GET_CODE (replacements->old) == SUBREG)
1989                     replacements->old
1990                       = fixup_memory_subreg (replacements->old, insn, 0);
1991                   else
1992                     replacements->old
1993                       = fixup_stack_1 (replacements->old, insn);
1995 insert_before = insn;
1997 /* If we are changing the mode, do a conversion.
1998 This might be wasteful, but combine.c will
1999 eliminate much of the waste. */
2001                   if (GET_MODE (replacements->new)
2002                       != GET_MODE (replacements->old))
2003                     {
2004                       start_sequence ();
2005                       convert_move (replacements->new,
2006                                     replacements->old, unsignedp);
2007                       seq = gen_sequence ();
2008                       end_sequence ();
2009                     }
2010                   else
2011                     seq = gen_move_insn (replacements->new,
2012                                          replacements->old);
2014                   emit_insn_before (seq, insert_before);
2017 replacements = replacements->next;
2021 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
2022 But don't touch other insns referred to by reg-notes;
2023 we will get them elsewhere. */
2024 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2025             if (GET_CODE (note) != INSN_LIST)
2026               XEXP (note, 0)
2027                 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
2032       if (insn_list)
2033         {
2034           insn = XEXP (insn_list, 0);
2035           insn_list = XEXP (insn_list, 1);
2036         }
2037       else
2038         insn = next;
2039     }
2040 }
2042 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
2043 See if the rtx expression at *LOC in INSN needs to be changed.
2045 REPLACEMENTS is a pointer to a list head that starts out zero, but may
2046 contain a list of original rtx's and replacements. If we find that we need
2047 to modify this insn by replacing a memory reference with a pseudo or by
2048 making a new MEM to implement a SUBREG, we consult that list to see if
2049 we have already chosen a replacement. If none has already been allocated,
2050 we allocate it and update the list. fixup_var_refs_insns will copy VAR
2051 or the SUBREG, as appropriate, to the pseudo. */
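/* Worked example (added commentary, not part of the original source): if
   VAR is the stack slot (mem:SI (plus fp -8)) and an insn references it
   twice where only registers are valid, both references are rewritten to
   the same new pseudo and a single load from the slot is emitted before
   the insn, so a MATCH_DUP constraint between the two operands remains
   satisfied.  */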
2054 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
2056 enum machine_mode promoted_mode;
2059 struct fixup_replacement **replacements;
2062 register rtx x = *loc;
2063 RTX_CODE code = GET_CODE (x);
2065 register rtx tem, tem1;
2066 struct fixup_replacement *replacement;
2068   switch (code)
2069     {
2070     case ADDRESSOF:
2071       if (XEXP (x, 0) == var)
2073 /* Prevent sharing of rtl that might lose. */
2074 rtx sub = copy_rtx (XEXP (var, 0));
2076 if (! validate_change (insn, loc, sub, 0))
2077             {
2078               rtx y = gen_reg_rtx (GET_MODE (sub));
2079               rtx seq, new_insn;
2081 /* We should be able to replace with a register or all is lost.
2082 Note that we can't use validate_change to verify this, since
2083 we're not caring for replacing all dups simultaneously. */
2084               if (! validate_replace_rtx (*loc, y, insn))
2085                 abort ();
2087 /* Careful! First try to recognize a direct move of the
2088 value, mimicking how things are done in gen_reload wrt
2089 PLUS. Consider what happens when insn is a conditional
2090 move instruction and addsi3 clobbers flags. */
2092               start_sequence ();
2093               new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
2094               seq = gen_sequence ();
2095               end_sequence ();
2097               if (recog_memoized (new_insn) < 0)
2098                 {
2099                   /* That failed.  Fall back on force_operand and hope.  */
2100                   start_sequence ();
2102                   force_operand (sub, y);
2103                   seq = gen_sequence ();
2104                   end_sequence ();
2105                 }
2108 /* Don't separate setter from user. */
2109 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
2110 insn = PREV_INSN (insn);
2113               emit_insn_before (seq, insn);
2114             }
2115         }
2116       return;
2118     case MEM:
2121       /* If we already have a replacement, use it.  Otherwise,
2122 try to fix up this address in case it is invalid. */
2124 replacement = find_fixup_replacement (replacements, var);
2125       if (replacement->new)
2126         {
2127           *loc = replacement->new;
2128           return;
2129         }
2131       *loc = replacement->new = x = fixup_stack_1 (x, insn);
2133 /* Unless we are forcing memory to register or we changed the mode,
2134 we can leave things the way they are if the insn is valid. */
2136 INSN_CODE (insn) = -1;
2137 if (! flag_force_mem && GET_MODE (x) == promoted_mode
2138           && recog_memoized (insn) >= 0)
2139         return;
2141       *loc = replacement->new = gen_reg_rtx (promoted_mode);
2142       return;
2145 /* If X contains VAR, we need to unshare it here so that we update
2146 each occurrence separately. But all identical MEMs in one insn
2147 must be replaced with the same rtx because of the possibility of
2150 if (reg_mentioned_p (var, x))
2152 replacement = find_fixup_replacement (replacements, x);
2153 if (replacement->new == 0)
2154 replacement->new = copy_most_rtx (x, var);
2156 *loc = x = replacement->new;
2170     case SIGN_EXTRACT:
2171     case ZERO_EXTRACT:
2172       /* Note that in some cases those types of expressions are altered
2173 by optimize_bit_field, and do not survive to get here. */
2174 if (XEXP (x, 0) == var
2175 || (GET_CODE (XEXP (x, 0)) == SUBREG
2176 && SUBREG_REG (XEXP (x, 0)) == var))
2178 /* Get TEM as a valid MEM in the mode presently in the insn.
2180 We don't worry about the possibility of MATCH_DUP here; it
2181 is highly unlikely and would be tricky to handle. */
2183           tem = XEXP (x, 0);
2184           if (GET_CODE (tem) == SUBREG)
2186 if (GET_MODE_BITSIZE (GET_MODE (tem))
2187 > GET_MODE_BITSIZE (GET_MODE (var)))
2189 replacement = find_fixup_replacement (replacements, var);
2190 if (replacement->new == 0)
2191 replacement->new = gen_reg_rtx (GET_MODE (var));
2192 SUBREG_REG (tem) = replacement->new;
2194               else
2195                 tem = fixup_memory_subreg (tem, insn, 0);
2196             }
2197           else
2198             tem = fixup_stack_1 (tem, insn);
2200 /* Unless we want to load from memory, get TEM into the proper mode
2201 for an extract from memory. This can only be done if the
2202 extract is at a constant position and length. */
2204 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2205 && GET_CODE (XEXP (x, 2)) == CONST_INT
2206 && ! mode_dependent_address_p (XEXP (tem, 0))
2207 && ! MEM_VOLATILE_P (tem))
2209 enum machine_mode wanted_mode = VOIDmode;
2210 enum machine_mode is_mode = GET_MODE (tem);
2211 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2213 #ifdef HAVE_extzv
2214               if (GET_CODE (x) == ZERO_EXTRACT)
2215                 {
2216                   wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
2217                   if (wanted_mode == VOIDmode)
2218                     wanted_mode = word_mode;
2219                 }
2220 #endif
2221 #ifdef HAVE_extv
2222               if (GET_CODE (x) == SIGN_EXTRACT)
2223                 {
2224                   wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
2225                   if (wanted_mode == VOIDmode)
2226                     wanted_mode = word_mode;
2227                 }
2228 #endif
2229 /* If we have a narrower mode, we can do something. */
2230 if (wanted_mode != VOIDmode
2231 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2233 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2234 rtx old_pos = XEXP (x, 2);
2237 /* If the bytes and bits are counted differently, we
2238 must adjust the offset. */
2239 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2240 offset = (GET_MODE_SIZE (is_mode)
2241 - GET_MODE_SIZE (wanted_mode) - offset);
2243 pos %= GET_MODE_BITSIZE (wanted_mode);
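              /* Worked example (added commentary, not part of the original
                 source): extracting an 8-bit field at bit position 24 from a
                 (mem:SI ...) with wanted_mode QImode gives
                 offset = 24 / 8 = 3; if bytes and bits are numbered from
                 opposite ends, offset becomes 4 - 1 - 3 = 0; either way pos
                 becomes 24 % 8 = 0 within the narrower QImode reference.  */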
2245 newmem = gen_rtx_MEM (wanted_mode,
2246 plus_constant (XEXP (tem, 0), offset));
2247 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2248 MEM_COPY_ATTRIBUTES (newmem, tem);
2250 /* Make the change and see if the insn remains valid. */
2251 INSN_CODE (insn) = -1;
2252 XEXP (x, 0) = newmem;
2253 XEXP (x, 2) = GEN_INT (pos);
2255               if (recog_memoized (insn) >= 0)
2256                 return;
2258               /* Otherwise, restore old position.  XEXP (x, 0) will be
2259                  restored later.  */
2260               XEXP (x, 2) = old_pos;
2261             }
2262         }
2264 /* If we get here, the bitfield extract insn can't accept a memory
2265 reference. Copy the input into a register. */
2267 tem1 = gen_reg_rtx (GET_MODE (tem));
2268           emit_insn_before (gen_move_insn (tem1, tem), insn);
2269           XEXP (x, 0) = tem1;
2270           return;
2274     case SUBREG:
2275       if (SUBREG_REG (x) == var)
2277 /* If this is a special SUBREG made because VAR was promoted
2278 from a wider mode, replace it with VAR and call ourself
2279 recursively, this time saying that the object previously
2280 had its current mode (by virtue of the SUBREG). */
2282           if (SUBREG_PROMOTED_VAR_P (x))
2283             {
2284               *loc = var;
2285               fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2286               return;
2287             }
2289 /* If this SUBREG makes VAR wider, it has become a paradoxical
2290 SUBREG with VAR in memory, but these aren't allowed at this
2291 stage of the compilation. So load VAR into a pseudo and take
2292 a SUBREG of that pseudo. */
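          /* For instance (added commentary, not part of the original
             source), (subreg:DI (mem:SI ...) 0) is rewritten so that the
             SUBREG wraps a fresh SImode pseudo; the copy from VAR into that
             pseudo is emitted before the insn by the caller's replacement
             loop.  */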
2293 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2295 replacement = find_fixup_replacement (replacements, var);
2296 if (replacement->new == 0)
2297 replacement->new = gen_reg_rtx (GET_MODE (var));
2298               SUBREG_REG (x) = replacement->new;
2299               return;
2300             }
2302 /* See if we have already found a replacement for this SUBREG.
2303 If so, use it. Otherwise, make a MEM and see if the insn
2304 is recognized. If not, or if we should force MEM into a register,
2305 make a pseudo for this SUBREG. */
2306 replacement = find_fixup_replacement (replacements, x);
2307 if (replacement->new)
2308         {
2309           *loc = replacement->new;
2310           return;
2311         }
2313       replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2315 INSN_CODE (insn) = -1;
2316       if (! flag_force_mem && recog_memoized (insn) >= 0)
2317         return;
2319       *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2320       return;
2324     case SET:
2325       /* First do special simplification of bit-field references.  */
2326 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2327 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2328 optimize_bit_field (x, insn, 0);
2329 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2330 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2331 optimize_bit_field (x, insn, NULL_PTR);
2333 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2334 into a register and then store it back out. */
2335 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2336 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2337 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2338 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2339 > GET_MODE_SIZE (GET_MODE (var))))
2341 replacement = find_fixup_replacement (replacements, var);
2342 if (replacement->new == 0)
2343 replacement->new = gen_reg_rtx (GET_MODE (var));
2345 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2346 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2349 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2350 insn into a pseudo and store the low part of the pseudo into VAR. */
2351 if (GET_CODE (SET_DEST (x)) == SUBREG
2352 && SUBREG_REG (SET_DEST (x)) == var
2353 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2354 > GET_MODE_SIZE (GET_MODE (var))))
2356 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2357           emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2358                                                             tem)),
2359                            insn);
2364 rtx dest = SET_DEST (x);
2365 rtx src = SET_SRC (x);
2367 rtx outerdest = dest;
2370 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2371 || GET_CODE (dest) == SIGN_EXTRACT
2372 || GET_CODE (dest) == ZERO_EXTRACT)
2373 dest = XEXP (dest, 0);
2375 if (GET_CODE (src) == SUBREG)
2376 src = XEXP (src, 0);
2378 /* If VAR does not appear at the top level of the SET
2379 just scan the lower levels of the tree. */
2381 if (src != var && dest != var)
2384 /* We will need to rerecognize this insn. */
2385 INSN_CODE (insn) = -1;
2388 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2390               /* Since this case will return, ensure we fixup all the
2391                  operands here.  */
2392 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2393 insn, replacements);
2394 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2395 insn, replacements);
2396 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2397 insn, replacements);
2399 tem = XEXP (outerdest, 0);
2401 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2402 that may appear inside a ZERO_EXTRACT.
2403 This was legitimate when the MEM was a REG. */
2404 if (GET_CODE (tem) == SUBREG
2405 && SUBREG_REG (tem) == var)
2406             tem = fixup_memory_subreg (tem, insn, 0);
2407           else
2408             tem = fixup_stack_1 (tem, insn);
2409 #ifdef HAVE_insv
2410           if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2411 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2412 && ! mode_dependent_address_p (XEXP (tem, 0))
2413 && ! MEM_VOLATILE_P (tem))
2415 enum machine_mode wanted_mode;
2416 enum machine_mode is_mode = GET_MODE (tem);
2417 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2419 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2420 if (wanted_mode == VOIDmode)
2421 wanted_mode = word_mode;
2423 /* If we have a narrower mode, we can do something. */
2424 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2426 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2427 rtx old_pos = XEXP (outerdest, 2);
2430 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2431 offset = (GET_MODE_SIZE (is_mode)
2432 - GET_MODE_SIZE (wanted_mode) - offset);
2434 pos %= GET_MODE_BITSIZE (wanted_mode);
2436 newmem = gen_rtx_MEM (wanted_mode,
2437 plus_constant (XEXP (tem, 0), offset));
2438 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2439 MEM_COPY_ATTRIBUTES (newmem, tem);
2441 /* Make the change and see if the insn remains valid. */
2442 INSN_CODE (insn) = -1;
2443 XEXP (outerdest, 0) = newmem;
2444 XEXP (outerdest, 2) = GEN_INT (pos);
2446               if (recog_memoized (insn) >= 0)
2447                 return;
2449               /* Otherwise, restore old position.  XEXP (x, 0) will be
2450                  restored later.  */
2451               XEXP (outerdest, 2) = old_pos;
2452             }
2453         }
2454 #endif
2455 /* If we get here, the bit-field store doesn't allow memory
2456 or isn't located at a constant position. Load the value into
2457 a register, do the store, and put it back into memory. */
2459 tem1 = gen_reg_rtx (GET_MODE (tem));
2460 emit_insn_before (gen_move_insn (tem1, tem), insn);
2461 emit_insn_after (gen_move_insn (tem, tem1), insn);
2462           XEXP (outerdest, 0) = tem1;
2463           return;
2464         }
2467 /* STRICT_LOW_PART is a no-op on memory references
2468          and it can cause combinations to be unrecognizable,
2469          so eliminate it.  */
2471 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2472 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2474 /* A valid insn to copy VAR into or out of a register
2475 must be left alone, to avoid an infinite loop here.
2476 If the reference to VAR is by a subreg, fix that up,
2477 since SUBREG is not valid for a memref.
2478 Also fix up the address of the stack slot.
2480 Note that we must not try to recognize the insn until
2481 after we know that we have valid addresses and no
2482 (subreg (mem ...) ...) constructs, since these interfere
2483 with determining the validity of the insn. */
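      /* Sketch (added commentary, not part of the original source): for
         (set (reg r) (subreg:SI VAR 0)) the SUBREG is first rewritten into
         a plain MEM by fixup_memory_subreg; if the resulting move still is
         not recognized, gen_move_insn supplies an equivalent (possibly
         multi-insn) sequence that replaces or supersedes INSN below.  */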
2485 if ((SET_SRC (x) == var
2486 || (GET_CODE (SET_SRC (x)) == SUBREG
2487 && SUBREG_REG (SET_SRC (x)) == var))
2488 && (GET_CODE (SET_DEST (x)) == REG
2489 || (GET_CODE (SET_DEST (x)) == SUBREG
2490 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2491 && GET_MODE (var) == promoted_mode
2492 && x == single_set (insn))
2493         {
2494           rtx pat;
2496           replacement = find_fixup_replacement (replacements, SET_SRC (x));
2497 if (replacement->new)
2498 SET_SRC (x) = replacement->new;
2499 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2500 SET_SRC (x) = replacement->new
2501               = fixup_memory_subreg (SET_SRC (x), insn, 0);
2502           else
2503             SET_SRC (x) = replacement->new
2504 = fixup_stack_1 (SET_SRC (x), insn);
2506           if (recog_memoized (insn) >= 0)
2507             return;
2509           /* INSN is not valid, but we know that we want to
2510              copy SET_SRC (x) to SET_DEST (x) in some way.  So
2511              we generate the move and see whether it requires more
2512              than one insn.  If it does, we emit those insns and
2513              delete INSN.  Otherwise, we can just replace the pattern
2514              of INSN; we have already verified above that INSN has
2515              no other function than to do X.  */
2517 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2518           if (GET_CODE (pat) == SEQUENCE)
2519             {
2520               emit_insn_after (pat, insn);
2521 PUT_CODE (insn, NOTE);
2522 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2523               NOTE_SOURCE_FILE (insn) = 0;
2524             }
2525           else
2526             PATTERN (insn) = pat;
2528           return;
2531 if ((SET_DEST (x) == var
2532 || (GET_CODE (SET_DEST (x)) == SUBREG
2533 && SUBREG_REG (SET_DEST (x)) == var))
2534 && (GET_CODE (SET_SRC (x)) == REG
2535 || (GET_CODE (SET_SRC (x)) == SUBREG
2536 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2537 && GET_MODE (var) == promoted_mode
2538 && x == single_set (insn))
2539         {
2540           rtx pat;
2542           if (GET_CODE (SET_DEST (x)) == SUBREG)
2543             SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2544           else
2545             SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2547           if (recog_memoized (insn) >= 0)
2548             return;
2550 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2551           if (GET_CODE (pat) == SEQUENCE)
2552             {
2553               emit_insn_after (pat, insn);
2554 PUT_CODE (insn, NOTE);
2555 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2556               NOTE_SOURCE_FILE (insn) = 0;
2557             }
2558           else
2559             PATTERN (insn) = pat;
2561           return;
2564 /* Otherwise, storing into VAR must be handled specially
2565 by storing into a temporary and copying that into VAR
2566 with a new insn after this one. Note that this case
2567 will be used when storing into a promoted scalar since
2568 the insn will now have different modes on the input
2569 and output and hence will be invalid (except for the case
2570 of setting it to a constant, which does not need any
2571 change if it is valid). We generate extra code in that case,
2572 but combine.c will eliminate it. */
2574         {
2575           rtx temp;
2577           rtx fixeddest = SET_DEST (x);
2579 /* STRICT_LOW_PART can be discarded, around a MEM. */
2580 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2581 fixeddest = XEXP (fixeddest, 0);
2582 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2583           if (GET_CODE (fixeddest) == SUBREG)
2584             {
2585               fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2586               promoted_mode = GET_MODE (fixeddest);
2587             }
2588           else
2589             fixeddest = fixup_stack_1 (fixeddest, insn);
2591 temp = gen_reg_rtx (promoted_mode);
2593 emit_insn_after (gen_move_insn (fixeddest,
2594                                           gen_lowpart (GET_MODE (fixeddest),
2595                                                        temp)),
2596                            insn);
2598 SET_DEST (x) = temp;
2599         }
2600       break;
2602     default:
2603       break;
2604     }
2606   /* Nothing special about this RTX; fix its operands.  */
2608 fmt = GET_RTX_FORMAT (code);
2609   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2610     {
2611       if (fmt[i] == 'e')
2612         fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2613       else if (fmt[i] == 'E')
2614         {
2615           register int j;
2616           for (j = 0; j < XVECLEN (x, i); j++)
2617             fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2618                               insn, replacements);
2619         }
2620     }
2621 }
2623 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2624 return an rtx (MEM:m1 newaddr) which is equivalent.
2625 If any insns must be emitted to compute NEWADDR, put them before INSN.
2627 UNCRITICAL nonzero means accept paradoxical subregs.
2628 This is used for subregs found inside REG_NOTES. */
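/* Example (added commentary, not part of the original source): with
   4-byte words, (subreg:HI (mem:SI addr) 0) becomes (mem:HI addr) on a
   little-endian target, but (mem:HI (plus addr 2)) when BYTES_BIG_ENDIAN,
   since the low-order halfword then lives at the higher address.  */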
2630 static rtx
2631 fixup_memory_subreg (x, insn, uncritical)
2632      rtx x;
2633      rtx insn;
2634      int uncritical;
2635 {
2636   int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2637   rtx addr = XEXP (SUBREG_REG (x), 0);
2638   enum machine_mode mode = GET_MODE (x);
2639   rtx result;
2641 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2642   if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2643       && ! uncritical)
2644     abort ();
2646 if (BYTES_BIG_ENDIAN)
2647 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2648 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2649 addr = plus_constant (addr, offset);
2650   if (!flag_force_addr && memory_address_p (mode, addr))
2651     /* Shortcut if no insns need be emitted.  */
2652     return change_address (SUBREG_REG (x), mode, addr);
2653   start_sequence ();
2654   result = change_address (SUBREG_REG (x), mode, addr);
2655   emit_insn_before (gen_sequence (), insn);
2656   end_sequence ();
2657   return result;
2658 }
2660 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2661 Replace subexpressions of X in place.
2662 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2663 Otherwise return X, with its contents possibly altered.
2665 If any insns must be emitted to compute NEWADDR, put them before INSN.
2667 UNCRITICAL is as in fixup_memory_subreg. */
2670 walk_fixup_memory_subreg (x, insn, uncritical)
2671      register rtx x;
2672      rtx insn;
2673      int uncritical;
2674 {
2675   register enum rtx_code code;
2676   register char *fmt;
2677   register int i;
2679   if (x == 0)
2680     return 0;
2682   code = GET_CODE (x);
2684 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2685 return fixup_memory_subreg (x, insn, uncritical);
2687 /* Nothing special about this RTX; fix its operands. */
2689 fmt = GET_RTX_FORMAT (code);
2690   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2691     {
2692       if (fmt[i] == 'e')
2693         XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2694       else if (fmt[i] == 'E')
2695         {
2696           register int j;
2697           for (j = 0; j < XVECLEN (x, i); j++)
2698             XVECEXP (x, i, j)
2699               = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2700         }
2701     }
2702   return x;
2703 }
2705 /* For each memory ref within X, if it refers to a stack slot
2706 with an out of range displacement, put the address in a temp register
2707 (emitting new insns before INSN to load these registers)
2708 and alter the memory ref to use that register.
2709 Replace each such MEM rtx with a copy, to avoid clobberage. */
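/* For instance (added commentary, not part of the original source): if
   (mem:SI (plus (reg fp) (const_int 40000))) exceeds the target's
   displacement range, the sum is computed into a pseudo before INSN and
   the MEM is rewritten as (mem:SI (reg new-pseudo)).  */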
2712 fixup_stack_1 (x, insn)
2713      rtx x;
2714      rtx insn;
2715 {
2716   register int i;
2717   register RTX_CODE code = GET_CODE (x);
2718   register char *fmt;
2720   if (code == MEM)
2721     {
2722       register rtx ad = XEXP (x, 0);
2723 /* If we have address of a stack slot but it's not valid
2724 (displacement is too large), compute the sum in a register. */
2725 if (GET_CODE (ad) == PLUS
2726 && GET_CODE (XEXP (ad, 0)) == REG
2727 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2728 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2729 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2730 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2731           || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2732 #endif
2733 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2734 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2735 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2736 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2737         {
2738           rtx temp, seq;
2739           if (memory_address_p (GET_MODE (x), ad))
2740             return x;
2742           start_sequence ();
2743           temp = copy_to_reg (ad);
2744           seq = gen_sequence ();
2745           end_sequence ();
2746           emit_insn_before (seq, insn);
2747           return change_address (x, VOIDmode, temp);
2748         }
2749       return x;
2750     }
2752   fmt = GET_RTX_FORMAT (code);
2753   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2754     {
2755       if (fmt[i] == 'e')
2756         XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2757       else if (fmt[i] == 'E')
2758         {
2759           register int j;
2760           for (j = 0; j < XVECLEN (x, i); j++)
2761             XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2762         }
2763     }
2764   return x;
2765 }
2767 /* Optimization: a bit-field instruction whose field
2768 happens to be a byte or halfword in memory
2769 can be changed to a move instruction.
2771 We call here when INSN is an insn to examine or store into a bit-field.
2772 BODY is the SET-rtx to be altered.
2774 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2775    (Currently this is called only from function.c, and EQUIV_MEM
2776    is always 0.)  */
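/* Example (added commentary, not part of the original source): a
   ZERO_EXTRACT of 8 bits at bit position 8 within a word in memory
   satisfies the tests below (constant width and position, position a
   multiple of the width, QImode exists), so it is simply byte 1 of the
   word, up to endianness, and can be rewritten as a QImode move.  */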
2779 optimize_bit_field (body, insn, equiv_mem)
2780      rtx body;
2781      rtx insn;
2782      rtx *equiv_mem;
2783 {
2784   register rtx bitfield;
2785   int destflag;
2786   rtx seq = 0;
2787   enum machine_mode mode;
2789 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2790 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2791     bitfield = SET_DEST (body), destflag = 1;
2792   else
2793     bitfield = SET_SRC (body), destflag = 0;
2795 /* First check that the field being stored has constant size and position
2796 and is in fact a byte or halfword suitably aligned. */
2798 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2799 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2800       && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2801           != BLKmode)
2802 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2804 register rtx memref = 0;
2806 /* Now check that the containing word is memory, not a register,
2807 and that it is safe to change the machine mode. */
2809 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2810 memref = XEXP (bitfield, 0);
2811       else if (GET_CODE (XEXP (bitfield, 0)) == REG
2812                && equiv_mem != 0)
2813 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2814 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2815 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2816 memref = SUBREG_REG (XEXP (bitfield, 0));
2817       else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2818                && equiv_mem != 0
2819                && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2820 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2822       if (memref
2823           && ! mode_dependent_address_p (XEXP (memref, 0))
2824 && ! MEM_VOLATILE_P (memref))
2826 /* Now adjust the address, first for any subreg'ing
2827 that we are now getting rid of,
2828 and then for which byte of the word is wanted. */
2830           HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2831           rtx insns;
2833 /* Adjust OFFSET to count bits from low-address byte. */
2834 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2835 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2836 - offset - INTVAL (XEXP (bitfield, 1)));
2838 /* Adjust OFFSET to count bytes from low-address byte. */
2839 offset /= BITS_PER_UNIT;
2840           if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2841             {
2842 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2843 if (BYTES_BIG_ENDIAN)
2844 offset -= (MIN (UNITS_PER_WORD,
2845 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2846 - MIN (UNITS_PER_WORD,
2847 GET_MODE_SIZE (GET_MODE (memref))));
2848             }
2850           start_sequence ();
2851           memref = change_address (memref, mode,
2852 plus_constant (XEXP (memref, 0), offset));
2853           insns = get_insns ();
2854           end_sequence ();
2855           emit_insns_before (insns, insn);
2857 /* Store this memory reference where
2858 we found the bit field reference. */
2860           if (destflag)
2861             {
2862               validate_change (insn, &SET_DEST (body), memref, 1);
2863 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2865 rtx src = SET_SRC (body);
2866 while (GET_CODE (src) == SUBREG
2867 && SUBREG_WORD (src) == 0)
2868 src = SUBREG_REG (src);
2869 if (GET_MODE (src) != GET_MODE (memref))
2870 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2871 validate_change (insn, &SET_SRC (body), src, 1);
2873 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2874 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2875 /* This shouldn't happen because anything that didn't have
2876 one of these modes should have got converted explicitly
2877 and then referenced through a subreg.
2878 This is so because the original bit-field was
2879 handled by agg_mode and so its tree structure had
2880                    the same mode that memref now has.  */
2881                 abort ();
2885 rtx dest = SET_DEST (body);
2887 while (GET_CODE (dest) == SUBREG
2888 && SUBREG_WORD (dest) == 0
2889 && (GET_MODE_CLASS (GET_MODE (dest))
2890 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2891                  && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2892                      <= UNITS_PER_WORD))
2893             dest = SUBREG_REG (dest);
2895 validate_change (insn, &SET_DEST (body), dest, 1);
2897 if (GET_MODE (dest) == GET_MODE (memref))
2898             validate_change (insn, &SET_SRC (body), memref, 1);
2899           else
2900             {
2901 /* Convert the mem ref to the destination mode. */
2902 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2904               start_sequence ();
2905               convert_move (newreg, memref,
2906                             GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2907               seq = get_insns ();
2908               end_sequence ();
2910               validate_change (insn, &SET_SRC (body), newreg, 1);
2911             }
2912         }
2914 /* See if we can convert this extraction or insertion into
2915 a simple move insn. We might not be able to do so if this
2916 was, for example, part of a PARALLEL.
2918 If we succeed, write out any needed conversions. If we fail,
2919 it is hard to guess why we failed, so don't do anything
2920 special; just let the optimization be suppressed. */
2922 if (apply_change_group () && seq)
2923 emit_insns_before (seq, insn);
2928 /* These routines are responsible for converting virtual register references
2929 to the actual hard register references once RTL generation is complete.
2931 The following four variables are used for communication between the
2932 routines. They contain the offsets of the virtual registers from their
2933 respective hard registers. */
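/* Illustrative example (added commentary, not part of the original
   source): an address (plus virtual_stack_vars_rtx (const_int 8)) is
   later instantiated as (plus frame_pointer_rtx (const_int (8 +
   var_offset))), where var_offset is set below from
   STARTING_FRAME_OFFSET.  */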
2935 static int in_arg_offset;
2936 static int var_offset;
2937 static int dynamic_offset;
2938 static int out_arg_offset;
2939 static int cfa_offset;
2941 /* In most machines, the stack pointer register is equivalent to the bottom
2942    of the stack.  */
2944 #ifndef STACK_POINTER_OFFSET
2945 #define STACK_POINTER_OFFSET 0
2948 /* If not defined, pick an appropriate default for the offset of dynamically
2949 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2950 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2952 #ifndef STACK_DYNAMIC_OFFSET
2954 #ifdef ACCUMULATE_OUTGOING_ARGS
2955 /* The bottom of the stack points to the actual arguments. If
2956 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2957    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2958 stack space for register parameters is not pushed by the caller, but
2959 rather part of the fixed stack areas and hence not included in
2960 `current_function_outgoing_args_size'. Nevertheless, we must allow
2961 for it when allocating stack dynamic objects. */
2963 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2964 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2965 (current_function_outgoing_args_size \
2966 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2967 #else
2969 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2970 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2971 #endif
2973 #else
2974 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2975 #endif
2976 #endif
2978 /* On a few machines, the CFA coincides with the arg pointer. */
2980 #ifndef ARG_POINTER_CFA_OFFSET
2981 #define ARG_POINTER_CFA_OFFSET 0
2982 #endif
2985 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2986 its address taken. DECL is the decl for the object stored in the
2987 register, for later use if we do need to force REG into the stack.
2988 REG is overwritten by the MEM like in put_reg_into_stack. */
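/* Sketch of the resulting shape (added commentary, not part of the
   original source): for "int i; ... &i ...", the pseudo holding i is
   rewritten in place into (mem:SI (addressof:Pmode (reg new) regno)), so
   every existing reference to that REG rtx now sees the MEM;
   purge_addressof later either collapses it back to the inner register
   or spills it to a real stack slot.  */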
2991 gen_mem_addressof (reg, decl)
2992      rtx reg;
2993      tree decl;
2994 {
2995   tree type = TREE_TYPE (decl);
2996 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2997 SET_ADDRESSOF_DECL (r, decl);
2998 /* If the original REG was a user-variable, then so is the REG whose
2999 address is being taken. */
3000 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
3003 PUT_CODE (reg, MEM);
3004 PUT_MODE (reg, DECL_MODE (decl));
3005 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
3006 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
3007 MEM_ALIAS_SET (reg) = get_alias_set (decl);
3009 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
3010     fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
3012   return reg;
3013 }
3015 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
3017 static void
3018 flush_addressof (decl)
3019      tree decl;
3020 {
3021 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
3022 && DECL_RTL (decl) != 0
3023 && GET_CODE (DECL_RTL (decl)) == MEM
3024 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
3025 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
3026 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
3029 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
3031 static void
3032 put_addressof_into_stack (r, ht)
3033      rtx r;
3034      struct hash_table *ht;
3035 {
3036 tree decl = ADDRESSOF_DECL (r);
3037 rtx reg = XEXP (r, 0);
3039   if (GET_CODE (reg) != REG)
3040     abort ();
3042 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
3043 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
3044 ADDRESSOF_REGNO (r),
3045                       TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
3046 }
3048 /* List of replacements made below in purge_addressof_1 when creating
3049 bitfield insertions. */
3050 static rtx purge_bitfield_addressof_replacements;
3052 /* List of replacements made below in purge_addressof_1 for patterns
3053 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
3054    corresponding (ADDRESSOF (REG ...)) and value is a substitution for
3055    the whole pattern.  List PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
3056    enough in complex cases, e.g. when some field values can be
3057    extracted by using a MEM with a narrower mode.  */
3058 static rtx purge_addressof_replacements;
3060 /* Helper function for purge_addressof. See if the rtx expression at *LOC
3061 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
3062    the stack.  If the function returns FALSE then the replacement could not
3063    be completed.  */
3065 static boolean
3066 purge_addressof_1 (loc, insn, force, store, ht)
3067      rtx *loc;
3068      rtx insn;
3069      int force, store;
3070      struct hash_table *ht;
3071 {
3072   rtx x;
3073   RTX_CODE code;
3074   int i, j;
3075   char *fmt;
3076   boolean result = true;
3078   /* Re-start here to avoid recursion in common cases.  */
3079  restart:
3081   x = *loc;
3082   if (x == 0)
3083     return true;
3085   code = GET_CODE (x);
3087   if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
3088     {
3089       rtx insns;
3090 /* We must create a copy of the rtx because it was created by
3091 overwriting a REG rtx which is always shared. */
3092 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3094       if (validate_change (insn, loc, sub, 0)
3095           || validate_replace_rtx (x, sub, insn))
3096         return true;
3098       start_sequence ();
3099       sub = force_operand (sub, NULL_RTX);
3100       if (! validate_change (insn, loc, sub, 0)
3101           && ! validate_replace_rtx (x, sub, insn))
3102         abort ();
3104       insns = gen_sequence ();
3105       end_sequence ();
3106       emit_insn_before (insns, insn);
3107       return true;
3108     }
3109 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3110     {
3111       rtx sub = XEXP (XEXP (x, 0), 0);
3112       rtx sub2;
3114       if (GET_CODE (sub) == MEM)
3115         {
3116           sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
3117           MEM_COPY_ATTRIBUTES (sub2, sub);
3118           RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
3119           sub = sub2;
3120         }
3122 if (GET_CODE (sub) == REG
3123 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3124         {
3125           put_addressof_into_stack (XEXP (x, 0), ht);
3126           return true;
3127         }
3128 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3130           int size_x, size_sub;
3132           if (!insn)
3133             {
3134               /* When processing REG_NOTES look at the list of
3135                  replacements done on the insn to find the register that X
3136                  was replaced by.  */
3137               rtx tem;
3139               for (tem = purge_bitfield_addressof_replacements;
3140                    tem != NULL_RTX;
3141                    tem = XEXP (XEXP (tem, 1), 1))
3142                 if (rtx_equal_p (x, XEXP (tem, 0)))
3143                   {
3144                     *loc = XEXP (XEXP (tem, 1), 0);
3145                     return true;
3146                   }
3148 /* See comment for purge_addressof_replacements. */
3149               for (tem = purge_addressof_replacements;
3150                    tem != NULL_RTX;
3151                    tem = XEXP (XEXP (tem, 1), 1))
3152                 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3153                   {
3154                     rtx z = XEXP (XEXP (tem, 1), 0);
3156                     if (GET_MODE (x) == GET_MODE (z)
3157                         || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3158                             && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3159                       abort ();
3161                     /* It can happen that the note may speak of things
3162                        in a wider (or just different) mode than the
3163                        code did.  This is especially true of
3164                        REG_RETVAL.  */
3166                     if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
3167                       z = SUBREG_REG (z);
3169                     if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3170                         && (GET_MODE_SIZE (GET_MODE (x))
3171                             > GET_MODE_SIZE (GET_MODE (z))))
3172                       {
3173                         /* This can occur as a result of invalid
3174                            pointer casts, e.g. float f; ...
3175                            *(long long int *)&f.
3176                            ??? We could emit a warning here, but
3177                            without a line number that wouldn't be
3178                            very useful.  */
3179                         z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3180                       }
3181                     else
3182                       z = gen_lowpart (GET_MODE (x), z);
3184                     *loc = z;
3185                     return true;
3186                   }
3188               /* Sometimes we may not be able to find the replacement.  For
3189                  example when the original insn was a MEM in a wider mode,
3190                  and the note is part of a sign extension of a narrowed
3191                  version of that MEM.  Gcc testcase compile/990829-1.c can
3192                  generate an example of this situation.  Rather than complain
3193                  we return false, which will prompt our caller to remove the
3194                  offending note.  */
3195               return false;
3196             }
3193 we return false, which will prompt our caller to remove the
3198 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3199 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3201 /* Don't even consider working with paradoxical subregs,
3202 or the moral equivalent seen here. */
3203 if (size_x <= size_sub
3204               && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3205             {
3206               /* Do a bitfield insertion to mirror what would happen
3207                  in memory.  */
3209               rtx val, seq;
3211               if (store)
3212                 {
3213 rtx p = PREV_INSN (insn);
3215                   start_sequence ();
3216                   val = gen_reg_rtx (GET_MODE (x));
3217                   if (! validate_change (insn, loc, val, 0))
3218                     {
3219                       /* Discard the current sequence and put the
3220                          ADDRESSOF on stack.  */
3221                       end_sequence ();
3222                       goto give_up;
3223                     }
3224                   seq = gen_sequence ();
3225                   end_sequence ();
3226                   emit_insn_before (seq, insn);
3227                   compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3228                                          insn, ht);
3230                   start_sequence ();
3231                   store_bit_field (sub, size_x, 0, GET_MODE (x),
3232 val, GET_MODE_SIZE (GET_MODE (sub)),
3233 GET_MODE_SIZE (GET_MODE (sub)));
3235 /* Make sure to unshare any shared rtl that store_bit_field
3236 might have created. */
3237 unshare_all_rtl_again (get_insns ());
3239                   seq = gen_sequence ();
3240                   end_sequence ();
3241                   p = emit_insn_after (seq, insn);
3242 if (NEXT_INSN (insn))
3243 compute_insns_for_mem (NEXT_INSN (insn),
3244                                            p ? NEXT_INSN (p) : NULL_RTX,
3245                                            ht);
3246                 }
3247               else
3248                 {
3249 rtx p = PREV_INSN (insn);
3251                   start_sequence ();
3252                   val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3253 GET_MODE (x), GET_MODE (x),
3254 GET_MODE_SIZE (GET_MODE (sub)),
3255 GET_MODE_SIZE (GET_MODE (sub)));
3257                   if (! validate_change (insn, loc, val, 0))
3258                     {
3259                       /* Discard the current sequence and put the
3260                          ADDRESSOF on stack.  */
3261                       end_sequence ();
3262                       goto give_up;
3263                     }
3265                   seq = gen_sequence ();
3266                   end_sequence ();
3267                   emit_insn_before (seq, insn);
3268                   compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3269                                          insn, ht);
3270                 }
3272 /* Remember the replacement so that the same one can be done
3273 on the REG_NOTES. */
3274 purge_bitfield_addressof_replacements
3275                 = gen_rtx_EXPR_LIST (VOIDmode, x,
3276                                      gen_rtx_EXPR_LIST
3277                                      (VOIDmode, val,
3278                                       purge_bitfield_addressof_replacements));
3280               /* We replaced with a reg -- all done.  */
3281               return true;
3282             }
3284       else if (validate_change (insn, loc, sub, 0))
3285         {
3286 /* Remember the replacement so that the same one can be done
3287 on the REG_NOTES. */
3288           if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3289             {
3290               rtx tem;
3292               for (tem = purge_addressof_replacements;
3293                    tem != NULL_RTX;
3294                    tem = XEXP (XEXP (tem, 1), 1))
3295                 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3296                   {
3297                     XEXP (XEXP (tem, 1), 0) = sub;
3298                     return true;
3299                   }
3300 purge_addressof_replacements
3301 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3302 gen_rtx_EXPR_LIST (VOIDmode, sub,
3303                                               purge_addressof_replacements));
3304               return true;
3305             }
3306           return true;
3307         }
3308     give_up:;
3309       /* else give up and put it into the stack */
3310     }
3311   else if (code == ADDRESSOF)
3312     {
3313       put_addressof_into_stack (x, ht);
3314       return true;
3315     }
3316   else if (code == SET)
3317     {
3318       result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3319       result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3320       return result;
3321     }
3323 /* Scan all subexpressions. */
3324 fmt = GET_RTX_FORMAT (code);
3325   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3326     {
3327       if (*fmt == 'e')
3328         result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3329       else if (*fmt == 'E')
3330         for (j = 0; j < XVECLEN (x, i); j++)
3331           result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3332     }
3334   return result;
3335 }
3337 /* Return a new hash table entry in HT. */
3339 static struct hash_entry *
3340 insns_for_mem_newfunc (he, ht, k)
3341 struct hash_entry *he;
3342 struct hash_table *ht;
3343      hash_table_key k ATTRIBUTE_UNUSED;
3344 {
3345   struct insns_for_mem_entry *ifmhe;
3346   if (he)
3347     return he;
3349   ifmhe = ((struct insns_for_mem_entry *)
3350            hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3351   ifmhe->insns = NULL_RTX;
3353   return &ifmhe->he;
3354 }
3356 /* Return a hash value for K, a REG. */
3358 static unsigned long
3359 insns_for_mem_hash (k)
3360      hash_table_key k;
3361 {
3362   /* K is really a RTX.  Just use the address as the hash value.  */
3363   return (unsigned long) k;
3364 }
3366 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3368 static boolean
3369 insns_for_mem_comp (k1, k2)
3370      hash_table_key k1;
3371      hash_table_key k2;
3372 {
3373   return k1 == k2;
3374 }
3376 struct insns_for_mem_walk_info {
3377   /* The hash table that we are using to record which INSNs use which
3378      MEMs.  */
3379 struct hash_table *ht;
3381   /* The INSN we are currently processing.  */
3382   rtx insn;
3384 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3385      to find the insns that use the REGs in the ADDRESSOFs.  */
3386   int pass;
3387 };
3389 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3390 that might be used in an ADDRESSOF expression, record this INSN in
3391 the hash table given by DATA (which is really a pointer to an
3392 insns_for_mem_walk_info structure). */
3394 static int
3395 insns_for_mem_walk (r, data)
3396      rtx *r;
3397      void *data;
3398 {
3399 struct insns_for_mem_walk_info *ifmwi
3400 = (struct insns_for_mem_walk_info *) data;
3402 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3403 && GET_CODE (XEXP (*r, 0)) == REG)
3404 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3405   else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3406     {
3407       /* Lookup this MEM in the hashtable, creating it if necessary.  */
3408 struct insns_for_mem_entry *ifme
3409         = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3410                                                       *r,
3411                                                       /*create=*/0,
3412                                                       /*copy=*/0);
3414 /* If we have not already recorded this INSN, do so now. Since
3415 we process the INSNs in order, we know that if we have
3416 recorded it it must be at the front of the list. */
3417       if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3418         {
3419 /* We do the allocation on the same obstack as is used for
3420 the hash table since this memory will not be used once
3421 the hash table is deallocated. */
3422 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3423           ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3424                                            ifme->insns);
3425           pop_obstacks ();
3426         }
3427     }
3429   return 0;
3430 }
3432 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3433 which REGs in HT. */
3436 compute_insns_for_mem (insns, last_insn, ht)
3437      rtx insns;
3438      rtx last_insn;
3439      struct hash_table *ht;
3440 {
3441   rtx insn;
3442   struct insns_for_mem_walk_info ifmwi;
3443   ifmwi.ht = ht;
3445   for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3446     for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3447       if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3448         {
3449           ifmwi.insn = insn;
3450           for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3451         }
3452 }
3454 /* Helper function for purge_addressof called through for_each_rtx.
3455    Returns true iff the rtl is an ADDRESSOF.  */
3456 static int
3457 is_addressof (rtl, data)
3458      rtx *rtl;
3459      void *data ATTRIBUTE_UNUSED;
3460 {
3461   return GET_CODE (*rtl) == ADDRESSOF;
3462 }
3464 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3465    (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3466    stack.  */
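/* Overview (added commentary, not part of the original source): after
   this pass no ADDRESSOF rtxs remain; each (mem (addressof (reg ...)))
   has either been replaced by its register again or, when a real memory
   address was required, the register has been given a stack slot via
   put_addressof_into_stack.  */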
3468 void
3469 purge_addressof (insns)
3470      rtx insns;
3471 {
3472   rtx insn;
3473 struct hash_table ht;
3475 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3476 requires a fixup pass over the instruction stream to correct
3477 INSNs that depended on the REG being a REG, and not a MEM. But,
3478    these fixup passes are slow.  Furthermore, most MEMs are not
3479 mentioned in very many instructions. So, we speed up the process
3480 by pre-calculating which REGs occur in which INSNs; that allows
3481 us to perform the fixup passes much more quickly. */
3482 hash_table_init (&ht,
3483                    insns_for_mem_newfunc,
3484                    insns_for_mem_hash,
3485                    insns_for_mem_comp);
3486 compute_insns_for_mem (insns, NULL_RTX, &ht);
3488 for (insn = insns; insn; insn = NEXT_INSN (insn))
3489 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3490         || GET_CODE (insn) == CALL_INSN)
3491       {
3492         if (! purge_addressof_1 (&PATTERN (insn), insn,
3493                                  asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3494           /* If we could not replace the ADDRESSOFs in the insn,
3495              something is wrong.  */
3496           abort ();
3498         if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3499           {
3500 /* If we could not replace the ADDRESSOFs in the insn's notes,
3501 we can just remove the offending notes instead. */
3502             rtx note;
3504             for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3505               {
3506 /* If we find a REG_RETVAL note then the insn is a libcall.
3507 Such insns must have REG_EQUAL notes as well, in order
3508 for later passes of the compiler to work. So it is not
3509 safe to delete the notes here, and instead we abort. */
3510                 if (REG_NOTE_KIND (note) == REG_RETVAL)
3511                   abort ();
3512                 if (for_each_rtx (&note, is_addressof, NULL))
3513                   remove_note (insn, note);
3514               }
3515           }
3516       }
3519 hash_table_free (&ht);
3520 purge_bitfield_addressof_replacements = 0;
3521 purge_addressof_replacements = 0;
3523 /* REGs are shared. purge_addressof will destructively replace a REG
3524 with a MEM, which creates shared MEMs.
3526 Unfortunately, the children of put_reg_into_stack assume that MEMs
3527 referring to the same stack slot are shared (fixup_var_refs and
3528 the associated hash table code).
3530 So, we have to do another unsharing pass after we have flushed any
3531 REGs that had their address taken into the stack.
3533 It may be worth tracking whether or not we converted any REGs into
3534 MEMs to avoid this overhead when it is not needed. */
3535 unshare_all_rtl_again (get_insns ());
3538 /* Pass through the INSNS of function FNDECL and convert virtual register
3539 references to hard register references. */
3541 void
3542 instantiate_virtual_regs (fndecl, insns)
3543      tree fndecl;
3544      rtx insns;
3545 {
3546   rtx insn;
3547   int i;
3549 /* Compute the offsets to use for this function. */
3550 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3551 var_offset = STARTING_FRAME_OFFSET;
3552 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3553 out_arg_offset = STACK_POINTER_OFFSET;
3554 cfa_offset = ARG_POINTER_CFA_OFFSET;
3556 /* Scan all variables and parameters of this function. For each that is
3557 in memory, instantiate all virtual registers if the result is a valid
3558 address. If not, we do it later. That will handle most uses of virtual
3559 regs on many machines. */
3560 instantiate_decls (fndecl, 1);
3562   /* Initialize recognition, indicating that volatile is OK.  */
3563   init_recog ();
3565   /* Scan through all the insns, instantiating every virtual register still
3566      present.  */
3567 for (insn = insns; insn; insn = NEXT_INSN (insn))
3568 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3569 || GET_CODE (insn) == CALL_INSN)
3570       {
3571         instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3572         instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3573       }
3575 /* Instantiate the stack slots for the parm registers, for later use in
3576 addressof elimination. */
3577 for (i = 0; i < max_parm_reg; ++i)
3578 if (parm_reg_stack_loc[i])
3579 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3581 /* Now instantiate the remaining register equivalences for debugging info.
3582 These will not be valid addresses. */
3583 instantiate_decls (fndecl, 0);
3585 /* Indicate that, from now on, assign_stack_local should use
3586 frame_pointer_rtx. */
3587   virtuals_instantiated = 1;
3588 }
3590 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3591 all virtual registers in their DECL_RTL's.
3593 If VALID_ONLY, do this only if the resulting address is still valid.
3594 Otherwise, always do it. */
3596 static void
3597 instantiate_decls (fndecl, valid_only)
3598      tree fndecl;
3599      int valid_only;
3600 {
3601   tree decl;
3603 if (DECL_SAVED_INSNS (fndecl))
3604 /* When compiling an inline function, the obstack used for
3605 rtl allocation is the maybepermanent_obstack. Calling
3606 `resume_temporary_allocation' switches us back to that
3607 obstack while we process this function's parameters. */
3608 resume_temporary_allocation ();
3610 /* Process all parameters of the function. */
3611   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3612     {
3613 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3615 instantiate_decl (DECL_RTL (decl), size, valid_only);
3617 /* If the parameter was promoted, then the incoming RTL mode may be
3618          larger than the declared type size.  We must use the larger of
3619          the two sizes.  */
3620 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3621       instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3622     }
3624 /* Now process all variables defined in the function or its subblocks. */
3625 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3627   if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3628     {
3629 /* Save all rtl allocated for this function by raising the
3630          high-water mark on the maybepermanent_obstack.  */
3631       preserve_data ();
3632       /* All further rtl allocation is now done in the current_obstack.  */
3633       rtl_in_current_obstack ();
3634     }
3635 }
3637 /* Subroutine of instantiate_decls: Process all decls in the given
3638 BLOCK node and all its subblocks. */
3640 static void
3641 instantiate_decls_1 (let, valid_only)
3642      tree let;
3643      int valid_only;
3644 {
3645   tree t;
3647 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3648     instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3649                       valid_only);
3651 /* Process all subblocks. */
3652 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3653     instantiate_decls_1 (t, valid_only);
3654 }
3656 /* Subroutine of the preceding procedures: Given RTL representing a
3657 decl and the size of the object, do any instantiation required.
3659 If VALID_ONLY is non-zero, it means that the RTL should only be
3660 changed if the new address is valid. */
3662 static void
3663 instantiate_decl (x, size, valid_only)
3664      rtx x;
3665      HOST_WIDE_INT size;
3666      int valid_only;
3667 {
3668   enum machine_mode mode;
3669   rtx addr;
3671 /* If this is not a MEM, no need to do anything. Similarly if the
3672 address is a constant or a register that is not a virtual register. */
3674   if (x == 0 || GET_CODE (x) != MEM)
3675     return;
3677   addr = XEXP (x, 0);
3678 if (CONSTANT_P (addr)
3679 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3680 || (GET_CODE (addr) == REG
3681 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3682           || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3683     return;
3685 /* If we should only do this if the address is valid, copy the address.
3686 We need to do this so we can undo any changes that might make the
3687    address invalid.  This copy is unfortunate, but probably can't be
3688    avoided.  */
3690   if (valid_only)
3691     addr = copy_rtx (addr);
3693   instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3695   if (valid_only)
3696     {
3697 /* Now verify that the resulting address is valid for every integer or
3698 floating-point mode up to and including SIZE bytes long. We do this
3699          since the object might be accessed in any mode and frame addresses
3700          are shared.  */
3702 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3703 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3704 mode = GET_MODE_WIDER_MODE (mode))
3705         if (! memory_address_p (mode, addr))
3706           return;
3708 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3709 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3710 mode = GET_MODE_WIDER_MODE (mode))
3711         if (! memory_address_p (mode, addr))
3712           return;
3713     }
3715 /* Put back the address now that we have updated it and we either know
3716      it is valid or we don't care whether it is valid.  */
3717   XEXP (x, 0) = addr;
3718 }
3721 /* Given a pointer to a piece of rtx and an optional pointer to the
3722 containing object, instantiate any virtual registers present in it.
3724 If EXTRA_INSNS, we always do the replacement and generate
3725    any extra insns before OBJECT.  If it is zero, we do nothing if replacement
3726    is not valid.
3728 Return 1 if we either had nothing to do or if we were able to do the
3729 needed replacement. Return 0 otherwise; we only return zero if
3730 EXTRA_INSNS is zero.
3732    We first try some simple transformations to avoid the creation of extra
3733    insns.  */
3735 static int
3736 instantiate_virtual_regs_1 (loc, object, extra_insns)
3737      rtx *loc;
3738      rtx object;
3739      int extra_insns;
3740 {
3741   rtx x;
3742   RTX_CODE code;
3743   rtx new = 0;
3744   HOST_WIDE_INT offset = 0;
3745   rtx temp;
3746   rtx seq;
3747   int i, j;
3748   char *fmt;
3750   /* Re-start here to avoid recursion in common cases.  */
3751  restart:
3753   x = *loc;
3754   if (x == 0)
3755     return 1;
3757 code = GET_CODE (x);
3759   /* Check for some special cases.  */
3760   switch (code)
3761     {
3762     case CONST_INT:
3763     case CONST_DOUBLE:
3764     case CONST:
3765     case SYMBOL_REF:
3766     case CODE_LABEL:
3767     case PC:
3768     case CC0:
3769     case ASM_INPUT:
3770     case ADDR_VEC:
3771     case ADDR_DIFF_VEC:
3772     case RETURN:
3773       return 1;
3775     case SET:
3776 /* We are allowed to set the virtual registers. This means that
3777 the actual register should receive the source minus the
3778 appropriate offset. This is used, for example, in the handling
3779 of non-local gotos. */
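      /* Illustrative example (added commentary, not part of the original
         source): (set (reg virtual-stack-vars) (reg r)) becomes
         (set (reg frame-pointer) (plus (reg r) (const_int -var_offset))),
         with force_operand emitting any insns needed to form the sum.  */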
3780 if (SET_DEST (x) == virtual_incoming_args_rtx)
3781 new = arg_pointer_rtx, offset = - in_arg_offset;
3782 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3783 new = frame_pointer_rtx, offset = - var_offset;
3784 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3785 new = stack_pointer_rtx, offset = - dynamic_offset;
3786 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3787 new = stack_pointer_rtx, offset = - out_arg_offset;
3788 else if (SET_DEST (x) == virtual_cfa_rtx)
3789         new = arg_pointer_rtx, offset = - cfa_offset;
3791       if (new)
3792         {
3793 /* The only valid sources here are PLUS or REG. Just do
3794 the simplest possible thing to handle them. */
3795           if (GET_CODE (SET_SRC (x)) != REG
3796               && GET_CODE (SET_SRC (x)) != PLUS)
3797             abort ();
3799           start_sequence ();
3800           if (GET_CODE (SET_SRC (x)) != REG)
3801             temp = force_operand (SET_SRC (x), NULL_RTX);
3802           else
3803             temp = SET_SRC (x);
3804           temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3805           seq = get_insns ();
3806           end_sequence ();
3808           emit_insns_before (seq, object);
3811           if (! validate_change (object, &SET_SRC (x), temp, 0)
3812               || ! validate_change (object, &SET_DEST (x), new, 0))
3813             abort ();
3815           return 1;
3816         }
3818       instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3819       break;
3821     case PLUS:
3823 /* Handle special case of virtual register plus constant. */
3824       if (CONSTANT_P (XEXP (x, 1)))
3825         {
3826 rtx old, new_offset;
3828 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3829 if (GET_CODE (XEXP (x, 0)) == PLUS)
3831 rtx inner = XEXP (XEXP (x, 0), 0);
3833 if (inner == virtual_incoming_args_rtx)
3834 new = arg_pointer_rtx, offset = in_arg_offset;
3835 else if (inner == virtual_stack_vars_rtx)
3836 new = frame_pointer_rtx, offset = var_offset;
3837 else if (inner == virtual_stack_dynamic_rtx)
3838 new = stack_pointer_rtx, offset = dynamic_offset;
3839 else if (inner == virtual_outgoing_args_rtx)
3840 new = stack_pointer_rtx, offset = out_arg_offset;
3841 else if (inner == virtual_cfa_rtx)
3842                 new = arg_pointer_rtx, offset = cfa_offset;
3843               else
3844                 {
3845                   loc = &XEXP (x, 0);
3846                   goto restart;
3847                 }
3849               instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3850                                           extra_insns);
3851               new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3852             }
3854 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3855 new = arg_pointer_rtx, offset = in_arg_offset;
3856 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3857 new = frame_pointer_rtx, offset = var_offset;
3858 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3859 new = stack_pointer_rtx, offset = dynamic_offset;
3860 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3861 new = stack_pointer_rtx, offset = out_arg_offset;
3862 else if (XEXP (x, 0) == virtual_cfa_rtx)
3863             new = arg_pointer_rtx, offset = cfa_offset;
3864           else
3865             {
3866 /* We know the second operand is a constant. Unless the
3867 first operand is a REG (which has been already checked),
3868 it needs to be checked. */
3869               if (GET_CODE (XEXP (x, 0)) != REG)
3870                 {
3871                   loc = &XEXP (x, 0);
3872                   goto restart;
3873                 }
3875               return 1;
3876             }
3877           new_offset = plus_constant (XEXP (x, 1), offset);
3879           /* If the new constant is zero, try to replace the sum with just
3880              the register.  */
3881           if (new_offset == const0_rtx
3882               && validate_change (object, loc, new, 0))
3883             return 1;
3885 /* Next try to replace the register and new offset.
3886 There are two changes to validate here and we can't assume that
3887 in the case of old offset equals new just changing the register
3888 will yield a valid insn. In the interests of a little efficiency,
3889 however, we only call validate change once (we don't queue up the
3890 changes and then call apply_change_group). */
3892           old = XEXP (x, 0);
3893           if (offset == 0
3894               ? ! validate_change (object, &XEXP (x, 0), new, 0)
3895 : (XEXP (x, 0) = new,
3896                  ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3897             {
3898               if (! extra_insns)
3899                 {
3900                   XEXP (x, 0) = old;
3901                   return 0;
3902                 }
3904               /* Otherwise copy the new constant into a register and replace
3905 constant with that register. */
3906               temp = gen_reg_rtx (Pmode);
3907               XEXP (x, 0) = new;
3908 if (validate_change (object, &XEXP (x, 1), temp, 0))
3909                 emit_insn_before (gen_move_insn (temp, new_offset), object);
3910               else
3911                 {
3912                   /* If that didn't work, replace this expression with a
3913                      register containing the sum.  */
3915                   XEXP (x, 0) = old;
3916                   new = gen_rtx_PLUS (Pmode, new, new_offset);
3918                   start_sequence ();
3919                   temp = force_operand (new, NULL_RTX);
3920                   seq = get_insns ();
3921                   end_sequence ();
3923                   emit_insns_before (seq, object);
3924                   if (! validate_change (object, loc, temp, 0)
3925                       && ! validate_replace_rtx (x, temp, object))
3926                     abort ();
3927                 }
3928             }
3930           return 1;
3931         }
3933 /* Fall through to generic two-operand expression case. */
3934     case EXPR_LIST:
3935    case CALL:
3936    case COMPARE:
3937    case MINUS:
3938    case MULT:
3939    case DIV:      case UDIV:
3940 case MOD: case UMOD:
3941 case AND: case IOR: case XOR:
3942 case ROTATERT: case ROTATE:
3943    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3944    case NE:       case EQ:
3945 case GE: case GT: case GEU: case GTU:
3946 case LE: case LT: case LEU: case LTU:
3947 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3948         instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3950       /* ... fall through ... */
3952     case MEM:
3953 /* Most cases of MEM that convert to valid addresses have already been
3954 handled by our scan of decls. The only special handling we
3955 need here is to make a copy of the rtx to ensure it isn't being
3956 shared if we have to change it to a pseudo.
3958 If the rtx is a simple reference to an address via a virtual register,
3959 it can potentially be shared. In such cases, first try to make it
3960          a valid address, which can also be shared.  Otherwise, copy it and
3961          process the copy.
3963 First check for common cases that need no processing. These are
3964          usually due to instantiation already being done on a previous instance
3965          of a shared rtx.  */
3967       temp = XEXP (x, 0);
3968 if (CONSTANT_ADDRESS_P (temp)
3969 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3970           || temp == arg_pointer_rtx
3971 #endif
3972 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3973           || temp == hard_frame_pointer_rtx
3974 #endif
3975           || temp == frame_pointer_rtx)
3976         return 1;
3978 if (GET_CODE (temp) == PLUS
3979 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3980 && (XEXP (temp, 0) == frame_pointer_rtx
3981 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3982               || XEXP (temp, 0) == hard_frame_pointer_rtx
3983 #endif
3984 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3985               || XEXP (temp, 0) == arg_pointer_rtx
3986 #endif
3987               ))
3988         return 1;
3990 if (temp == virtual_stack_vars_rtx
3991 || temp == virtual_incoming_args_rtx
3992 || (GET_CODE (temp) == PLUS
3993 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3994 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3995 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3997 /* This MEM may be shared. If the substitution can be done without
3998 the need to generate new pseudos, we want to do it in place
3999 so all copies of the shared rtx benefit. The call below will
4000 only make substitutions if the resulting address is still
4003 Note that we cannot pass X as the object in the recursive call
4004 since the insn being processed may not allow all valid
4005 addresses. However, if we were not passed an object, we can
4006 only modify X without copying it if X will have a valid address.
4009 ??? Also note that this can still lose if OBJECT is an insn that
4010 has fewer restrictions on an address than some other insn.
4011 In that case, we will modify the shared address. This case
4012 doesn't seem very likely, though. One case where this could
4013 happen is in the case of a USE or CLOBBER reference, but we
4014 take care of that below. */
4016 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4017 object ? object : x, 0))
4020 /* Otherwise make a copy and process that copy. We copy the entire
4021 RTL expression since it might be a PLUS which could also be
4023 *loc = x = copy_rtx (x);
4026 /* Fall through to generic unary operation case. */
4028 case STRICT_LOW_PART:
4030 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4031 case SIGN_EXTEND: case ZERO_EXTEND:
4032 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4033 case FLOAT: case FIX:
4034 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4038 /* These cases either have just one operand or we know that we need not
4039 check the rest of the operands. */
4045 /* If the operand is a MEM, see if the change results in a valid MEM. If not,
4046 go ahead and make the invalid change, but apply it to a copy. For a REG,
4047 just make the recursive call, since there's no chance of a problem. */
4049 if ((GET_CODE (XEXP (x, 0)) == MEM
4050 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4052 || (GET_CODE (XEXP (x, 0)) == REG
4053 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4056 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4061 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4062 in front of this insn and substitute the temporary. */
4063 if (x == virtual_incoming_args_rtx)
4064 new = arg_pointer_rtx, offset = in_arg_offset;
4065 else if (x == virtual_stack_vars_rtx)
4066 new = frame_pointer_rtx, offset = var_offset;
4067 else if (x == virtual_stack_dynamic_rtx)
4068 new = stack_pointer_rtx, offset = dynamic_offset;
4069 else if (x == virtual_outgoing_args_rtx)
4070 new = stack_pointer_rtx, offset = out_arg_offset;
4071 else if (x == virtual_cfa_rtx)
4072 new = arg_pointer_rtx, offset = cfa_offset;
4076 temp = plus_constant (new, offset);
4077 if (!validate_change (object, loc, temp, 0))
4083 temp = force_operand (temp, NULL_RTX);
4087 emit_insns_before (seq, object);
4088 if (! validate_change (object, loc, temp, 0)
4089 && ! validate_replace_rtx (x, temp, object))
4097 if (GET_CODE (XEXP (x, 0)) == REG)
4100 else if (GET_CODE (XEXP (x, 0)) == MEM)
4102 /* If we have a (addressof (mem ..)), do any instantiation inside
4103 since we know we'll be making the inside valid when we finally
4104 remove the ADDRESSOF. */
4105 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4114 /* Scan all subexpressions. */
4115 fmt = GET_RTX_FORMAT (code);
4116 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4119 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4122 else if (*fmt == 'E')
4123 for (j = 0; j < XVECLEN (x, i); j++)
4124 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
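
/* A minimal sketch (hypothetical, not compiler code) of the substitution
   performed above: rewrite a base+offset address, replacing a virtual
   base register and folding its displacement into the constant term.
   TOY_VIRT_FP, TOY_HARD_FP and var_offset stand in for
   virtual_stack_vars_rtx, frame_pointer_rtx and var_offset; the real
   code must additionally validate every change via validate_change. */
#if 0
#define TOY_VIRT_FP 100		/* toy stand-in for a virtual register */
#define TOY_HARD_FP 6		/* toy stand-in for the frame pointer */

struct toy_addr { int base; long off; };

static void
toy_instantiate (a, var_offset)
     struct toy_addr *a;
     long var_offset;
{
  if (a->base == TOY_VIRT_FP)
    {
      a->base = TOY_HARD_FP;	/* replace the virtual register */
      a->off += var_offset;	/* fold its offset into the sum */
    }
}
#endif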
4131 /* Optimization: assuming this function does not receive nonlocal gotos,
4132 delete the handlers for such, as well as the insns to establish
4133 and disestablish them. */
4139 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4141 /* Delete the handler by turning off the flag that would
4142 prevent jump_optimize from deleting it.
4143 Also permit deletion of the nonlocal labels themselves
4144 if nothing local refers to them. */
4145 if (GET_CODE (insn) == CODE_LABEL)
4149 LABEL_PRESERVE_P (insn) = 0;
4151 /* Remove it from the nonlocal_label list, to avoid confusing flow analysis. */
4153 for (t = nonlocal_labels, last_t = 0; t;
4154 last_t = t, t = TREE_CHAIN (t))
4155 if (DECL_RTL (TREE_VALUE (t)) == insn)
4160 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4162 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4165 if (GET_CODE (insn) == INSN)
4169 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4170 if (reg_mentioned_p (t, PATTERN (insn)))
4176 || (nonlocal_goto_stack_level != 0
4177 && reg_mentioned_p (nonlocal_goto_stack_level,
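
/* For reference, the source-level construct whose support is deleted
   above when it turns out to be unused: a GNU C nested function doing
   a nonlocal goto into its containing function (illustrative only). */
#if 0
static int
toy_outer (x)
     int x;
{
  __label__ bail;		/* local label visible to the nested fn */
  void toy_inner (void) { if (x < 0) goto bail; }	/* nonlocal goto */
  toy_inner ();
  return 0;
 bail:
  return -1;
}
#endif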
4184 /* Output a USE for any register use in RTL.
4185 This is used with -noreg to mark the extent of the lifespan
4186 of any registers used in a user-visible variable's DECL_RTL. */
4192 if (GET_CODE (rtl) == REG)
4193 /* This is a register variable. */
4194 emit_insn (gen_rtx_USE (VOIDmode, rtl));
4195 else if (GET_CODE (rtl) == MEM
4196 && GET_CODE (XEXP (rtl, 0)) == REG
4197 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
4198 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
4199 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
4200 /* This is a variable-sized structure. */
4201 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
4204 /* Like use_variable except that it outputs the USEs after INSN
4205 instead of at the end of the insn-chain. */
4208 use_variable_after (rtl, insn)
4211 if (GET_CODE (rtl) == REG)
4212 /* This is a register variable. */
4213 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
4214 else if (GET_CODE (rtl) == MEM
4215 && GET_CODE (XEXP (rtl, 0)) == REG
4216 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
4217 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
4218 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
4219 /* This is a variable-sized structure. */
4220 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
4226 return max_parm_reg;
4229 /* Return the first insn following those generated by `assign_parms'. */
4232 get_first_nonparm_insn ()
4235 return NEXT_INSN (last_parm_insn);
4236 return get_insns ();
4239 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4240 Crash if there is none. */
4243 get_first_block_beg ()
4245 register rtx searcher;
4246 register rtx insn = get_first_nonparm_insn ();
4248 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4249 if (GET_CODE (searcher) == NOTE
4250 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4253 abort (); /* Invalid call to this function. (See comments above.) */
4257 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4258 This means a type for which function calls must pass an address to the
4259 function or get an address back from the function.
4260 EXP may be a type node or an expression (whose type is tested). */
4263 aggregate_value_p (exp)
4266 int i, regno, nregs;
4269 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
4272 type = TREE_TYPE (exp);
4274 if (RETURN_IN_MEMORY (type))
4276 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4277 and thus can't be returned in registers. */
4278 if (TREE_ADDRESSABLE (type))
4280 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4282 /* Make sure we have suitable call-clobbered regs to return
4283 the value in; if not, we must return it in memory. */
4284 reg = hard_function_value (type, 0);
4286 /* If we have something other than a REG (e.g. a PARALLEL), then assume it is OK. */
4288 if (GET_CODE (reg) != REG)
4291 regno = REGNO (reg);
4292 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4293 for (i = 0; i < nregs; i++)
4294 if (! call_used_regs[regno + i])
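
/* User-level illustration of the decision above (hypothetical toy types;
   the outcome is target-dependent): */
#if 0
struct toy_big { char b[64]; };
struct toy_big toy_make_big ();	/* aggregate_value_p usually nonzero here:
				   the caller passes a hidden address and
				   the value is built in memory */
int toy_make_int ();		/* aggregate_value_p zero: the value is
				   returned in a call-clobbered register */
#endif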
4299 /* Assign RTL expressions to the function's parameters.
4300 This may involve copying them into registers and using
4301 those registers as the RTL for them.
4303 If SECOND_TIME is non-zero it means that this function is being
4304 called a second time. This is done by integrate.c when a function's
4305 compilation is deferred. We need to come back here in case the
4306 FUNCTION_ARG macro computes items needed for the rest of the compilation
4307 (such as changing which registers are fixed or caller-saved). But suppress
4308 writing any insns or setting DECL_RTL of anything in this case. */
4311 assign_parms (fndecl, second_time)
4316 register rtx entry_parm = 0;
4317 register rtx stack_parm = 0;
4318 CUMULATIVE_ARGS args_so_far;
4319 enum machine_mode promoted_mode, passed_mode;
4320 enum machine_mode nominal_mode, promoted_nominal_mode;
4322 /* Total space needed so far for args on the stack,
4323 given as a constant and a tree-expression. */
4324 struct args_size stack_args_size;
4325 tree fntype = TREE_TYPE (fndecl);
4326 tree fnargs = DECL_ARGUMENTS (fndecl);
4327 /* This is used for the arg pointer when referring to stack args. */
4328 rtx internal_arg_pointer;
4329 /* This is a dummy PARM_DECL that we used for the function result if
4330 the function returns a structure. */
4331 tree function_result_decl = 0;
4332 #ifdef SETUP_INCOMING_VARARGS
4333 int varargs_setup = 0;
4335 rtx conversion_insns = 0;
4337 /* Nonzero if the last arg is named `__builtin_va_alist',
4338 which is used on some machines for old-fashioned non-ANSI varargs.h;
4339 this should be stuck onto the stack as if it had arrived there. */
4341 = (current_function_varargs
4343 && (parm = tree_last (fnargs)) != 0
4345 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4346 "__builtin_va_alist")));
4348 /* Nonzero if function takes extra anonymous args.
4349 This means the last named arg must be on the stack
4350 right before the anonymous ones. */
4352 = (TYPE_ARG_TYPES (fntype) != 0
4353 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4354 != void_type_node));
4356 current_function_stdarg = stdarg;
4358 /* If the reg that the virtual arg pointer will be translated into is
4359 not a fixed reg or is the stack pointer, make a copy of the virtual
4360 arg pointer, and address parms via the copy. The frame pointer is
4361 considered fixed even though it is not marked as such.
4363 The second time through, simply use ap to avoid generating rtx. */
4365 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4366 || ! (fixed_regs[ARG_POINTER_REGNUM]
4367 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
4369 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4371 internal_arg_pointer = virtual_incoming_args_rtx;
4372 current_function_internal_arg_pointer = internal_arg_pointer;
4374 stack_args_size.constant = 0;
4375 stack_args_size.var = 0;
4377 /* If struct value address is treated as the first argument, make it so. */
4378 if (aggregate_value_p (DECL_RESULT (fndecl))
4379 && ! current_function_returns_pcc_struct
4380 && struct_value_incoming_rtx == 0)
4382 tree type = build_pointer_type (TREE_TYPE (fntype));
4384 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4386 DECL_ARG_TYPE (function_result_decl) = type;
4387 TREE_CHAIN (function_result_decl) = fnargs;
4388 fnargs = function_result_decl;
4391 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4392 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4393 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
4395 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4396 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4398 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4401 /* We haven't yet found an argument that we must push and pretend the caller did. */
4403 current_function_pretend_args_size = 0;
4405 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4407 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4408 struct args_size stack_offset;
4409 struct args_size arg_size;
4410 int passed_pointer = 0;
4411 int did_conversion = 0;
4412 tree passed_type = DECL_ARG_TYPE (parm);
4413 tree nominal_type = TREE_TYPE (parm);
4416 /* Set LAST_NAMED if this is the last named arg before some anonymous args. */
4418 int last_named = ((TREE_CHAIN (parm) == 0
4419 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4420 && (stdarg || current_function_varargs));
4421 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4422 most machines, if this is a varargs/stdarg function, then we treat
4423 the last named arg as if it were anonymous too. */
4424 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4426 if (TREE_TYPE (parm) == error_mark_node
4427 /* This can happen after weird syntax errors
4428 or if an enum type is defined among the parms. */
4429 || TREE_CODE (parm) != PARM_DECL
4430 || passed_type == NULL)
4432 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4433 = gen_rtx_MEM (BLKmode, const0_rtx);
4434 TREE_USED (parm) = 1;
4438 /* For a varargs.h function, save info about regs and stack space
4439 used by the individual args, not including the va_alist arg. */
4440 if (hide_last_arg && last_named)
4441 current_function_args_info = args_so_far;
4443 /* Find mode of arg as it is passed, and mode of arg
4444 as it should be during execution of this function. */
4445 passed_mode = TYPE_MODE (passed_type);
4446 nominal_mode = TYPE_MODE (nominal_type);
4448 /* If the parm's mode is VOID, its value doesn't matter;
4449 avoid the usual things like emit_move_insn that could crash. */
4450 if (nominal_mode == VOIDmode)
4452 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4456 /* If the parm is to be passed as a transparent union, use the
4457 type of the first field for the tests below. We have already
4458 verified that the modes are the same. */
4459 if (DECL_TRANSPARENT_UNION (parm)
4460 || TYPE_TRANSPARENT_UNION (passed_type))
4461 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4463 /* See if this arg was passed by invisible reference. It is if
4464 it is an object whose size depends on the contents of the
4465 object itself, or if the machine requires these objects be passed that way.
4468 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4469 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4470 || TREE_ADDRESSABLE (passed_type)
4471 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4472 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4473 passed_type, named_arg)
4477 passed_type = nominal_type = build_pointer_type (passed_type);
4479 passed_mode = nominal_mode = Pmode;
4482 promoted_mode = passed_mode;
4484 #ifdef PROMOTE_FUNCTION_ARGS
4485 /* Compute the mode to which the arg is actually extended. */
4486 unsignedp = TREE_UNSIGNED (passed_type);
4487 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4490 /* Let machine desc say which reg (if any) the parm arrives in.
4491 0 means it arrives on the stack. */
4492 #ifdef FUNCTION_INCOMING_ARG
4493 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4494 passed_type, named_arg);
4496 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4497 passed_type, named_arg);
4500 if (entry_parm == 0)
4501 promoted_mode = passed_mode;
4503 #ifdef SETUP_INCOMING_VARARGS
4504 /* If this is the last named parameter, do any required setup for
4505 varargs or stdargs. We need to know about the case of this being an
4506 addressable type, in which case we skip the registers it
4507 would have arrived in.
4509 For stdargs, LAST_NAMED will be set for two parameters, the one that
4510 is actually the last named, and the dummy parameter. We only
4511 want to do this action once.
4513 Also, indicate when RTL generation is to be suppressed. */
4514 if (last_named && !varargs_setup)
4516 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4517 current_function_pretend_args_size,
4523 /* Determine parm's home in the stack,
4524 in case it arrives in the stack or we should pretend it did.
4526 Compute the stack position and rtx where the argument arrives and its size.
4529 There is one complexity here: If this was a parameter that would
4530 have been passed in registers, but wasn't only because it is
4531 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4532 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4533 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4534 0 as it was the previous time. */
4536 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4537 locate_and_pad_parm (promoted_mode, passed_type,
4538 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4541 #ifdef FUNCTION_INCOMING_ARG
4542 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4544 pretend_named) != 0,
4546 FUNCTION_ARG (args_so_far, promoted_mode,
4548 pretend_named) != 0,
4551 fndecl, &stack_args_size, &stack_offset, &arg_size);
4555 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4557 if (offset_rtx == const0_rtx)
4558 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4560 stack_parm = gen_rtx_MEM (promoted_mode,
4561 gen_rtx_PLUS (Pmode,
4562 internal_arg_pointer,
4565 /* If this is a memory ref that contains aggregate components,
4566 mark it as such for cse and loop optimize. Likewise if it is readonly.
4568 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4569 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4570 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4573 /* If this parameter was passed both in registers and in the stack,
4574 use the copy on the stack. */
4575 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4578 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4579 /* If this parm was passed part in regs and part in memory,
4580 pretend it arrived entirely in memory
4581 by pushing the register-part onto the stack.
4583 In the special case of a DImode or DFmode that is split,
4584 we could put it together in a pseudoreg directly,
4585 but for now that's not worth bothering with. */
4589 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4590 passed_type, named_arg);
4594 current_function_pretend_args_size
4595 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4596 / (PARM_BOUNDARY / BITS_PER_UNIT)
4597 * (PARM_BOUNDARY / BITS_PER_UNIT));
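
/* Worked check of the rounding just above, under the assumption
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64: three register words
   occupy 12 bytes, which rounds up to 16 bytes of pretended incoming
   argument space (illustrative sketch only). */
#if 0
static void
toy_pretend_size_check ()
{
  int nregs = 3, units_per_word = 4, parm_boundary_bytes = 64 / 8;
  int size = ((nregs * units_per_word) + parm_boundary_bytes - 1)
	     / parm_boundary_bytes * parm_boundary_bytes;
  if (size != 16)		/* (12 + 7) / 8 * 8 == 16 */
    abort ();
}
#endif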
4601 /* Handle calls that pass values in multiple non-contiguous
4602 locations. The Irix 6 ABI has examples of this. */
4603 if (GET_CODE (entry_parm) == PARALLEL)
4604 emit_group_store (validize_mem (stack_parm), entry_parm,
4605 int_size_in_bytes (TREE_TYPE (parm)),
4606 (TYPE_ALIGN (TREE_TYPE (parm))
4609 move_block_from_reg (REGNO (entry_parm),
4610 validize_mem (stack_parm), nregs,
4611 int_size_in_bytes (TREE_TYPE (parm)));
4613 entry_parm = stack_parm;
4618 /* If we didn't decide this parm came in a register,
4619 by default it came on the stack. */
4620 if (entry_parm == 0)
4621 entry_parm = stack_parm;
4623 /* Record permanently how this parm was passed. */
4625 DECL_INCOMING_RTL (parm) = entry_parm;
4627 /* If there is actually space on the stack for this parm,
4628 count it in stack_args_size; otherwise set stack_parm to 0
4629 to indicate there is no preallocated stack slot for the parm. */
4631 if (entry_parm == stack_parm
4632 || (GET_CODE (entry_parm) == PARALLEL
4633 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4634 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4635 /* On some machines, even if a parm value arrives in a register
4636 there is still an (uninitialized) stack slot allocated for it.
4638 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4639 whether this parameter already has a stack slot allocated,
4640 because an arg block exists only if current_function_args_size
4641 is larger than some threshold, and we haven't calculated that
4642 yet. So, for now, we just assume that stack slots never exist in this case. */
4644 || REG_PARM_STACK_SPACE (fndecl) > 0
4648 stack_args_size.constant += arg_size.constant;
4650 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4653 /* No stack slot was pushed for this parm. */
4656 /* Update info on where next arg arrives in registers. */
4658 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4659 passed_type, named_arg);
4661 /* If this is our second time through, we are done with this parm. */
4665 /* If we can't trust the parm stack slot to be aligned enough
4666 for its ultimate type, don't use that slot after entry.
4667 We'll make another stack slot, if we need one. */
4669 int thisparm_boundary
4670 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4672 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4676 /* If parm was passed in memory, and we need to convert it on entry,
4677 don't store it back in that same slot. */
4679 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4683 /* Now adjust STACK_PARM to the mode and precise location
4684 where this parameter should live during execution,
4685 if we discover that it must live in the stack during execution.
4686 To make debuggers happier on big-endian machines, we store
4687 the value in the last bytes of the space available. */
4689 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4694 if (BYTES_BIG_ENDIAN
4695 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4696 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4697 - GET_MODE_SIZE (nominal_mode));
4699 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4700 if (offset_rtx == const0_rtx)
4701 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4703 stack_parm = gen_rtx_MEM (nominal_mode,
4704 gen_rtx_PLUS (Pmode,
4705 internal_arg_pointer,
4708 /* If this is a memory ref that contains aggregate components,
4709 mark it as such for cse and loop optimize. */
4710 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4715 /* We need this "use" info, because the gcc-register->stack-register
4716 converter in reg-stack.c needs to know which registers are active
4717 at the start of the function call. The actual parameter loading
4718 instructions are not always available then anymore, since they might
4719 have been optimised away. */
4721 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4722 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4725 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4726 in the mode in which it arrives.
4727 STACK_PARM is an RTX for a stack slot where the parameter can live
4728 during the function (in case we want to put it there).
4729 STACK_PARM is 0 if no stack slot was pushed for it.
4731 Now output code if necessary to convert ENTRY_PARM to
4732 the type in which this function declares it,
4733 and store that result in an appropriate place,
4734 which may be a pseudo reg, may be STACK_PARM,
4735 or may be a local stack slot if STACK_PARM is 0.
4737 Set DECL_RTL to that place. */
4739 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4741 /* If a BLKmode arrives in registers, copy it to a stack slot.
4742 Handle calls that pass values in multiple non-contiguous
4743 locations. The Irix 6 ABI has examples of this. */
4744 if (GET_CODE (entry_parm) == REG
4745 || GET_CODE (entry_parm) == PARALLEL)
4748 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4751 /* Note that we will be storing an integral number of words.
4752 So we have to be careful to ensure that we allocate an
4753 integral number of words. We do this below in the
4754 assign_stack_local if space was not allocated in the argument
4755 list. If it was, this will not work if PARM_BOUNDARY is not
4756 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4757 if it becomes a problem. */
4759 if (stack_parm == 0)
4762 = assign_stack_local (GET_MODE (entry_parm),
4765 /* If this is a memory ref that contains aggregate
4766 components, mark it as such for cse and loop optimize. */
4767 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4770 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4773 if (TREE_READONLY (parm))
4774 RTX_UNCHANGING_P (stack_parm) = 1;
4776 /* Handle calls that pass values in multiple non-contiguous
4777 locations. The Irix 6 ABI has examples of this. */
4778 if (GET_CODE (entry_parm) == PARALLEL)
4779 emit_group_store (validize_mem (stack_parm), entry_parm,
4780 int_size_in_bytes (TREE_TYPE (parm)),
4781 (TYPE_ALIGN (TREE_TYPE (parm))
4784 move_block_from_reg (REGNO (entry_parm),
4785 validize_mem (stack_parm),
4786 size_stored / UNITS_PER_WORD,
4787 int_size_in_bytes (TREE_TYPE (parm)));
4789 DECL_RTL (parm) = stack_parm;
4791 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4792 && ! DECL_INLINE (fndecl))
4793 /* layout_decl may set this. */
4794 || TREE_ADDRESSABLE (parm)
4795 || TREE_SIDE_EFFECTS (parm)
4796 /* If -ffloat-store specified, don't put explicit
4797 float variables into registers. */
4798 || (flag_float_store
4799 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4800 /* Always assign pseudo to structure return or item passed
4801 by invisible reference. */
4802 || passed_pointer || parm == function_result_decl)
4804 /* Store the parm in a pseudoregister during the function, but we
4805 may need to do it in a wider mode. */
4807 register rtx parmreg;
4808 int regno, regnoi = 0, regnor = 0;
4810 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4812 promoted_nominal_mode
4813 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4815 parmreg = gen_reg_rtx (promoted_nominal_mode);
4816 mark_user_reg (parmreg);
4818 /* If this was an item that we received a pointer to, set DECL_RTL appropriately. */
4823 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4824 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4827 DECL_RTL (parm) = parmreg;
4829 /* Copy the value into the register. */
4830 if (nominal_mode != passed_mode
4831 || promoted_nominal_mode != promoted_mode)
4834 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4835 mode, by the caller. We now have to convert it to
4836 NOMINAL_MODE, if different. However, PARMREG may be in
4837 a different mode than NOMINAL_MODE if it is being stored promoted.
4840 If ENTRY_PARM is a hard register, it might be in a register
4841 not valid for operating in its mode (e.g., an odd-numbered
4842 register for a DFmode). In that case, moves are the only
4843 thing valid, so we can't do a convert from there. This
4844 occurs when the calling sequence allows such misaligned registers as well.
4847 In addition, the conversion may involve a call, which could
4848 clobber parameters which haven't been copied to pseudo
4849 registers yet. Therefore, we must first copy the parm to
4850 a pseudo reg here, and save the conversion until after all
4851 parameters have been moved. */
4853 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4855 emit_move_insn (tempreg, validize_mem (entry_parm));
4857 push_to_sequence (conversion_insns);
4858 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4860 /* TREE_USED gets set erroneously during expand_assignment. */
4861 save_tree_used = TREE_USED (parm);
4862 expand_assignment (parm,
4863 make_tree (nominal_type, tempreg), 0, 0);
4864 TREE_USED (parm) = save_tree_used;
4865 conversion_insns = get_insns ();
4870 emit_move_insn (parmreg, validize_mem (entry_parm));
4872 /* If we were passed a pointer but the actual value
4873 can safely live in a register, put it in one. */
4874 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4875 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4876 && ! DECL_INLINE (fndecl))
4877 /* layout_decl may set this. */
4878 || TREE_ADDRESSABLE (parm)
4879 || TREE_SIDE_EFFECTS (parm)
4880 /* If -ffloat-store specified, don't put explicit
4881 float variables into registers. */
4882 || (flag_float_store
4883 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4885 /* We can't use nominal_mode, because it will have been set to
4886 Pmode above. We must use the actual mode of the parm. */
4887 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4888 mark_user_reg (parmreg);
4889 emit_move_insn (parmreg, DECL_RTL (parm));
4890 DECL_RTL (parm) = parmreg;
4891 /* STACK_PARM is the pointer, not the parm, and PARMREG is now the parm. */
4895 #ifdef FUNCTION_ARG_CALLEE_COPIES
4896 /* If we are passed an arg by reference and it is our responsibility
4897 to make a copy, do it now.
4898 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4899 original argument, so we must recreate them in the call to
4900 FUNCTION_ARG_CALLEE_COPIES. */
4901 /* ??? Later add code to handle the case where the argument isn't
4902 modified; then the copy can be skipped. */
4904 else if (passed_pointer
4905 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4906 TYPE_MODE (DECL_ARG_TYPE (parm)),
4907 DECL_ARG_TYPE (parm),
4909 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4912 tree type = DECL_ARG_TYPE (parm);
4914 /* This sequence may involve a library call, perhaps clobbering
4915 registers that haven't been copied to pseudos yet. */
4917 push_to_sequence (conversion_insns);
4919 if (TYPE_SIZE (type) == 0
4920 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4921 /* This is a variable-sized object. */
4922 copy = gen_rtx_MEM (BLKmode,
4923 allocate_dynamic_stack_space
4924 (expr_size (parm), NULL_RTX,
4925 TYPE_ALIGN (type)));
4927 copy = assign_stack_temp (TYPE_MODE (type),
4928 int_size_in_bytes (type), 1);
4929 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4930 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4932 store_expr (parm, copy, 0);
4933 emit_move_insn (parmreg, XEXP (copy, 0));
4934 if (current_function_check_memory_usage)
4935 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4936 XEXP (copy, 0), Pmode,
4937 GEN_INT (int_size_in_bytes (type)),
4938 TYPE_MODE (sizetype),
4939 GEN_INT (MEMORY_USE_RW),
4940 TYPE_MODE (integer_type_node));
4941 conversion_insns = get_insns ();
4945 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4947 /* In any case, record the parm's desired stack location
4948 in case we later discover it must live in the stack.
4950 If it is a COMPLEX value, store the stack location for both halves. */
4953 if (GET_CODE (parmreg) == CONCAT)
4954 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4956 regno = REGNO (parmreg);
4958 if (regno >= max_parm_reg)
4961 int old_max_parm_reg = max_parm_reg;
4963 /* It's slow to expand this one register at a time,
4964 but it's also rare and we need max_parm_reg to be
4965 precisely correct. */
4966 max_parm_reg = regno + 1;
4967 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4968 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4969 old_max_parm_reg * sizeof (rtx));
4970 bzero ((char *) (new + old_max_parm_reg),
4971 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4972 parm_reg_stack_loc = new;
4975 if (GET_CODE (parmreg) == CONCAT)
4977 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4979 regnor = REGNO (gen_realpart (submode, parmreg));
4980 regnoi = REGNO (gen_imagpart (submode, parmreg));
4982 if (stack_parm != 0)
4984 parm_reg_stack_loc[regnor]
4985 = gen_realpart (submode, stack_parm);
4986 parm_reg_stack_loc[regnoi]
4987 = gen_imagpart (submode, stack_parm);
4991 parm_reg_stack_loc[regnor] = 0;
4992 parm_reg_stack_loc[regnoi] = 0;
4996 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4998 /* Mark the register as eliminable if we did no conversion
4999 and it was copied from memory at a fixed offset,
5000 and the arg pointer was not copied to a pseudo-reg.
5001 If the arg pointer is a pseudo reg or the offset formed
5002 an invalid address, such memory-equivalences
5003 as we make here would screw up life analysis for it. */
5004 if (nominal_mode == passed_mode
5007 && GET_CODE (stack_parm) == MEM
5008 && stack_offset.var == 0
5009 && reg_mentioned_p (virtual_incoming_args_rtx,
5010 XEXP (stack_parm, 0)))
5012 rtx linsn = get_last_insn ();
5015 /* Mark complex types separately. */
5016 if (GET_CODE (parmreg) == CONCAT)
5017 /* Scan backwards for the set of the real and imaginary parts. */
5019 for (sinsn = linsn; sinsn != 0;
5020 sinsn = prev_nonnote_insn (sinsn))
5022 set = single_set (sinsn);
5024 && SET_DEST (set) == regno_reg_rtx [regnoi])
5026 = gen_rtx_EXPR_LIST (REG_EQUIV,
5027 parm_reg_stack_loc[regnoi],
5030 && SET_DEST (set) == regno_reg_rtx [regnor])
5032 = gen_rtx_EXPR_LIST (REG_EQUIV,
5033 parm_reg_stack_loc[regnor],
5036 else if ((set = single_set (linsn)) != 0
5037 && SET_DEST (set) == parmreg)
5039 = gen_rtx_EXPR_LIST (REG_EQUIV,
5040 stack_parm, REG_NOTES (linsn));
5043 /* For pointer data types, suggest a pointer register. */
5044 if (POINTER_TYPE_P (TREE_TYPE (parm)))
5045 mark_reg_pointer (parmreg,
5046 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
5051 /* Value must be stored in the stack slot STACK_PARM
5052 during function execution. */
5054 if (promoted_mode != nominal_mode)
5056 /* Conversion is required. */
5057 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5059 emit_move_insn (tempreg, validize_mem (entry_parm));
5061 push_to_sequence (conversion_insns);
5062 entry_parm = convert_to_mode (nominal_mode, tempreg,
5063 TREE_UNSIGNED (TREE_TYPE (parm)));
5066 /* ??? This may need a big-endian conversion on sparc64. */
5067 stack_parm = change_address (stack_parm, nominal_mode,
5070 conversion_insns = get_insns ();
5075 if (entry_parm != stack_parm)
5077 if (stack_parm == 0)
5080 = assign_stack_local (GET_MODE (entry_parm),
5081 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
5082 /* If this is a memory ref that contains aggregate components,
5083 mark it as such for cse and loop optimize. */
5084 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
5087 if (promoted_mode != nominal_mode)
5089 push_to_sequence (conversion_insns);
5090 emit_move_insn (validize_mem (stack_parm),
5091 validize_mem (entry_parm));
5092 conversion_insns = get_insns ();
5096 emit_move_insn (validize_mem (stack_parm),
5097 validize_mem (entry_parm));
5099 if (current_function_check_memory_usage)
5101 push_to_sequence (conversion_insns);
5102 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
5103 XEXP (stack_parm, 0), Pmode,
5104 GEN_INT (GET_MODE_SIZE (GET_MODE
5106 TYPE_MODE (sizetype),
5107 GEN_INT (MEMORY_USE_RW),
5108 TYPE_MODE (integer_type_node));
5110 conversion_insns = get_insns ();
5113 DECL_RTL (parm) = stack_parm;
5116 /* If this "parameter" was the place where we are receiving the
5117 function's incoming structure pointer, set up the result. */
5118 if (parm == function_result_decl)
5120 tree result = DECL_RESULT (fndecl);
5121 tree restype = TREE_TYPE (result);
5124 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
5126 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
5127 AGGREGATE_TYPE_P (restype));
5130 if (TREE_THIS_VOLATILE (parm))
5131 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
5132 if (TREE_READONLY (parm))
5133 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
5136 /* Output all parameter conversion instructions (possibly including calls)
5137 now that all parameters have been copied out of hard registers. */
5138 emit_insns (conversion_insns);
5140 last_parm_insn = get_last_insn ();
5142 current_function_args_size = stack_args_size.constant;
5144 /* Adjust function incoming argument size for alignment and minimum size. */
5147 #ifdef REG_PARM_STACK_SPACE
5148 #ifndef MAYBE_REG_PARM_STACK_SPACE
5149 current_function_args_size = MAX (current_function_args_size,
5150 REG_PARM_STACK_SPACE (fndecl));
5154 #ifdef STACK_BOUNDARY
5155 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5157 current_function_args_size
5158 = ((current_function_args_size + STACK_BYTES - 1)
5159 / STACK_BYTES) * STACK_BYTES;
5162 #ifdef ARGS_GROW_DOWNWARD
5163 current_function_arg_offset_rtx
5164 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5165 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
5166 size_int (-stack_args_size.constant)),
5167 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
5169 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5172 /* See how many bytes, if any, of its args a function should try to pop on return. */
5175 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5176 current_function_args_size);
5178 /* For a stdarg.h function, save info about
5179 regs and stack space used by the named args. */
5182 current_function_args_info = args_so_far;
5184 /* Set the rtx used for the function return value. Put this in its
5185 own variable so any optimizers that need this information don't have
5186 to include tree.h. Do this here so it gets done when an inlined
5187 function gets output. */
5189 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
5192 /* Indicate whether REGNO is an incoming argument to the current function
5193 that was promoted to a wider mode. If so, return the RTX for the
5194 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5195 that REGNO is promoted from and whether the promotion was signed or unsigned. */
5198 #ifdef PROMOTE_FUNCTION_ARGS
5201 promoted_input_arg (regno, pmode, punsignedp)
5203 enum machine_mode *pmode;
5208 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5209 arg = TREE_CHAIN (arg))
5210 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5211 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5212 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5214 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5215 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5217 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5218 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5219 && mode != DECL_MODE (arg))
5221 *pmode = DECL_MODE (arg);
5222 *punsignedp = unsignedp;
5223 return DECL_INCOMING_RTL (arg);
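
/* Source-level view of the promotion reported above (illustrative;
   whether it happens depends on the target's PROMOTE_FUNCTION_ARGS): */
#if 0
static int
toy_use_short (s)
     short s;			/* DECL_MODE is HImode, but the value may
				   arrive sign-extended in an SImode hard
				   register; promoted_input_arg would then
				   report HImode and signedness for it */
{
  return s + 1;
}
#endif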
5232 /* Compute the size and offset from the start of the stacked arguments for a
5233 parm passed in mode PASSED_MODE and with type TYPE.
5235 INITIAL_OFFSET_PTR points to the current offset into the stacked arguments.
5238 The starting offset and size for this parm are returned in *OFFSET_PTR
5239 and *ARG_SIZE_PTR, respectively.
5241 IN_REGS is non-zero if the argument will be passed in registers. It will
5242 never be set if REG_PARM_STACK_SPACE is not defined.
5244 FNDECL is the function in which the argument was defined.
5246 There are two types of rounding that are done. The first, controlled by
5247 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5248 list to be aligned to the specific boundary (in bits). This rounding
5249 affects the initial and starting offsets, but not the argument size.
5251 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5252 optionally rounds the size of the parm to PARM_BOUNDARY. The
5253 initial offset is not affected by this rounding, while the size always
5254 is and the starting offset may be. */
5256 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
5257 initial_offset_ptr is positive because locate_and_pad_parm's
5258 callers pass in the total size of args so far as
5259 initial_offset_ptr. arg_size_ptr is always positive. */
5262 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5263 initial_offset_ptr, offset_ptr, arg_size_ptr)
5264 enum machine_mode passed_mode;
5267 tree fndecl ATTRIBUTE_UNUSED;
5268 struct args_size *initial_offset_ptr;
5269 struct args_size *offset_ptr;
5270 struct args_size *arg_size_ptr;
5273 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5274 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5275 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5277 #ifdef REG_PARM_STACK_SPACE
5278 /* If we have found a stack parm before we reach the end of the
5279 area reserved for registers, skip that area. */
5282 int reg_parm_stack_space = 0;
5284 #ifdef MAYBE_REG_PARM_STACK_SPACE
5285 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5287 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5289 if (reg_parm_stack_space > 0)
5291 if (initial_offset_ptr->var)
5293 initial_offset_ptr->var
5294 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5295 size_int (reg_parm_stack_space));
5296 initial_offset_ptr->constant = 0;
5298 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5299 initial_offset_ptr->constant = reg_parm_stack_space;
5302 #endif /* REG_PARM_STACK_SPACE */
5304 arg_size_ptr->var = 0;
5305 arg_size_ptr->constant = 0;
5307 #ifdef ARGS_GROW_DOWNWARD
5308 if (initial_offset_ptr->var)
5310 offset_ptr->constant = 0;
5311 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
5312 initial_offset_ptr->var);
5316 offset_ptr->constant = - initial_offset_ptr->constant;
5317 offset_ptr->var = 0;
5319 if (where_pad != none
5320 && (TREE_CODE (sizetree) != INTEGER_CST
5321 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5322 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5323 SUB_PARM_SIZE (*offset_ptr, sizetree);
5324 if (where_pad != downward)
5325 pad_to_arg_alignment (offset_ptr, boundary);
5326 if (initial_offset_ptr->var)
5328 arg_size_ptr->var = size_binop (MINUS_EXPR,
5329 size_binop (MINUS_EXPR,
5331 initial_offset_ptr->var),
5336 arg_size_ptr->constant = (- initial_offset_ptr->constant
5337 - offset_ptr->constant);
5339 #else /* !ARGS_GROW_DOWNWARD */
5341 #ifdef REG_PARM_STACK_SPACE
5342 || REG_PARM_STACK_SPACE (fndecl) > 0
5344 /* For the gcc-2_95-branch we want to make sure not to break something
5345 on platforms which pass arguments in registers but don't define
5346 REG_PARM_STACK_SPACE. So we force the original behaviour here. */
5350 pad_to_arg_alignment (initial_offset_ptr, boundary);
5352 *offset_ptr = *initial_offset_ptr;
5354 #ifdef PUSH_ROUNDING
5355 if (passed_mode != BLKmode)
5356 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5359 /* Pad_below needs the pre-rounded size to know how much to pad below,
5360 so this must be done before rounding up. */
5361 if (where_pad == downward
5362 /* However, BLKmode args passed in regs have their padding done elsewhere.
5363 The stack slot must be able to hold the entire register. */
5364 && !(in_regs && passed_mode == BLKmode))
5365 pad_below (offset_ptr, passed_mode, sizetree);
5367 if (where_pad != none
5368 && (TREE_CODE (sizetree) != INTEGER_CST
5369 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5370 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5372 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5373 #endif /* ARGS_GROW_DOWNWARD */
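
/* A simplified numeric sketch of the two roundings described above for
   the upward-growing case, using the CEIL_ROUND macro defined earlier in
   this file (assumes an 8-byte FUNCTION_ARG_BOUNDARY and a 4-byte
   PARM_BOUNDARY; ignores padding direction and variable sizes): */
#if 0
static long
toy_locate (initial_offset, size)
     long initial_offset, size;
{
  long start = CEIL_ROUND (initial_offset, 8);	/* align the offset */
  long padded = CEIL_ROUND (size, 4);		/* pad the size */
  return start + padded;	/* where the next argument would begin */
}
/* toy_locate (4, 6) == 8 + 8 == 16 */
#endif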
5376 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5377 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5380 pad_to_arg_alignment (offset_ptr, boundary)
5381 struct args_size *offset_ptr;
5384 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5386 if (boundary > BITS_PER_UNIT)
5388 if (offset_ptr->var)
5391 #ifdef ARGS_GROW_DOWNWARD
5396 (ARGS_SIZE_TREE (*offset_ptr),
5397 boundary / BITS_PER_UNIT);
5398 offset_ptr->constant = 0; /*?*/
5401 offset_ptr->constant =
5402 #ifdef ARGS_GROW_DOWNWARD
5403 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5405 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
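
/* Sanity sketch of the direction-dependent rounding above: both calls
   move the offset away from already-allocated argument space.  (The
   mask form matters for negative offsets, where division would round
   toward zero.)  Uses the macros defined earlier in this file. */
#if 0
static void
toy_round_check ()
{
  if (CEIL_ROUND (5, 8) != 8)		/* args grow upward */
    abort ();
  if (FLOOR_ROUND (-5, 8) != -8)	/* args grow downward */
    abort ();
}
#endif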
5410 #ifndef ARGS_GROW_DOWNWARD
5412 pad_below (offset_ptr, passed_mode, sizetree)
5413 struct args_size *offset_ptr;
5414 enum machine_mode passed_mode;
5417 if (passed_mode != BLKmode)
5419 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5420 offset_ptr->constant
5421 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5422 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5423 - GET_MODE_SIZE (passed_mode));
5427 if (TREE_CODE (sizetree) != INTEGER_CST
5428 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5430 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
5431 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5433 ADD_PARM_SIZE (*offset_ptr, s2);
5434 SUB_PARM_SIZE (*offset_ptr, sizetree);
5440 #ifdef ARGS_GROW_DOWNWARD
5442 round_down (value, divisor)
5446 return size_binop (MULT_EXPR,
5447 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5448 size_int (divisor));
5452 /* Walk the tree of blocks describing the binding levels within a function
5453 and warn about uninitialized variables.
5454 This is done after calling flow_analysis and before global_alloc
5455 clobbers the pseudo-regs to hard regs. */
5458 uninitialized_vars_warning (block)
5461 register tree decl, sub;
5462 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5464 if (TREE_CODE (decl) == VAR_DECL
5465 /* These warnings are unreliable for aggregates
5466 because assigning the fields one by one can fail to convince
5467 flow.c that the entire aggregate was initialized.
5468 Unions are troublesome because members may be shorter. */
5469 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5470 && DECL_RTL (decl) != 0
5471 && GET_CODE (DECL_RTL (decl)) == REG
5472 /* Global optimizations can make it difficult to determine if a
5473 particular variable has been initialized. However, a VAR_DECL
5474 with a nonzero DECL_INITIAL had an initializer, so do not
5475 claim it is potentially uninitialized.
5477 We do not care about the actual value in DECL_INITIAL, so we do
5478 not worry that it may be a dangling pointer. */
5479 && DECL_INITIAL (decl) == NULL_TREE
5480 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5481 warning_with_decl (decl,
5482 "`%s' might be used uninitialized in this function");
5483 if (TREE_CODE (decl) == VAR_DECL
5484 && DECL_RTL (decl) != 0
5485 && GET_CODE (DECL_RTL (decl)) == REG
5486 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5487 warning_with_decl (decl,
5488 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5490 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5491 uninitialized_vars_warning (sub);
5494 /* Do the appropriate part of uninitialized_vars_warning
5495 but for arguments instead of local variables. */
5498 setjmp_args_warning ()
5501 for (decl = DECL_ARGUMENTS (current_function_decl);
5502 decl; decl = TREE_CHAIN (decl))
5503 if (DECL_RTL (decl) != 0
5504 && GET_CODE (DECL_RTL (decl)) == REG
5505 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5506 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5509 /* If this function calls setjmp, put all vars into the stack
5510 unless they were declared `register'. */
5513 setjmp_protect (block)
5516 register tree decl, sub;
5517 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5518 if ((TREE_CODE (decl) == VAR_DECL
5519 || TREE_CODE (decl) == PARM_DECL)
5520 && DECL_RTL (decl) != 0
5521 && (GET_CODE (DECL_RTL (decl)) == REG
5522 || (GET_CODE (DECL_RTL (decl)) == MEM
5523 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5524 /* If this variable came from an inline function, it must be
5525 that its life doesn't overlap the setjmp. If there was a
5526 setjmp in the function, it would already be in memory. We
5527 must exclude such variables because their DECL_RTL might be
5528 set to strange things such as virtual_stack_vars_rtx. */
5529 && ! DECL_FROM_INLINE (decl)
5531 #ifdef NON_SAVING_SETJMP
5532 /* If longjmp doesn't restore the registers,
5533 don't put anything in them. */
5537 ! DECL_REGISTER (decl)))
5538 put_var_into_stack (decl);
5539 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5540 setjmp_protect (sub);
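
/* The failure mode defended against above, at the source level
   (illustrative): after longjmp, automatics modified since setjmp have
   indeterminate values precisely when they lived in call-clobbered
   registers, so we force them into the stack unless declared
   `register'. */
#if 0
#include <setjmp.h>

static jmp_buf toy_env;

static int
toy_f ()
{
  int n = 0;			/* put_var_into_stack would act here */
  if (setjmp (toy_env) == 0)
    {
      n = 1;
      longjmp (toy_env, 1);
    }
  return n;			/* reliably 1 only if n lives in memory */
}
#endif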
5543 /* Like the previous function, but for args instead of local variables. */
5546 setjmp_protect_args ()
5549 for (decl = DECL_ARGUMENTS (current_function_decl);
5550 decl; decl = TREE_CHAIN (decl))
5551 if ((TREE_CODE (decl) == VAR_DECL
5552 || TREE_CODE (decl) == PARM_DECL)
5553 && DECL_RTL (decl) != 0
5554 && (GET_CODE (DECL_RTL (decl)) == REG
5555 || (GET_CODE (DECL_RTL (decl)) == MEM
5556 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5558 /* If longjmp doesn't restore the registers,
5559 don't put anything in them. */
5560 #ifdef NON_SAVING_SETJMP
5564 ! DECL_REGISTER (decl)))
5565 put_var_into_stack (decl);
5568 /* Return the context-pointer register corresponding to DECL,
5569 or 0 if it does not need one. */
5572 lookup_static_chain (decl)
5575 tree context = decl_function_context (decl);
5579 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5582 /* We treat inline_function_decl as an alias for the current function
5583 because that is the inline function whose vars, types, etc.
5584 are being merged into the current function.
5585 See expand_inline_function. */
5586 if (context == current_function_decl || context == inline_function_decl)
5587 return virtual_stack_vars_rtx;
5589 for (link = context_display; link; link = TREE_CHAIN (link))
5590 if (TREE_PURPOSE (link) == context)
5591 return RTL_EXPR_RTL (TREE_VALUE (link));
5596 /* Convert a stack slot address ADDR for variable VAR
5597 (from a containing function)
5598 into an address valid in this function (using a static chain). */
5601 fix_lexical_addr (addr, var)
5606 HOST_WIDE_INT displacement;
5607 tree context = decl_function_context (var);
5608 struct function *fp;
5611 /* If this is the present function, we need not do anything. */
5612 if (context == current_function_decl || context == inline_function_decl)
5615 for (fp = outer_function_chain; fp; fp = fp->next)
5616 if (fp->decl == context)
5622 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5623 addr = XEXP (XEXP (addr, 0), 0);
5625 /* Decode given address as base reg plus displacement. */
5626 if (GET_CODE (addr) == REG)
5627 basereg = addr, displacement = 0;
5628 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5629 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5633 /* We accept vars reached via the containing function's
5634 incoming arg pointer and via its stack variables pointer. */
5635 if (basereg == fp->internal_arg_pointer)
5637 /* If reached via arg pointer, get the arg pointer value
5638 out of that function's stack frame.
5640 There are two cases: If a separate ap is needed, allocate a
5641 slot in the outer function for it and dereference it that way.
5642 This is correct even if the real ap is actually a pseudo.
5643 Otherwise, just adjust the offset from the frame pointer to
5646 #ifdef NEED_SEPARATE_AP
5649 if (fp->arg_pointer_save_area == 0)
5650 fp->arg_pointer_save_area
5651 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5653 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5654 addr = memory_address (Pmode, addr);
5656 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5658 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5659 base = lookup_static_chain (var);
5663 else if (basereg == virtual_stack_vars_rtx)
5665 /* This is the same code as lookup_static_chain, duplicated here to
5666 avoid an extra call to decl_function_context. */
5669 for (link = context_display; link; link = TREE_CHAIN (link))
5670 if (TREE_PURPOSE (link) == context)
5672 base = RTL_EXPR_RTL (TREE_VALUE (link));
5680 /* Use same offset, relative to appropriate static chain or argument pointer. */
5682 return plus_constant (base, displacement);
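
/* The addressing problem solved above, at the source level (GNU C
   nested functions, illustrative): */
#if 0
static int
toy_outer2 (x)
     int x;
{
  int local = x;		/* lives in toy_outer2's frame */
  int toy_inner2 (void) { return local; }	/* the reference becomes
						   base (static chain or
						   saved ap) + displacement */
  return toy_inner2 ();
}
#endif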
5685 /* Return the address of the trampoline for entering nested fn FUNCTION.
5686 If necessary, allocate a trampoline (in the stack frame)
5687 and emit rtl to initialize its contents (at entry to this function). */
5690 trampoline_address (function)
5696 struct function *fp;
5699 /* Find an existing trampoline and return it. */
5700 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5701 if (TREE_PURPOSE (link) == function)
5703 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5705 for (fp = outer_function_chain; fp; fp = fp->next)
5706 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5707 if (TREE_PURPOSE (link) == function)
5709 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5711 return round_trampoline_addr (tramp);
5714 /* None exists; we must make one. */
5716 /* Find the `struct function' for the function containing FUNCTION. */
5718 fn_context = decl_function_context (function);
5719 if (fn_context != current_function_decl
5720 && fn_context != inline_function_decl)
5721 for (fp = outer_function_chain; fp; fp = fp->next)
5722 if (fp->decl == fn_context)
5725 /* Allocate run-time space for this trampoline
5726 (usually in the defining function's stack frame). */
5727 #ifdef ALLOCATE_TRAMPOLINE
5728 tramp = ALLOCATE_TRAMPOLINE (fp);
5730 /* If rounding is needed, allocate extra space
5731 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5732 #ifdef TRAMPOLINE_ALIGNMENT
5733 #define TRAMPOLINE_REAL_SIZE \
5734 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5736 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5739 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5741 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5744 /* Record the trampoline for reuse and note it for later initialization
5745 by expand_function_end. */
5748 push_obstacks (fp->function_maybepermanent_obstack,
5749 fp->function_maybepermanent_obstack);
5750 rtlexp = make_node (RTL_EXPR);
5751 RTL_EXPR_RTL (rtlexp) = tramp;
5752 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5757 /* Make the RTL_EXPR node temporary, not momentary, so that the
5758 trampoline_list doesn't become garbage. */
5759 int momentary = suspend_momentary ();
5760 rtlexp = make_node (RTL_EXPR);
5761 resume_momentary (momentary);
5763 RTL_EXPR_RTL (rtlexp) = tramp;
5764 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5767 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5768 return round_trampoline_addr (tramp);
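
/* A trampoline becomes necessary as soon as a nested function's address
   escapes, since a bare code address cannot carry the static chain
   (illustrative GNU C): */
#if 0
static int
toy_outer3 (x)
     int x;
{
  int toy_inner3 (int y) { return x + y; }
  int (*fp) () = toy_inner3;	/* forces trampoline_address (toy_inner3) */
  return (*fp) (1);
}
#endif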
5771 /* Given a trampoline address,
5772 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5775 round_trampoline_addr (tramp)
5778 #ifdef TRAMPOLINE_ALIGNMENT
5779 /* Round address up to desired boundary. */
5780 rtx temp = gen_reg_rtx (Pmode);
5781 temp = expand_binop (Pmode, add_optab, tramp,
5782 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5783 temp, 0, OPTAB_LIB_WIDEN);
5784 tramp = expand_binop (Pmode, and_optab, temp,
5785 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5786 temp, 0, OPTAB_LIB_WIDEN);
5791 /* The functions identify_blocks and reorder_blocks provide a way to
5792 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5793 duplicate portions of the RTL code. Call identify_blocks before
5794 changing the RTL, and call reorder_blocks after. */
5796 /* Put all this function's BLOCK nodes including those that are chained
5797 onto the first block into a vector, and return it.
5798 Also store in each NOTE for the beginning or end of a block
5799 the index of that block in the vector.
5800 The arguments are BLOCK, the chain of top-level blocks of the function,
5801 and INSNS, the insn chain of the function. */
5804 identify_blocks (block, insns)
5812 int next_block_number = 1;
5813 int current_block_number = 1;
5819 n_blocks = all_blocks (block, 0);
5820 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5821 block_stack = (int *) alloca (n_blocks * sizeof (int));
5823 all_blocks (block, block_vector);
5825 for (insn = insns; insn; insn = NEXT_INSN (insn))
5826 if (GET_CODE (insn) == NOTE)
5828 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5830 block_stack[depth++] = current_block_number;
5831 current_block_number = next_block_number;
5832 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5834 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5836 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5837 current_block_number = block_stack[--depth];
5841 if (n_blocks != next_block_number)
5844 return block_vector;
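
/* Sketch of what the scan above does for a function with nested scopes
   (illustrative): each `{' yields a NOTE_INSN_BLOCK_BEG that pushes the
   current block number; each `}' yields a NOTE_INSN_BLOCK_END that is
   tagged with the number on top of the stack. */
#if 0
static void
toy_g ()
{				/* BLOCK_BEG: push, enter new block */
  int a = 0;
  {				/* BLOCK_BEG: push, enter inner block */
    int b = a;
  }				/* BLOCK_END: pop back to outer block */
}				/* BLOCK_END: pop back to top level */
#endif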
5847 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5848 and a revised instruction chain, rebuild the tree structure
5849 of BLOCK nodes to correspond to the new order of RTL.
5850 The new block tree is inserted below TOP_BLOCK.
5851 Returns the current top-level block. */
5854 reorder_blocks (block_vector, block, insns)
5859 tree current_block = block;
5862 if (block_vector == 0)
5865 /* Prune the old trees away, so that they don't get in the way. */
5866 BLOCK_SUBBLOCKS (current_block) = 0;
5867 BLOCK_CHAIN (current_block) = 0;
5869 for (insn = insns; insn; insn = NEXT_INSN (insn))
5870 if (GET_CODE (insn) == NOTE)
5872 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5874 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5875 /* If we have seen this block before, copy it. */
5876 if (TREE_ASM_WRITTEN (block))
5877 block = copy_node (block);
5878 BLOCK_SUBBLOCKS (block) = 0;
5879 TREE_ASM_WRITTEN (block) = 1;
5880 BLOCK_SUPERCONTEXT (block) = current_block;
5881 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5882 BLOCK_SUBBLOCKS (current_block) = block;
5883 current_block = block;
5884 NOTE_SOURCE_FILE (insn) = 0;
5886 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5888 BLOCK_SUBBLOCKS (current_block)
5889 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5890 current_block = BLOCK_SUPERCONTEXT (current_block);
5891 NOTE_SOURCE_FILE (insn) = 0;
5895 BLOCK_SUBBLOCKS (current_block)
5896 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5897 return current_block;
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

static tree
blocks_nreverse (t)
     tree t;
{
  register tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}
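/* E.g. a chain B1 -> B2 -> B3 (linked through BLOCK_CHAIN) comes back as
   B3 -> B2 -> B1; the reversal is done in place with no allocation.  */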
/* Count the subblocks of the list starting with BLOCK, and list them
   all into the vector VECTOR.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (block, vector)
     tree block;
     tree *vector;
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
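/* Worked example (illustrative): for the block tree

	B0
       /  \
     B1    B3
      |
     B2

   where B1's BLOCK_CHAIN is B3 and B1's subblock is B2, the blocks are
   recorded in pre-order -- vector[0] = B0, [1] = B1, [2] = B2, [3] = B3 --
   and the return value is 4.  This matches the 1-based numbering that
   identify_blocks assigns to the NOTEs above.  */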
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  init_stmt_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slots = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;
  nonlocal_goto_handler_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_const_rtx_hash_table ();
  current_function_name = (*decl_printable_name) (subr, 2);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0
       && ! DECL_NO_STATIC_CHAIN (current_function_decl));

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_has_nonlocal_label = 0;
  current_function_has_nonlocal_goto = 0;
  current_function_contains_functions = 0;
  current_function_is_leaf = 0;
  current_function_sp_is_unchanging = 0;
  current_function_uses_only_leaf_regs = 0;
  current_function_has_computed_jump = 0;
  current_function_is_thunk = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;
  current_function_cannot_inline = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */
  arg_pointer_save_area = 0;
  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* Set up to allocate temporaries.  */
  init_temp_slots ();

  /* Within function body, compute a type's size as soon as it is laid out.  */
  immediate_size_expand++;

  /* We haven't made any trampolines for this function yet.  */
  trampoline_list = 0;
  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.
     Note linenums could be missing, e.g. when compiling a Java .class file.  */
  emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);
  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");
  current_function_returns_pointer
    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs or stdarg.  */
  current_function_varargs = 0;
  current_function_stdarg = 0;
}

/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}
/* Expand a call to __main at the beginning of a possible main function.  */

#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

#ifndef GEN_CALL__MAIN
#define GEN_CALL__MAIN \
  do { emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0, \
			  VOIDmode, 0); } while (0)
#endif

void
expand_main_function ()
{
#if defined(INVOKE__main) || !defined (HAS_INIT_SECTION)
  GEN_CALL__MAIN;
#endif /* not HAS_INIT_SECTION */
}
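/* On targets without an init section, the net effect is as if main began
   with the C fragment below (sketch only; __main is the libgcc/collect2
   constructor hook named by NAME__MAIN):  */
#if 0
extern void __main ();

int
main ()
{
  __main ();	/* run static constructors before any user code */
  /* ... user code ... */
  return 0;
}
#endif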
extern struct obstack permanent_obstack;

/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr = NULL_RTX;
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* Set this before generating any memory accesses.  */
  current_function_check_memory_usage
    = (flag_check_memory_usage
       && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));

  current_function_instrument_entry_exit
    = (flag_instrument_function_entry_exit
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

      /* Delay copying static chain if it is not a register to avoid
	 conflicts with regs used for parameters.  */
      if (! SMALL_REGISTER_CLASSES
	  || GET_CODE (static_chain_incoming_rtx) == REG)
	emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;
  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_instrument_entry_exit
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif
  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (struct_value_incoming_rtx)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, struct_value_incoming_rtx);
	    }
	}
      if (value_address)
	{
	  DECL_RTL (DECL_RESULT (subr))
	    = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
	  MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
			       AGGREGATE_TYPE_P (TREE_TYPE
						 (DECL_RESULT (subr))));
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups || current_function_instrument_entry_exit)
    {
      /* If function will end with cleanup code for parms,
	 compute the return values into a pseudo reg,
	 which we will copy into the true return register
	 after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
	{
	  REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
	  /* Needed because we may need to move this to memory
	     in case it's a named return value whose address is taken.  */
	  DECL_REGISTER (DECL_RESULT (subr)) = 1;
	}
    }
  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr, 0);

  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }
  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation copy the static chain
	 pointer into a pseudo.  If we have small register classes, copy
	 the value from memory if static_chain_incoming_rtx is a REG.  If
	 we do stupid register allocation, we use the stack address
	 generated above.  */
      if (tem && ! obey_regdecls)
	{
	  /* If the static chain originally came in a register, put it back
	     there, then move it out in the next insn.  The reason for
	     this peculiar code is to satisfy function integration.  */
	  if (SMALL_REGISTER_CLASSES
	      && GET_CODE (static_chain_incoming_rtx) == REG)
	    emit_move_insn (static_chain_incoming_rtx, last_ptr);
	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
	}

      while (tem)
	{
	  tree rtlexp = make_node (RTL_EXPR);

	  RTL_EXPR_RTL (rtlexp) = last_ptr;
	  context_display = tree_cons (tem, rtlexp, context_display);
	  tem = decl_function_context (tem);
	  if (tem == 0)
	    break;
	  /* Chain thru stack frames, assuming pointer to next lexical frame
	     is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
	  last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
	  last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
					       memory_address (Pmode, last_ptr)));

	  /* If we are not optimizing, ensure that we know that this
	     piece of context is live over the entire function.  */
	  if (! optimize)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
						save_expr_regs);
	}
    }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }
  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
		   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
	 side-effects.  */
      emit_queue ();
    }

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif
#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
	setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif
  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      /* arg_pointer_save_area may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      rtx seq;
      start_sequence ();
      emit_move_insn (validize_mem (arg_pointer_save_area),
		      virtual_incoming_args_rtx);
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_before (seq, tail_recursion_reentry);
    }
  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
	 initializing trampolines.  */
      if (initial_trampoline == 0)
	{
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();
	}
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
		       GEN_INT (TRAMPOLINE_SIZE),
		       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN)
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = gen_sequence ();
	    end_sequence ();
	    emit_insns_before (seq, tail_recursion_reentry);
	    break;
	  }
    }
  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	  warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();
  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);
  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);
  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
	label = gen_label_rtx ();
	last = emit_jump_insn_after (gen_jump (label), last);
	last = emit_barrier_after (last);

	emit_label (label);
      }
  }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }
  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }
  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
	 the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
	PUT_MODE (real_decl_result,
		  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));

      /* The delay slot scheduler assumes that current_function_return_rtx
	 holds the hard register containing the return value, not a temporary
	 pseudo.  */
      current_function_return_rtx = real_decl_result;
    }
  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }
  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */
#if defined (HAVE_prologue) || defined (HAVE_epilogue)

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
	vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = 0; vec[j]; j++)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
	    count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
	if (INSN_UID (insn) == vec[j])
	  return 1;
    }
  return 0;
}
#endif /* HAVE_prologue || HAVE_epilogue */
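/* How the two helpers above cooperate (sketch): after emitting, say, the
   prologue as SEQ, the caller saves `prologue = record_insns (seq);' -- a
   zero-terminated array of INSN_UIDs -- and later walks the insn stream
   decrementing a running count by `contains (insn, prologue)' until every
   recorded insn has been accounted for.  That is exactly the scan done in
   reposition_prologue_and_epilogue_notes below.  */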
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;
#ifdef HAVE_prologue
  rtx prologue_end = NULL_RTX;
#endif

  prologue = 0;
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx seq;

      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      prologue = record_insns (seq);

      prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
      seq = gen_sequence ();
      end_sequence ();

      /* If optimization is off, and perhaps in an empty function,
	 the entry block will have no successors.  */
      if (ENTRY_BLOCK_PTR->succ)
	{
	  /* Can't deal with multiple successors of the entry block.  */
	  if (ENTRY_BLOCK_PTR->succ->succ_next)
	    abort ();

	  insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
	  inserted = 1;
	}
      else
	emit_insn_after (seq, f);
    }
#endif
  epilogue = 0;
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      edge e;
      basic_block bb = 0;
      rtx tail = get_last_insn ();

      /* ??? This is ghastly.  If function returns were not done via uses,
	 but via mark_regs_live_at_end, we could use insert_insn_on_edge
	 and all of this ugliness would go away.  */

      switch (optimize)
	{
	default:
	  /* If the exit block has no non-fake predecessors, we don't
	     need an epilogue.  Furthermore, only pay attention to the
	     fallthru predecessors; if (conditional) return insns were
	     generated, by definition we do not need to emit epilogue
	     insns.  */

	  for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
	    if ((e->flags & EDGE_FAKE) == 0
		&& (e->flags & EDGE_FALLTHRU) != 0)
	      break;
	  if (e == NULL)
	    break;
	  /* We can't handle multiple epilogues -- if one is needed,
	     we won't be able to place it multiple times.

	     ??? Fix epilogue expanders to not assume they are the
	     last thing done compiling the function.  Either that
	     or copy_rtx each insn.

	     ??? Blah, it's not a simple expression to assert that
	     we've exactly one fallthru exit edge.  */

	  bb = e->src;
	  tail = bb->end;

	  /* ??? If the last insn of the basic block is a jump, then we
	     are creating a new basic block.  Wimp out and leave these
	     insns outside any block.  */
	  if (GET_CODE (tail) == JUMP_INSN)
	    bb = 0;

	  /* FALLTHRU */
	case 0:
	  {
	    rtx prev, seq, first_use;

	    /* Move the USE insns at the end of a function onto a list.  */
	    prev = tail;
	    if (GET_CODE (prev) == BARRIER
		|| GET_CODE (prev) == NOTE)
	      prev = prev_nonnote_insn (prev);

	    first_use = 0;
	    if (prev
		&& GET_CODE (prev) == INSN
		&& GET_CODE (PATTERN (prev)) == USE)
	      {
		/* If the end of the block is the use, grab hold of something
		   else so that we emit barriers etc in the right place.  */
		if (prev == tail)
		  {
		    do
		      tail = PREV_INSN (tail);
		    while (GET_CODE (tail) == INSN
			   && GET_CODE (PATTERN (tail)) == USE);
		  }

		do
		  {
		    rtx use = prev;
		    prev = prev_nonnote_insn (prev);

		    remove_insn (use);
		    if (first_use)
		      {
			NEXT_INSN (use) = first_use;
			PREV_INSN (first_use) = use;
		      }
		    else
		      NEXT_INSN (use) = NULL_RTX;
		    first_use = use;
		  }
		while (prev
		       && GET_CODE (prev) == INSN
		       && GET_CODE (PATTERN (prev)) == USE);
	      }
	    /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
	       epilogue insns, the USE insns at the end of a function,
	       the jump insn that returns, and then a BARRIER.  */

	    if (GET_CODE (tail) != BARRIER)
	      {
		prev = next_nonnote_insn (tail);
		if (!prev || GET_CODE (prev) != BARRIER)
		  emit_barrier_after (tail);
	      }

	    seq = gen_epilogue ();
	    prev = tail;
	    tail = emit_jump_insn_after (seq, tail);

	    /* Insert the USE insns immediately before the return insn, which
	       must be the last instruction emitted in the sequence.  */
	    if (first_use)
	      emit_insns_before (first_use, tail);
	    emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);

	    /* Update the tail of the basic block.  */
	    if (bb)
	      bb->end = tail;

	    /* Retain a map of the epilogue insns.  */
	    epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
	  }
	}
    }
#endif

  if (inserted)
    commit_edge_insertions ();
#ifdef HAVE_prologue
  if (prologue_end)
    {
      rtx insn, prev;

      /* GDB handles `break f' by setting a breakpoint on the first
	 line note *after* the prologue.  Which means (1) that if
	 there are line number notes before where we inserted the
	 prologue we should move them, and (2) if there is no such
	 note, then we should generate one at the prologue.  */

      for (insn = prologue_end; insn; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    {
	      /* Note that we cannot reorder the first insn in the
		 chain, since rest_of_compilation relies on that
		 remaining constant.  Do the next best thing.  */
	      if (prev == NULL)
		{
		  emit_line_note_after (NOTE_SOURCE_FILE (insn),
					NOTE_LINE_NUMBER (insn),
					prologue_end);
		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		}
	      else
		reorder_insns (insn, insn, prologue_end);
	    }
	}

      insn = NEXT_INSN (prologue_end);
      if (! insn || GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) <= 0)
	for (insn = next_active_insn (f); insn; insn = PREV_INSN (insn))
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    {
	      emit_line_note_after (NOTE_SOURCE_FILE (insn),
				    NOTE_LINE_NUMBER (insn),
				    prologue_end);
	      break;
	    }
    }
#endif
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      int len;

      if (prologue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the beginning until we reach the last prologue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; prologue[len]; len++)
	    ;
	  for (insn = f; len && insn; insn = NEXT_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		    note = insn;
		}
	      else if ((len -= contains (insn, prologue)) == 0)
		{
		  rtx next;
		  /* Find the prologue-end note if we haven't already, and
		     move it to just after the last prologue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = NEXT_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
			  break;
		    }

		  next = NEXT_INSN (note);

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (BLOCK_HEAD (0) == note)
		    BLOCK_HEAD (0) = next;

		  remove_insn (note);
		  add_insn_after (note, insn);
		}
	    }
	}
      if (epilogue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the end until we reach the first epilogue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; epilogue[len]; len++)
	    ;
	  for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		    note = insn;
		}
	      else if ((len -= contains (insn, epilogue)) == 0)
		{
		  /* Find the epilogue-begin note if we haven't already, and
		     move it to just before the first epilogue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = PREV_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
			  break;
		    }

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (n_basic_blocks
		      && BLOCK_HEAD (n_basic_blocks-1) == insn)
		    BLOCK_HEAD (n_basic_blocks-1) = note;

		  remove_insn (note);
		  add_insn_before (note, insn);
		}
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}