1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* $FreeBSD: src/contrib/gcc/function.c,v 1.6.2.3 2002/06/20 23:12:27 obrien Exp $ */
25 /* This file handles the generation of rtl code from tree structure
26 at the level of the function as a whole.
27 It creates the rtl expressions for parameters and auto variables
28 and has full responsibility for allocating stack slots.
30 `expand_function_start' is called at the beginning of a function,
31 before the function body is parsed, and `expand_function_end' is
32 called after parsing the body.
34 Call `assign_stack_local' to allocate a stack slot for a local variable.
35 This is usually done during the RTL generation for the function body,
36 but it can also be done in the reload pass when a pseudo-register does
37 not get a hard register.
39 Call `put_var_into_stack' when you learn, belatedly, that a variable
40 previously given a pseudo-register must in fact go in the stack.
41 This function changes the DECL_RTL to be a stack slot instead of a reg
42 then scans all the RTL instructions so far generated to correct them. */
51 #include "insn-flags.h"
53 #include "insn-codes.h"
55 #include "hard-reg-set.h"
56 #include "insn-config.h"
59 #include "basic-block.h"
64 #ifndef TRAMPOLINE_ALIGNMENT
65 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
68 #ifndef LOCAL_ALIGNMENT
69 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
72 /* Some systems use __main in a way incompatible with its use in gcc, in these
73 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
74 give the same symbol without quotes for an alternative entry point. You
75 must define both, or neither. */
77 #define NAME__MAIN "__main"
78 #define SYMBOL__MAIN __main
81 /* Round a value to the lowest integer less than it that is a multiple of
82 the required alignment. Avoid using division in case the value is
83 negative. Assume the alignment is a power of two. */
84 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
86 /* Similar, but round to the next highest integer that meets the alignment.  */
88 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
90 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
91 during rtl generation. If they are different register numbers, this is
92 always true. It may also be true if
93 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
94 generation. See fix_lexical_addr for details. */
96 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
97 #define NEED_SEPARATE_AP
100 /* Number of bytes of args popped by function being compiled on its return.
101 Zero if no bytes are to be popped.
102 May affect compilation of return insn or of function epilogue. */
104 int current_function_pops_args;
106 /* Nonzero if function being compiled needs to be given an address
107 where the value should be stored. */
109 int current_function_returns_struct;
111 /* Nonzero if function being compiled needs to
112 return the address of where it has put a structure value. */
114 int current_function_returns_pcc_struct;
116 /* Nonzero if function being compiled needs to be passed a static chain. */
118 int current_function_needs_context;
120 /* Nonzero if function being compiled can call setjmp. */
122 int current_function_calls_setjmp;
124 /* Nonzero if function being compiled can call longjmp. */
126 int current_function_calls_longjmp;
128 /* Nonzero if function being compiled receives nonlocal gotos
129 from nested functions. */
131 int current_function_has_nonlocal_label;
133 /* Nonzero if function being compiled has nonlocal gotos to parent
136 int current_function_has_nonlocal_goto;
138 /* Nonzero if function being compiled contains nested functions. */
140 int current_function_contains_functions;
142 /* Nonzero if function being compiled doesn't contain any calls
143 (ignoring the prologue and epilogue). This is set prior to
144 local register allocation and is valid for the remaining
147 int current_function_is_leaf;
149 /* Nonzero if function being compiled doesn't modify the stack pointer
150 (ignoring the prologue and epilogue). This is only valid after
151 life_analysis has run. */
153 int current_function_sp_is_unchanging;
155 /* Nonzero if the function being compiled is a leaf function which only
156 uses leaf registers. This is valid after reload (specifically after
157 sched2) and is useful only if the port defines LEAF_REGISTERS. */
159 int current_function_uses_only_leaf_regs;
161 /* Nonzero if the function being compiled issues a computed jump. */
163 int current_function_has_computed_jump;
165 /* Nonzero if the current function is a thunk (a lightweight function that
166 just adjusts one of its arguments and forwards to another function), so
167 we should try to cut corners where we can. */
168 int current_function_is_thunk;
170 /* Nonzero if function being compiled can call alloca,
171 either as a subroutine or builtin. */
173 int current_function_calls_alloca;
175 /* Nonzero if the current function returns a pointer type */
177 int current_function_returns_pointer;
179 /* If some insns can be deferred to the delay slots of the epilogue, the
180 delay list for them is recorded here. */
182 rtx current_function_epilogue_delay_list;
184 /* If function's args have a fixed size, this is that size, in bytes.
186 May affect compilation of return insn or of function epilogue. */
188 int current_function_args_size;
190 /* # bytes the prologue should push and pretend that the caller pushed them.
191 The prologue must do this, but only if parms can be passed in registers. */
193 int current_function_pretend_args_size;
195 /* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
196 defined, the needed space is pushed by the prologue. */
198 int current_function_outgoing_args_size;
200 /* This is the offset from the arg pointer to the place where the first
201 anonymous arg can be found, if there is one. */
203 rtx current_function_arg_offset_rtx;
205 /* Nonzero if current function uses varargs.h or equivalent.
206 Zero for functions that use stdarg.h. */
208 int current_function_varargs;
210 /* Nonzero if current function uses stdarg.h or equivalent.
211 Zero for functions that use varargs.h. */
213 int current_function_stdarg;
215 /* Quantities of various kinds of registers
216 used for the current function's args. */
218 CUMULATIVE_ARGS current_function_args_info;
220 /* Name of function now being compiled. */
222 char *current_function_name;
224 /* If non-zero, an RTL expression for the location at which the current
225 function returns its result. If the current function returns its
226 result in a register, current_function_return_rtx will always be
227 the hard register containing the result. */
229 rtx current_function_return_rtx;
231 /* Nonzero if the current function uses the constant pool. */
233 int current_function_uses_const_pool;
235 /* Nonzero if the current function uses pic_offset_table_rtx. */
236 int current_function_uses_pic_offset_table;
238 /* The arg pointer hard register, or the pseudo into which it was copied. */
239 rtx current_function_internal_arg_pointer;
241 /* Language-specific reason why the current function cannot be made inline. */
242 char *current_function_cannot_inline;
244 /* Nonzero if instrumentation calls for function entry and exit should be
246 int current_function_instrument_entry_exit;
248 /* Nonzero if memory access checking be enabled in the current function. */
248 /* Nonzero if memory access checking should be enabled in the current function. */
249 int current_function_check_memory_usage;
251 /* The FUNCTION_DECL for an inline function currently being expanded. */
252 tree inline_function_decl;
254 /* Number of function calls seen so far in current function. */
256 int function_call_count;
258 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
259 (labels to which there can be nonlocal gotos from nested functions)
262 tree nonlocal_labels;
264 /* List (chain of EXPR_LIST) of stack slots that hold the current handlers
265 for nonlocal gotos. There is one for every nonlocal label in the function;
266 this list matches the one in nonlocal_labels.
267 Zero when function does not have nonlocal labels. */
269 rtx nonlocal_goto_handler_slots;
271 /* List (chain of EXPR_LIST) of labels heading the current handlers for
274 rtx nonlocal_goto_handler_labels;
276 /* RTX for stack slot that holds the stack pointer value to restore
278 Zero when function does not have nonlocal labels. */
280 rtx nonlocal_goto_stack_level;
282 /* Label that will go on parm cleanup code, if any.
283 Jumping to this label runs cleanup code for parameters, if
284 such code must be run. Following this code is the logical return label. */
288 /* Label that will go on function epilogue.
289 Jumping to this label serves as a "return" instruction
290 on machines which require execution of the epilogue on all returns. */
294 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
295 So we can mark them all live at the end of the function, if nonopt. */
298 /* List (chain of EXPR_LISTs) of all stack slots in this function.
299 Made for the sake of unshare_all_rtl. */
302 /* Chain of all RTL_EXPRs that have insns in them. */
305 /* Label to jump back to for tail recursion, or 0 if we have
306 not yet needed one for this function. */
307 rtx tail_recursion_label;
309 /* Place after which to insert the tail_recursion_label if we need one. */
310 rtx tail_recursion_reentry;
312 /* Location at which to save the argument pointer if it will need to be
313 referenced. There are two cases where this is done: if nonlocal gotos
314 exist, or if vars stored at an offset from the argument pointer will be
315 needed by inner routines. */
317 rtx arg_pointer_save_area;
319 /* Offset to end of allocated area of stack frame.
320 If stack grows down, this is the address of the last stack slot allocated.
321 If stack grows up, this is the address for the next slot. */
322 HOST_WIDE_INT frame_offset;
324 /* List (chain of TREE_LISTs) of static chains for containing functions.
325 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
326 in an RTL_EXPR in the TREE_VALUE. */
327 static tree context_display;
329 /* List (chain of TREE_LISTs) of trampolines for nested functions.
330 The trampoline sets up the static chain and jumps to the function.
331 We supply the trampoline's address when the function's address is requested.
333 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
334 in an RTL_EXPR in the TREE_VALUE. */
335 static tree trampoline_list;
337 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
338 static rtx parm_birth_insn;
341 /* Nonzero if a stack slot has been generated whose address is not
342 actually valid. It means that the generated rtl must all be scanned
343 to detect and correct the invalid addresses where they occur. */
344 static int invalid_stack_slot;
347 /* Last insn of those whose job was to put parms into their nominal homes. */
348 static rtx last_parm_insn;
350 /* 1 + last pseudo register number possibly used for loading a copy
351 of a parameter of this function. */
354 /* Vector indexed by REGNO, containing location on stack in which
355 to put the parm which is nominally in pseudo register REGNO,
356 if we discover that that parm must go in the stack. The highest
357 element in this vector is one less than MAX_PARM_REG, above. */
358 rtx *parm_reg_stack_loc;
360 /* Nonzero once virtual register instantiation has been done.
361 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
362 static int virtuals_instantiated;
364 /* These variables hold pointers to functions to
365 save and restore machine-specific data,
366 in push_function_context and pop_function_context. */
367 void (*save_machine_status) PROTO((struct function *));
368 void (*restore_machine_status) PROTO((struct function *));
370 /* Nonzero if we need to distinguish between the return value of this function
371 and the return value of a function called by this function. This helps
374 extern int rtx_equal_function_value_matters;
375 extern tree sequence_rtl_expr;
377 /* In order to evaluate some expressions, such as function calls returning
378 structures in memory, we need to temporarily allocate stack locations.
379 We record each allocated temporary in the following structure.
381 Associated with each temporary slot is a nesting level. When we pop up
382 one level, all temporaries associated with the previous level are freed.
383 Normally, all temporaries are freed after the execution of the statement
384 in which they were created. However, if we are inside a ({...}) grouping,
385 the result may be in a temporary and hence must be preserved. If the
386 result could be in a temporary, we preserve it if we can determine which
387 one it is in. If we cannot determine which temporary may contain the
388 result, all temporaries are preserved. A temporary is preserved by
389 pretending it was allocated at the previous nesting level.
391 Automatic variables are also assigned temporary slots, at the nesting
392 level where they are defined. They are marked as "kept" so that
393 free_temp_slots will not free them. */
397 /* Points to next temporary slot. */
398 struct temp_slot *next;
399 /* The rtx to used to reference the slot. */
401 /* The rtx used to represent the address if not the address of the
402 slot above. May be an EXPR_LIST if multiple addresses exist. */
404 /* The alignment (in bits) of the slot. */
406 /* The size, in units, of the slot. */
408 /* The alias set for the slot. If the alias set is zero, we don't
409 know anything about the alias set of the slot. We must only
410 reuse a slot if it is assigned an object of the same alias set.
411 Otherwise, the rest of the compiler may assume that the new use
412 of the slot cannot alias the old use of the slot, which is
413 false. If the slot has alias set zero, then we can't reuse the
414 slot at all, since we have no idea what alias set may have been
415 imposed on the memory. For example, if the stack slot is the
416 call frame for an inlined function, we have no idea what alias
417 sets will be assigned to various pieces of the call frame. */
419 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
421 /* Non-zero if this temporary is currently in use. */
423 /* Non-zero if this temporary has its address taken. */
425 /* Nesting level at which this slot is being used. */
427 /* Non-zero if this should survive a call to free_temp_slots. */
429 /* The offset of the slot from the frame_pointer, including extra space
430 for alignment. This info is for combine_temp_slots. */
431 HOST_WIDE_INT base_offset;
432 /* The size of the slot, including extra space for alignment. This
433 info is for combine_temp_slots. */
434 HOST_WIDE_INT full_size;
437 /* List of all temporaries allocated, both available and in use. */
439 struct temp_slot *temp_slots;
441 /* Current nesting level for temporaries. */
445 /* Current nesting level for variables in a block. */
447 int var_temp_slot_level;
449 /* When temporaries are created by TARGET_EXPRs, they are created at
450 this level of temp_slot_level, so that they can remain allocated
451 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
453 int target_temp_slot_level;
455 /* This structure is used to record MEMs or pseudos used to replace VAR, any
456 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
457 maintain this list in case two operands of an insn were required to match;
458 in that case we must ensure we use the same replacement. */
460 struct fixup_replacement
464 struct fixup_replacement *next;
467 struct insns_for_mem_entry {
468 /* The KEY in HE will be a MEM. */
469 struct hash_entry he;
470 /* These are the INSNS which reference the MEM. */
474 /* Forward declarations. */
476 static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
477 int, struct function *));
478 static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
480 static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
481 static void put_reg_into_stack PROTO((struct function *, rtx, tree,
482 enum machine_mode, enum machine_mode,
484 struct hash_table *));
485 static void fixup_var_refs PROTO((rtx, enum machine_mode, int,
486 struct hash_table *));
487 static struct fixup_replacement
488 *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
489 static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
490 rtx, int, struct hash_table *));
491 static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
492 struct fixup_replacement **));
493 static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
494 static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
495 static rtx fixup_stack_1 PROTO((rtx, rtx));
496 static void optimize_bit_field PROTO((rtx, rtx, rtx *));
497 static void instantiate_decls PROTO((tree, int));
498 static void instantiate_decls_1 PROTO((tree, int));
499 static void instantiate_decl PROTO((rtx, int, int));
500 static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
501 static void delete_handlers PROTO((void));
502 static void pad_to_arg_alignment PROTO((struct args_size *, int));
503 #ifndef ARGS_GROW_DOWNWARD
504 static void pad_below PROTO((struct args_size *, enum machine_mode,
507 #ifdef ARGS_GROW_DOWNWARD
508 static tree round_down PROTO((tree, int));
510 static rtx round_trampoline_addr PROTO((rtx));
511 static tree blocks_nreverse PROTO((tree));
512 static int all_blocks PROTO((tree, tree *));
513 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
514 static int *record_insns PROTO((rtx));
515 static int contains PROTO((rtx, int *));
516 #endif /* HAVE_prologue || HAVE_epilogue */
517 static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
518 static boolean purge_addressof_1 PROTO((rtx *, rtx, int, int,
519 struct hash_table *));
520 static int is_addressof PROTO ((rtx *, void *));
521 static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
524 static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
525 static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
526 static int insns_for_mem_walk PROTO ((rtx *, void *));
527 static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
530 /* Pointer to chain of `struct function' for containing functions. */
531 struct function *outer_function_chain;
533 /* Given a function decl for a containing function,
534 return the `struct function' for it. */
537 find_function_data (decl)
542 for (p = outer_function_chain; p; p = p->next)
549 /* Save the current context for compilation of a nested function.
550 This is called from language-specific code.
551 The caller is responsible for saving any language-specific status,
552 since this function knows only about language-independent variables. */
555 push_function_context_to (context)
558 struct function *p = (struct function *) xmalloc (sizeof (struct function));
560 p->next = outer_function_chain;
561 outer_function_chain = p;
563 p->name = current_function_name;
564 p->decl = current_function_decl;
565 p->pops_args = current_function_pops_args;
566 p->returns_struct = current_function_returns_struct;
567 p->returns_pcc_struct = current_function_returns_pcc_struct;
568 p->returns_pointer = current_function_returns_pointer;
569 p->needs_context = current_function_needs_context;
570 p->calls_setjmp = current_function_calls_setjmp;
571 p->calls_longjmp = current_function_calls_longjmp;
572 p->calls_alloca = current_function_calls_alloca;
573 p->has_nonlocal_label = current_function_has_nonlocal_label;
574 p->has_nonlocal_goto = current_function_has_nonlocal_goto;
575 p->contains_functions = current_function_contains_functions;
576 p->has_computed_jump = current_function_has_computed_jump;
577 p->is_thunk = current_function_is_thunk;
578 p->args_size = current_function_args_size;
579 p->pretend_args_size = current_function_pretend_args_size;
580 p->arg_offset_rtx = current_function_arg_offset_rtx;
581 p->varargs = current_function_varargs;
582 p->stdarg = current_function_stdarg;
583 p->uses_const_pool = current_function_uses_const_pool;
584 p->uses_pic_offset_table = current_function_uses_pic_offset_table;
585 p->internal_arg_pointer = current_function_internal_arg_pointer;
586 p->cannot_inline = current_function_cannot_inline;
587 p->max_parm_reg = max_parm_reg;
588 p->parm_reg_stack_loc = parm_reg_stack_loc;
589 p->outgoing_args_size = current_function_outgoing_args_size;
590 p->return_rtx = current_function_return_rtx;
591 p->nonlocal_goto_handler_slots = nonlocal_goto_handler_slots;
592 p->nonlocal_goto_handler_labels = nonlocal_goto_handler_labels;
593 p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
594 p->nonlocal_labels = nonlocal_labels;
595 p->cleanup_label = cleanup_label;
596 p->return_label = return_label;
597 p->save_expr_regs = save_expr_regs;
598 p->stack_slot_list = stack_slot_list;
599 p->parm_birth_insn = parm_birth_insn;
600 p->frame_offset = frame_offset;
601 p->tail_recursion_label = tail_recursion_label;
602 p->tail_recursion_reentry = tail_recursion_reentry;
603 p->arg_pointer_save_area = arg_pointer_save_area;
604 p->rtl_expr_chain = rtl_expr_chain;
605 p->last_parm_insn = last_parm_insn;
606 p->context_display = context_display;
607 p->trampoline_list = trampoline_list;
608 p->function_call_count = function_call_count;
609 p->temp_slots = temp_slots;
610 p->temp_slot_level = temp_slot_level;
611 p->target_temp_slot_level = target_temp_slot_level;
612 p->var_temp_slot_level = var_temp_slot_level;
613 p->fixup_var_refs_queue = 0;
614 p->epilogue_delay_list = current_function_epilogue_delay_list;
615 p->args_info = current_function_args_info;
616 p->check_memory_usage = current_function_check_memory_usage;
617 p->instrument_entry_exit = current_function_instrument_entry_exit;
619 save_tree_status (p, context);
620 save_storage_status (p);
621 save_emit_status (p);
622 save_expr_status (p);
623 save_stmt_status (p);
624 save_varasm_status (p, context);
625 if (save_machine_status)
626 (*save_machine_status) (p);
630 push_function_context ()
632 push_function_context_to (current_function_decl);
635 /* Restore the last saved context, at the end of a nested function.
636 This function is called from language-specific code. */
639 pop_function_context_from (context)
642 struct function *p = outer_function_chain;
643 struct var_refs_queue *queue;
645 outer_function_chain = p->next;
647 current_function_contains_functions
648 = p->contains_functions || p->inline_obstacks
649 || context == current_function_decl;
650 current_function_has_computed_jump = p->has_computed_jump;
651 current_function_name = p->name;
652 current_function_decl = p->decl;
653 current_function_pops_args = p->pops_args;
654 current_function_returns_struct = p->returns_struct;
655 current_function_returns_pcc_struct = p->returns_pcc_struct;
656 current_function_returns_pointer = p->returns_pointer;
657 current_function_needs_context = p->needs_context;
658 current_function_calls_setjmp = p->calls_setjmp;
659 current_function_calls_longjmp = p->calls_longjmp;
660 current_function_calls_alloca = p->calls_alloca;
661 current_function_has_nonlocal_label = p->has_nonlocal_label;
662 current_function_has_nonlocal_goto = p->has_nonlocal_goto;
663 current_function_is_thunk = p->is_thunk;
664 current_function_args_size = p->args_size;
665 current_function_pretend_args_size = p->pretend_args_size;
666 current_function_arg_offset_rtx = p->arg_offset_rtx;
667 current_function_varargs = p->varargs;
668 current_function_stdarg = p->stdarg;
669 current_function_uses_const_pool = p->uses_const_pool;
670 current_function_uses_pic_offset_table = p->uses_pic_offset_table;
671 current_function_internal_arg_pointer = p->internal_arg_pointer;
672 current_function_cannot_inline = p->cannot_inline;
673 max_parm_reg = p->max_parm_reg;
674 parm_reg_stack_loc = p->parm_reg_stack_loc;
675 current_function_outgoing_args_size = p->outgoing_args_size;
676 current_function_return_rtx = p->return_rtx;
677 nonlocal_goto_handler_slots = p->nonlocal_goto_handler_slots;
678 nonlocal_goto_handler_labels = p->nonlocal_goto_handler_labels;
679 nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
680 nonlocal_labels = p->nonlocal_labels;
681 cleanup_label = p->cleanup_label;
682 return_label = p->return_label;
683 save_expr_regs = p->save_expr_regs;
684 stack_slot_list = p->stack_slot_list;
685 parm_birth_insn = p->parm_birth_insn;
686 frame_offset = p->frame_offset;
687 tail_recursion_label = p->tail_recursion_label;
688 tail_recursion_reentry = p->tail_recursion_reentry;
689 arg_pointer_save_area = p->arg_pointer_save_area;
690 rtl_expr_chain = p->rtl_expr_chain;
691 last_parm_insn = p->last_parm_insn;
692 context_display = p->context_display;
693 trampoline_list = p->trampoline_list;
694 function_call_count = p->function_call_count;
695 temp_slots = p->temp_slots;
696 temp_slot_level = p->temp_slot_level;
697 target_temp_slot_level = p->target_temp_slot_level;
698 var_temp_slot_level = p->var_temp_slot_level;
699 current_function_epilogue_delay_list = p->epilogue_delay_list;
701 current_function_args_info = p->args_info;
702 current_function_check_memory_usage = p->check_memory_usage;
703 current_function_instrument_entry_exit = p->instrument_entry_exit;
705 restore_tree_status (p, context);
706 restore_storage_status (p);
707 restore_expr_status (p);
708 restore_emit_status (p);
709 restore_stmt_status (p);
710 restore_varasm_status (p);
712 if (restore_machine_status)
713 (*restore_machine_status) (p);
715 /* Finish doing put_var_into_stack for any of our variables
716 which became addressable during the nested function. */
717 for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
718 fixup_var_refs (queue->modified, queue->promoted_mode,
719 queue->unsignedp, 0);
723 /* Reset variables that have known state during rtx generation. */
724 rtx_equal_function_value_matters = 1;
725 virtuals_instantiated = 0;
728 void pop_function_context ()
730 pop_function_context_from (current_function_decl);
733 /* Allocate fixed slots in the stack frame of the current function. */
735 /* Return size needed for stack frame based on slots so far allocated.
736 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
737 the caller may have to do that. */
742 #ifdef FRAME_GROWS_DOWNWARD
743 return -frame_offset;
749 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
750 with machine mode MODE.
752 ALIGN controls the amount of alignment for the address of the slot:
753 0 means according to MODE,
754 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
755 positive specifies alignment boundary in bits.
757 We do not round to stack_boundary here. */
760 assign_stack_local (mode, size, align)
761 enum machine_mode mode;
765 register rtx x, addr;
766 int bigend_correction = 0;
773 alignment = GET_MODE_ALIGNMENT (mode);
775 alignment = BIGGEST_ALIGNMENT;
777 /* Allow the target to (possibly) increase the alignment of this
779 type = type_for_mode (mode, 0);
781 alignment = LOCAL_ALIGNMENT (type, alignment);
783 alignment /= BITS_PER_UNIT;
785 else if (align == -1)
787 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
788 size = CEIL_ROUND (size, alignment);
791 alignment = align / BITS_PER_UNIT;
793 #ifdef FRAME_GROWS_DOWNWARD
794 frame_offset -= size;
797 /* Round frame offset to that alignment.
798 We must be careful here, since FRAME_OFFSET might be negative and
799 division with a negative dividend isn't as well defined as we might
800 like. So we instead assume that ALIGNMENT is a power of two and
801 use logical operations which are unambiguous. */
802 #ifdef FRAME_GROWS_DOWNWARD
803 frame_offset = FLOOR_ROUND (frame_offset, alignment);
805 frame_offset = CEIL_ROUND (frame_offset, alignment);
808 /* On a big-endian machine, if we are allocating more space than we will use,
809 use the least significant bytes of those that are allocated. */
810 if (BYTES_BIG_ENDIAN && mode != BLKmode)
811 bigend_correction = size - GET_MODE_SIZE (mode);
813 /* If we have already instantiated virtual registers, return the actual
814 address relative to the frame pointer. */
815 if (virtuals_instantiated)
816 addr = plus_constant (frame_pointer_rtx,
817 (frame_offset + bigend_correction
818 + STARTING_FRAME_OFFSET));
820 addr = plus_constant (virtual_stack_vars_rtx,
821 frame_offset + bigend_correction);
823 #ifndef FRAME_GROWS_DOWNWARD
824 frame_offset += size;
827 x = gen_rtx_MEM (mode, addr);
829 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
834 /* Assign a stack slot in a containing function.
835 First three arguments are same as in preceding function.
836 The last argument specifies the function to allocate in. */
839 assign_outer_stack_local (mode, size, align, function)
840 enum machine_mode mode;
843 struct function *function;
845 register rtx x, addr;
846 int bigend_correction = 0;
849 /* Allocate in the memory associated with the function in whose frame
851 push_obstacks (function->function_obstack,
852 function->function_maybepermanent_obstack);
858 alignment = GET_MODE_ALIGNMENT (mode);
860 alignment = BIGGEST_ALIGNMENT;
862 /* Allow the target to (possibly) increase the alignment of this
864 type = type_for_mode (mode, 0);
866 alignment = LOCAL_ALIGNMENT (type, alignment);
868 alignment /= BITS_PER_UNIT;
870 else if (align == -1)
872 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
873 size = CEIL_ROUND (size, alignment);
876 alignment = align / BITS_PER_UNIT;
878 #ifdef FRAME_GROWS_DOWNWARD
879 function->frame_offset -= size;
882 /* Round frame offset to that alignment. */
883 #ifdef FRAME_GROWS_DOWNWARD
884 function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
886 function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
889 /* On a big-endian machine, if we are allocating more space than we will use,
890 use the least significant bytes of those that are allocated. */
891 if (BYTES_BIG_ENDIAN && mode != BLKmode)
892 bigend_correction = size - GET_MODE_SIZE (mode);
894 addr = plus_constant (virtual_stack_vars_rtx,
895 function->frame_offset + bigend_correction);
896 #ifndef FRAME_GROWS_DOWNWARD
897 function->frame_offset += size;
900 x = gen_rtx_MEM (mode, addr);
902 function->stack_slot_list
903 = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);
/* assign_stack_temp_for_type: allocate (or reuse) a stack temporary of
   MODE/SIZE, tagging it with TYPE's alias set and lifetime class KEEP.
   It first searches the `temp_slots' free list for a compatible slot
   (matching mode, sufficient size and alignment, and — under
   -fstrict-aliasing — the same alias set); otherwise it carves a new
   slot out of the frame via assign_stack_local.
   NOTE(review): this extract is elided (the embedded original line
   numbers skip), so declarations, braces and statements are missing
   between the lines shown; code is kept verbatim — verify against the
   full source before relying on control flow.  */
910 /* Allocate a temporary stack slot and record it for possible later
913 MODE is the machine mode to be given to the returned rtx.
915 SIZE is the size in units of the space required. We do no rounding here
916 since assign_stack_local will do any required rounding.
918 KEEP is 1 if this slot is to be retained after a call to
919 free_temp_slots. Automatic variables for a block are allocated
920 with this flag. KEEP is 2 if we allocate a longer term temporary,
921 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
922 if we are to allocate something at an inner level to be treated as
923 a variable in the block (e.g., a SAVE_EXPR).
925 TYPE is the type that will be used for the stack slot. */
928 assign_stack_temp_for_type (mode, size, keep, type)
929 enum machine_mode mode;
936 struct temp_slot *p, *best_p = 0;
938 /* If SIZE is -1 it means that somebody tried to allocate a temporary
939 of a variable size. */
943 /* If we know the alias set for the memory that will be used, use
944 it. If there's no TYPE, then we don't know anything about the
945 alias set for the memory. */
947 alias_set = get_alias_set (type);
951 align = GET_MODE_ALIGNMENT (mode);
953 align = BIGGEST_ALIGNMENT;
956 type = type_for_mode (mode, 0);
958 align = LOCAL_ALIGNMENT (type, align);
960 /* Try to find an available, already-allocated temporary of the proper
961 mode which meets the size and alignment requirements. Choose the
962 smallest one with the closest alignment. */
963 for (p = temp_slots; p; p = p->next)
964 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
966 && (!flag_strict_aliasing
967 || (alias_set && p->alias_set == alias_set))
968 && (best_p == 0 || best_p->size > p->size
969 || (best_p->size == p->size && best_p->align > p->align)))
/* An exact size/alignment match is as good as it gets — presumably the
   elided body breaks out of the search here.  TODO confirm.  */
971 if (p->align == align && p->size == size)
979 /* Make our best, if any, the one to use. */
982 /* If there are enough aligned bytes left over, make them into a new
983 temp_slot so that the extra bytes don't get wasted. Do this only
984 for BLKmode slots, so that we can be sure of the alignment. */
985 if (GET_MODE (best_p->slot) == BLKmode
986 /* We can't split slots if -fstrict-aliasing because the
987 information about the alias set for the new slot will be
989 && !flag_strict_aliasing)
991 int alignment = best_p->align / BITS_PER_UNIT;
992 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
/* Split the tail of BEST_P into a fresh free slot covering the
   leftover bytes, and shrink BEST_P to the rounded request.  */
994 if (best_p->size - rounded_size >= alignment)
996 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
997 p->in_use = p->addr_taken = 0;
998 p->size = best_p->size - rounded_size;
999 p->base_offset = best_p->base_offset + rounded_size;
1000 p->full_size = best_p->full_size - rounded_size;
1001 p->slot = gen_rtx_MEM (BLKmode,
1002 plus_constant (XEXP (best_p->slot, 0),
1004 p->align = best_p->align;
1007 p->next = temp_slots;
1010 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
1013 best_p->size = rounded_size;
1014 best_p->full_size = rounded_size;
1021 /* If we still didn't find one, make a new temporary. */
1024 HOST_WIDE_INT frame_offset_old = frame_offset;
1026 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
1028 /* We are passing an explicit alignment request to assign_stack_local.
1029 One side effect of that is assign_stack_local will not round SIZE
1030 to ensure the frame offset remains suitably aligned.
1032 So for requests which depended on the rounding of SIZE, we go ahead
1033 and round it now. We also make sure ALIGNMENT is at least
1034 BIGGEST_ALIGNMENT. */
1035 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
1037 p->slot = assign_stack_local (mode,
1039 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
1044 p->alias_set = alias_set;
1046 /* The following slot size computation is necessary because we don't
1047 know the actual size of the temporary slot until assign_stack_local
1048 has performed all the frame alignment and size rounding for the
1049 requested temporary. Note that extra space added for alignment
1050 can be either above or below this stack slot depending on which
1051 way the frame grows. We include the extra space if and only if it
1052 is above this slot. */
1053 #ifdef FRAME_GROWS_DOWNWARD
1054 p->size = frame_offset_old - frame_offset;
1059 /* Now define the fields used by combine_temp_slots. */
1060 #ifdef FRAME_GROWS_DOWNWARD
1061 p->base_offset = frame_offset;
1062 p->full_size = frame_offset_old - frame_offset;
1064 p->base_offset = frame_offset_old;
1065 p->full_size = frame_offset - frame_offset_old;
1068 p->next = temp_slots;
/* The slot's lifetime level depends on KEEP: KEEP==2 -> target level,
   KEEP==3 -> variable level, otherwise the current temp level.
   The selecting conditionals are among the elided lines — verify.  */
1074 p->rtl_expr = sequence_rtl_expr;
1078 p->level = target_temp_slot_level;
1083 p->level = var_temp_slot_level;
1088 p->level = temp_slot_level;
1092 /* We may be reusing an old slot, so clear any MEM flags that may have been
1094 RTX_UNCHANGING_P (p->slot) = 0;
1095 MEM_IN_STRUCT_P (p->slot) = 0;
1096 MEM_SCALAR_P (p->slot) = 0;
1097 MEM_ALIAS_SET (p->slot) = 0;
/* assign_stack_temp: convenience wrapper around
   assign_stack_temp_for_type with no TYPE information (NULL_TREE),
   so the slot gets no alias-set tagging from a tree type.  */
1101 /* Allocate a temporary stack slot and record it for possible later
1102 reuse. First three arguments are same as in preceding function. */
1105 assign_stack_temp (mode, size, keep)
1106 enum machine_mode mode;
1110 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
/* assign_temp: allocate a temporary for tree TYPE.  BLKmode values and
   MEMORY_REQUIRED callers get a stack slot via
   assign_stack_temp_for_type (with a Chill-style TYPE_ARRAY_MAX_SIZE
   fallback for variable-sized arrays); everything else gets a fresh
   pseudo register, possibly mode-promoted unless DONT_PROMOTE.
   NOTE(review): elided extract — the `*/' closing the header comment
   and several declarations/braces are among the missing lines.  */
1113 /* Assign a temporary of given TYPE.
1114 KEEP is as for assign_stack_temp.
1115 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
1116 it is 0 if a register is OK.
1117 DONT_PROMOTE is 1 if we should not promote values in register
1121 assign_temp (type, keep, memory_required, dont_promote)
1124 int memory_required;
1127 enum machine_mode mode = TYPE_MODE (type);
1128 int unsignedp = TREE_UNSIGNED (type);
1130 if (mode == BLKmode || memory_required)
1132 HOST_WIDE_INT size = int_size_in_bytes (type);
1135 /* Unfortunately, we don't yet know how to allocate variable-sized
1136 temporaries. However, sometimes we have a fixed upper limit on
1137 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
1138 instead. This is the case for Chill variable-sized strings. */
1139 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
1140 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
1141 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
1142 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
1144 tmp = assign_stack_temp_for_type (mode, size, keep, type);
1145 MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
1149 #ifndef PROMOTE_FOR_CALL_ONLY
1151 mode = promote_mode (type, mode, &unsignedp, 0);
1154 return gen_reg_rtx (mode);
/* combine_temp_slots: merge free, adjacent BLKmode temp slots into
   larger ones using their base_offset/full_size bookkeeping.  Skipped
   entirely under -fstrict-aliasing (merging would lose per-slot alias
   sets) and when the slot list is large without expensive
   optimizations enabled.  */
1157 /* Combine temporary stack slots which are adjacent on the stack.
1159 This allows for better use of already allocated stack space. This is only
1160 done for BLKmode slots because we can be sure that we won't have alignment
1161 problems in this case. */
1164 combine_temp_slots ()
1166 struct temp_slot *p, *q;
1167 struct temp_slot *prev_p, *prev_q;
1170 /* We can't combine slots, because the information about which slot
1171 is in which alias set will be lost. */
1172 if (flag_strict_aliasing)
1175 /* If there are a lot of temp slots, don't do anything unless
1176 high levels of optimization. */
1177 if (! flag_expensive_optimizations)
1178 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1179 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
/* O(n^2) pairwise scan: for each free BLKmode slot P, look for a free
   BLKmode slot Q that abuts it on either side and absorb one into the
   other.  The list unlinking lives in the elided lines.  */
1182 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
1186 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
1187 for (q = p->next, prev_q = p; q; q = prev_q->next)
1190 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
1192 if (p->base_offset + p->full_size == q->base_offset)
1194 /* Q comes after P; combine Q into P. */
1196 p->full_size += q->full_size;
1199 else if (q->base_offset + q->full_size == p->base_offset)
1201 /* P comes after Q; combine P into Q. */
1203 q->full_size += p->full_size;
1208 /* Either delete Q or advance past it. */
1210 prev_q->next = q->next;
1214 /* Either delete P or advance past it. */
1218 prev_p->next = p->next;
1220 temp_slots = p->next;
/* find_temp_slot_from_address: map address rtx X back to its
   temp_slot.  Matches either the slot's own address, a
   (plus virtual_stack_vars_rtx const) that falls inside the slot's
   [base_offset, base_offset+full_size) range, or any alias recorded
   on the slot's `address' EXPR_LIST (see update_temp_slot_address).
   Returns 0 if no slot matches (return paths are elided here).  */
1227 /* Find the temp slot corresponding to the object at address X. */
1229 static struct temp_slot *
1230 find_temp_slot_from_address (x)
1233 struct temp_slot *p;
1236 for (p = temp_slots; p; p = p->next)
1241 else if (XEXP (p->slot, 0) == x
1243 || (GET_CODE (x) == PLUS
1244 && XEXP (x, 0) == virtual_stack_vars_rtx
1245 && GET_CODE (XEXP (x, 1)) == CONST_INT
1246 && INTVAL (XEXP (x, 1)) >= p->base_offset
1247 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
1250 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
1251 for (next = p->address; next; next = XEXP (next, 1))
1252 if (XEXP (next, 0) == x)
/* update_temp_slot_address: record NEW as an additional address by
   which the temp slot currently known by OLD may be referenced.  The
   first alias is stored directly in p->address; subsequent ones turn
   it into an EXPR_LIST chain.  */
1259 /* Indicate that NEW is an alternate way of referring to the temp slot
1260 that previously was known by OLD. */
1263 update_temp_slot_address (old, new)
1266 struct temp_slot *p = find_temp_slot_from_address (old);
1268 /* If none, return. Else add NEW as an alias. */
1271 else if (p->address == 0)
1275 if (GET_CODE (p->address) != EXPR_LIST)
1276 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1278 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
/* mark_temp_addr_taken: if MEM rtx X refers into a temp slot, flag
   that slot as address-taken (the actual `addr_taken = 1' assignment
   is among the elided lines — presumably guarded on P != 0).  */
1282 /* If X could be a reference to a temporary slot, mark the fact that its
1283 address was taken. */
1286 mark_temp_addr_taken (x)
1289 struct temp_slot *p;
1294 /* If X is not in memory or is at a constant address, it cannot be in
1295 a temporary slot. */
1296 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1299 p = find_temp_slot_from_address (XEXP (x, 0));
/* preserve_temp_slots: promote temp slots so they survive the current
   statement level — used when an ({...}) statement-expression returns
   a value in memory.  If X pinpoints one of our slots, only that slot
   (plus address-taken slots at its level) is promoted; otherwise all
   non-kept slots at the current level are.  The actual level
   decrements are in the elided lines.  */
1304 /* If X could be a reference to a temporary slot, mark that slot as
1305 belonging to the level one higher than the current level. If X
1306 matched one of our slots, just mark that one. Otherwise, we can't
1307 easily predict which it is, so upgrade all of them. Kept slots
1308 need not be touched.
1310 This is called when an ({...}) construct occurs and a statement
1311 returns a value in memory. */
1314 preserve_temp_slots (x)
1317 struct temp_slot *p = 0;
1319 /* If there is no result, we still might have some objects whose address
1320 were taken, so we need to make sure they stay around. */
1323 for (p = temp_slots; p; p = p->next)
1324 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1330 /* If X is a register that is being used as a pointer, see if we have
1331 a temporary slot we know it points to. To be consistent with
1332 the code below, we really should preserve all non-kept slots
1333 if we can't find a match, but that seems to be much too costly. */
1334 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1335 p = find_temp_slot_from_address (x);
1337 /* If X is not in memory or is at a constant address, it cannot be in
1338 a temporary slot, but it can contain something whose address was
1340 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1342 for (p = temp_slots; p; p = p->next)
1343 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1349 /* First see if we can find a match. */
1351 p = find_temp_slot_from_address (XEXP (x, 0));
1355 /* Move everything at our level whose address was taken to our new
1356 level in case we used its address. */
1357 struct temp_slot *q;
1359 if (p->level == temp_slot_level)
1361 for (q = temp_slots; q; q = q->next)
1362 if (q != p && q->addr_taken && q->level == p->level)
1371 /* Otherwise, preserve all non-kept slots at this level. */
1372 for (p = temp_slots; p; p = p->next)
1373 if (p->in_use && p->level == temp_slot_level && ! p->keep)
/* preserve_rtl_expr_result: if X (the value of an RTL_EXPR) lives in a
   temp slot tied to that RTL_EXPR, lift the slot to the current level
   (and presumably detach it from the RTL_EXPR — the detaching lines
   are elided) so it is not freed with the RTL_EXPR's own temps.  */
1377 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1378 with that RTL_EXPR, promote it into a temporary slot at the present
1379 level so it will not be freed when we free slots made in the
1383 preserve_rtl_expr_result (x)
1386 struct temp_slot *p;
1388 /* If X is not in memory or is at a constant address, it cannot be in
1389 a temporary slot. */
1390 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1393 /* If we can find a match, move it to our level unless it is already at
1395 p = find_temp_slot_from_address (XEXP (x, 0));
1398 p->level = MIN (p->level, temp_slot_level);
/* free_temp_slots (function header line itself is elided here):
   release every non-kept slot at the current level that is not owned
   by a pending RTL_EXPR, then coalesce adjacent free slots.  Called at
   the end of each statement's RTL generation.  */
1405 /* Free all temporaries used so far. This is normally called at the end
1406 of generating code for a statement. Don't free any temporaries
1407 currently in use for an RTL_EXPR that hasn't yet been emitted.
1408 We could eventually do better than this since it can be reused while
1409 generating the same RTL_EXPR, but this is complex and probably not
1415 struct temp_slot *p;
1417 for (p = temp_slots; p; p = p->next)
1418 if (p->in_use && p->level == temp_slot_level && ! p->keep
1419 && p->rtl_expr == 0)
1422 combine_temp_slots ();
/* free_temps_for_rtl_expr: release (or, if promoted above the current
   level, merely disown) every temp slot tied to RTL_EXPR node T, then
   coalesce free slots.  */
1425 /* Free all temporary slots used in T, an RTL_EXPR node. */
1428 free_temps_for_rtl_expr (t)
1431 struct temp_slot *p;
1433 for (p = temp_slots; p; p = p->next)
1434 if (p->rtl_expr == t)
1436 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1437 needs to be preserved. This can happen if a temporary in
1438 the RTL_EXPR was addressed; preserve_temp_slots will move
1439 the temporary into a higher level. */
1440 if (temp_slot_level <= p->level)
1443 p->rtl_expr = NULL_TREE;
1446 combine_temp_slots ();
/* mark_all_temps_used: pin every temp slot (in_use, keep) and clamp
   its level to the current one, preventing any reuse until this
   nesting level is popped.  */
1449 /* Mark all temporaries ever allocated in this function as not suitable
1450 for reuse until the current level is exited. */
1453 mark_all_temps_used ()
1455 struct temp_slot *p;
1457 for (p = temp_slots; p; p = p->next)
1459 p->in_use = p->keep = 1;
1460 p->level = MIN (p->level, temp_slot_level);
/* push_temp_slots (body elided here) bumps temp_slot_level;
   push_temp_slots_for_block additionally records the new level as the
   allocation level for block-scope variables (var_temp_slot_level).
   NOTE(review): the `*/' closing the second comment below is among the
   elided lines.  */
1464 /* Push deeper into the nesting level for stack temporaries. */
1472 /* Likewise, but save the new level as the place to allocate variables
1476 push_temp_slots_for_block ()
1480 var_temp_slot_level = temp_slot_level;
/* push_temp_slots_for_target: push a new temp level (the call itself
   is elided) and remember it as the level for TARGET_EXPR temporaries
   (target_temp_slot_level).  */
1483 /* Likewise, but save the new level as the place to allocate temporaries
1484 for TARGET_EXPRs. */
1487 push_temp_slots_for_target ()
1491 target_temp_slot_level = temp_slot_level;
/* get_target_temp_slot_level: accessor for target_temp_slot_level,
   intended only for save/restore pairs with the setter below.  */
1494 /* Set and get the value of target_temp_slot_level. The only
1495 permitted use of these functions is to save and restore this value. */
1498 get_target_temp_slot_level ()
1500 return target_temp_slot_level;
/* set_target_temp_slot_level: restore a level previously obtained
   from get_target_temp_slot_level.  */
1504 set_target_temp_slot_level (level)
1507 target_temp_slot_level = level;
/* pop_temp_slots (function header line elided): free the slots in use
   at the current level that are not owned by a pending RTL_EXPR,
   coalesce free slots, then (presumably, in elided lines) decrement
   temp_slot_level.  */
1510 /* Pop a temporary nesting level. All slots in use in the current level
1516 struct temp_slot *p;
1518 for (p = temp_slots; p; p = p->next)
1519 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1522 combine_temp_slots ();
/* init_temp_slots (function header line elided): reset all temp-slot
   nesting levels to zero at the start of a function; the temp_slots
   list itself is presumably cleared in an elided line.  */
1527 /* Initialize temporary slots. */
1532 /* We have not allocated any temporaries yet. */
1534 temp_slot_level = 0;
1535 var_temp_slot_level = 0;
1536 target_temp_slot_level = 0;
/* put_var_into_stack: demote DECL from a pseudo register to a stack
   slot once its address is needed.  Handles SAVE_EXPRs, variables
   inherited from an outer (nested/inlined) function, plain REGs
   (optionally deferring via gen_mem_addressof), and CONCAT pairs
   (e.g. complex values), which are spilled as two consecutive slots
   and rewritten into one combined MEM.  Finally emits a
   checker-annotation library call under -fcheck-memory-usage.
   NOTE(review): elided extract — return type, braces and several
   guard/else lines are missing; code kept verbatim.  */
1539 /* Retroactively move an auto variable from a register to a stack slot.
1540 This is done when an address-reference to the variable is seen. */
1543 put_var_into_stack (decl)
1547 enum machine_mode promoted_mode, decl_mode;
1548 struct function *function = 0;
1550 int can_use_addressof;
1552 context = decl_function_context (decl);
1554 /* Get the current rtl used for this object and its original mode. */
1555 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1557 /* No need to do anything if decl has no rtx yet
1558 since in that case caller is setting TREE_ADDRESSABLE
1559 and a stack slot will be assigned when the rtl is made. */
1563 /* Get the declared mode for this object. */
1564 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1565 : DECL_MODE (decl));
1566 /* Get the mode it's actually stored in. */
1567 promoted_mode = GET_MODE (reg);
1569 /* If this variable comes from an outer function,
1570 find that function's saved context. */
1571 if (context != current_function_decl && context != inline_function_decl)
1572 for (function = outer_function_chain; function; function = function->next)
1573 if (function->decl == context)
1576 /* If this is a variable-size object with a pseudo to address it,
1577 put that pseudo into the stack, if the var is nonlocal. */
1578 if (DECL_NONLOCAL (decl)
1579 && GET_CODE (reg) == MEM
1580 && GET_CODE (XEXP (reg, 0)) == REG
1581 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1583 reg = XEXP (reg, 0)
1584 decl_mode = promoted_mode = GET_MODE (reg);
/* can_use_addressof is computed from a (partly elided) conjunction;
   the visible conjuncts forbid promoted modes and, when
   NON_SAVING_SETJMP holds, functions that call setjmp.  */
1590 /* FIXME make it work for promoted modes too */
1591 && decl_mode == promoted_mode
1592 #ifdef NON_SAVING_SETJMP
1593 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1597 /* If we can't use ADDRESSOF, make sure we see through one we already
1599 if (! can_use_addressof && GET_CODE (reg) == MEM
1600 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1601 reg = XEXP (XEXP (reg, 0), 0);
1603 /* Now we should have a value that resides in one or more pseudo regs. */
1605 if (GET_CODE (reg) == REG)
1607 /* If this variable lives in the current function and we don't need
1608 to put things in the stack for the sake of setjmp, try to keep it
1609 in a register until we know we actually need the address. */
1610 if (can_use_addressof)
1611 gen_mem_addressof (reg, decl);
1613 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1614 promoted_mode, decl_mode,
1615 TREE_SIDE_EFFECTS (decl), 0,
1616 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1619 else if (GET_CODE (reg) == CONCAT)
1621 /* A CONCAT contains two pseudos; put them both in the stack.
1622 We do it so they end up consecutive. */
1623 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1624 tree part_type = TREE_TYPE (TREE_TYPE (decl));
1625 #ifdef FRAME_GROWS_DOWNWARD
1626 /* Since part 0 should have a lower address, do it second. */
1627 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1628 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1629 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1631 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1632 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1633 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1636 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1637 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1638 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1640 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1641 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1642 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1646 /* Change the CONCAT into a combined MEM for both parts. */
1647 PUT_CODE (reg, MEM);
1648 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
1649 MEM_ALIAS_SET (reg) = get_alias_set (decl);
1651 /* The two parts are in memory order already.
1652 Use the lower parts address as ours. */
1653 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1654 /* Prevent sharing of rtl that might lose. */
1655 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1656 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1661 if (current_function_check_memory_usage)
1662 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1663 XEXP (reg, 0), Pmode,
1664 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1665 TYPE_MODE (sizetype),
1666 GEN_INT (MEMORY_USE_RW),
1667 TYPE_MODE (integer_type_node));
/* put_reg_into_stack: turn pseudo REG into a MEM over a stack slot in
   FUNCTION's frame (or the current frame when FUNCTION == 0), reusing
   the parameter spill slot when REG was a parameter.  The REG rtx is
   mutated in place into a MEM so all existing RTL sharing sees the
   change; references are then repaired either immediately
   (fixup_var_refs) or queued on the owning function's
   fixup_var_refs_queue when the variable is inherited.
   NOTE(review): elided extract — parameter declarations, braces and
   some else-arms are missing.  */
1670 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1671 into the stack frame of FUNCTION (0 means the current function).
1672 DECL_MODE is the machine mode of the user-level data type.
1673 PROMOTED_MODE is the machine mode of the register.
1674 VOLATILE_P is nonzero if this is for a "volatile" decl.
1675 USED_P is nonzero if this reg might have already been used in an insn. */
1678 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1679 original_regno, used_p, ht)
1680 struct function *function;
1683 enum machine_mode promoted_mode, decl_mode;
1687 struct hash_table *ht;
1690 int regno = original_regno;
1693 regno = REGNO (reg);
/* Pick/allocate the stack home: a parameter's dedicated spill slot if
   REGNO is a parm reg, otherwise a fresh (possibly outer-function)
   stack local of DECL_MODE's size.  */
1697 if (regno < function->max_parm_reg)
1698 new = function->parm_reg_stack_loc[regno];
1700 new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
1705 if (regno < max_parm_reg)
1706 new = parm_reg_stack_loc[regno];
1708 new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
1711 PUT_MODE (reg, decl_mode);
1712 XEXP (reg, 0) = XEXP (new, 0);
1713 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1714 MEM_VOLATILE_P (reg) = volatile_p;
1715 PUT_CODE (reg, MEM);
1717 /* If this is a memory ref that contains aggregate components,
1718 mark it as such for cse and loop optimize. If we are reusing a
1719 previously generated stack slot, then we need to copy the bit in
1720 case it was set for other reasons. For instance, it is set for
1721 __builtin_va_alist. */
1722 MEM_SET_IN_STRUCT_P (reg,
1723 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1724 MEM_ALIAS_SET (reg) = get_alias_set (type);
1726 /* Now make sure that all refs to the variable, previously made
1727 when it was a register, are fixed up to be valid again. */
1729 if (used_p && function != 0)
1731 struct var_refs_queue *temp;
1733 /* Variable is inherited; fix it up when we get back to its function. */
1734 push_obstacks (function->function_obstack,
1735 function->function_maybepermanent_obstack);
1737 /* See comment in restore_tree_status in tree.c for why this needs to be
1738 on saveable obstack. */
1740 = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
1741 temp->modified = reg;
1742 temp->promoted_mode = promoted_mode;
1743 temp->unsignedp = TREE_UNSIGNED (type);
1744 temp->next = function->fixup_var_refs_queue;
1745 function->fixup_var_refs_queue = temp;
1749 /* Variable is local; fix it up now. */
1750 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
/* fixup_var_refs: after VAR has been turned from a REG into a MEM,
   rewrite every reference to it: in the main insn chain, in all
   pending sequences on the sequence stack, in all unemitted RTL_EXPR
   sequences, and in the exception-handling catch clauses.  When HT is
   non-null it indexes the insns mentioning VAR, letting
   fixup_var_refs_insns skip the full-chain walk.  */
1754 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1756 enum machine_mode promoted_mode;
1758 struct hash_table *ht;
1761 rtx first_insn = get_insns ();
1762 struct sequence_stack *stack = sequence_stack;
1763 tree rtl_exps = rtl_expr_chain;
1765 /* Must scan all insns for stack-refs that exceed the limit. */
1766 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
1768 /* If there's a hash table, it must record all uses of VAR. */
1772 /* Scan all pending sequences too. */
1773 for (; stack; stack = stack->next)
1775 push_to_sequence (stack->first);
1776 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1777 stack->first, stack->next != 0, 0);
1778 /* Update remembered end of sequence
1779 in case we added an insn at the end. */
1780 stack->last = get_last_insn ();
1784 /* Scan all waiting RTL_EXPRs too. */
1785 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1787 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1788 if (seq != const0_rtx && seq != 0)
1790 push_to_sequence (seq);
1791 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
1797 /* Scan the catch clauses for exception handling too. */
1798 push_to_sequence (catch_clauses);
1799 fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
/* find_fixup_replacement: look X up in the *REPLACEMENTS list by
   pointer identity on p->old; if absent, obstack-allocate a new entry,
   push it on the list head, and return it (with p->new presumably
   zeroed in an elided line).  */
1804 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1805 some part of an insn. Return a struct fixup_replacement whose OLD
1806 value is equal to X. Allocate a new structure if no such entry exists. */
1808 static struct fixup_replacement *
1809 find_fixup_replacement (replacements, x)
1810 struct fixup_replacement **replacements;
1813 struct fixup_replacement *p;
1815 /* See if we have already replaced this. */
1816 for (p = *replacements; p && p->old != x; p = p->next)
1821 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1824 p->next = *replacements;
/* fixup_var_refs_insns: walk the insn chain from INSN (or only the
   insns recorded for VAR in hash table HT) and repair each reference
   to VAR now that it lives in memory.  Deletes now-useless CLOBBERs
   and self-copies of VAR, guards call-return pseudos on
   SMALL_REGISTER_CLASSES targets with an intermediate pseudo, defers
   per-pattern rewriting to fixup_var_refs_1 (collecting a
   fixup_replacement list whose pseudo replacements are then loaded
   via emitted moves/conversions), and finally fixes SUBREGs in
   REG_NOTES.  NOTE(review): heavily elided — loop headers, braces and
   several conditions are among the missing lines; code kept verbatim.  */
1831 /* Scan the insn-chain starting with INSN for refs to VAR
1832 and fix them up. TOPLEVEL is nonzero if this chain is the
1833 main chain of insns for the current function. */
1836 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
1838 enum machine_mode promoted_mode;
1842 struct hash_table *ht;
1845 rtx insn_list = NULL_RTX;
1847 /* If we already know which INSNs reference VAR there's no need
1848 to walk the entire instruction chain. */
1851 insn_list = ((struct insns_for_mem_entry *)
1852 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1853 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1854 insn_list = XEXP (insn_list, 1);
1859 rtx next = NEXT_INSN (insn);
1860 rtx set, prev, prev_set;
1863 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1865 /* If this is a CLOBBER of VAR, delete it.
1867 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1868 and REG_RETVAL notes too. */
1869 if (GET_CODE (PATTERN (insn)) == CLOBBER
1870 && (XEXP (PATTERN (insn), 0) == var
1871 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1872 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1873 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1875 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1876 /* The REG_LIBCALL note will go away since we are going to
1877 turn INSN into a NOTE, so just delete the
1878 corresponding REG_RETVAL note. */
1879 remove_note (XEXP (note, 0),
1880 find_reg_note (XEXP (note, 0), REG_RETVAL,
1883 /* In unoptimized compilation, we shouldn't call delete_insn
1884 except in jump.c doing warnings. */
1885 PUT_CODE (insn, NOTE);
1886 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1887 NOTE_SOURCE_FILE (insn) = 0;
1890 /* The insn to load VAR from a home in the arglist
1891 is now a no-op. When we see it, just delete it.
1892 Similarly if this is storing VAR from a register from which
1893 it was loaded in the previous insn. This will occur
1894 when an ADDRESSOF was made for an arglist slot. */
1896 && (set = single_set (insn)) != 0
1897 && SET_DEST (set) == var
1898 /* If this represents the result of an insn group,
1899 don't delete the insn. */
1900 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1901 && (rtx_equal_p (SET_SRC (set), var)
1902 || (GET_CODE (SET_SRC (set)) == REG
1903 && (prev = prev_nonnote_insn (insn)) != 0
1904 && (prev_set = single_set (prev)) != 0
1905 && SET_DEST (prev_set) == SET_SRC (set)
1906 && rtx_equal_p (SET_SRC (prev_set), var))))
1908 /* In unoptimized compilation, we shouldn't call delete_insn
1909 except in jump.c doing warnings. */
1910 PUT_CODE (insn, NOTE);
1911 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1912 NOTE_SOURCE_FILE (insn) = 0;
1913 if (insn == last_parm_insn)
1914 last_parm_insn = PREV_INSN (next);
1918 struct fixup_replacement *replacements = 0;
1919 rtx next_insn = NEXT_INSN (insn);
1921 if (SMALL_REGISTER_CLASSES)
1923 /* If the insn that copies the results of a CALL_INSN
1924 into a pseudo now references VAR, we have to use an
1925 intermediate pseudo since we want the life of the
1926 return value register to be only a single insn.
1928 If we don't use an intermediate pseudo, such things as
1929 address computations to make the address of VAR valid
1930 if it is not can be placed between the CALL_INSN and INSN.
1932 To make sure this doesn't happen, we record the destination
1933 of the CALL_INSN and see if the next insn uses both that
1936 if (call_dest != 0 && GET_CODE (insn) == INSN
1937 && reg_mentioned_p (var, PATTERN (insn))
1938 && reg_mentioned_p (call_dest, PATTERN (insn)))
1940 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1942 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1944 PATTERN (insn) = replace_rtx (PATTERN (insn),
1948 if (GET_CODE (insn) == CALL_INSN
1949 && GET_CODE (PATTERN (insn)) == SET)
1950 call_dest = SET_DEST (PATTERN (insn));
1951 else if (GET_CODE (insn) == CALL_INSN
1952 && GET_CODE (PATTERN (insn)) == PARALLEL
1953 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1954 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1959 /* See if we have to do anything to INSN now that VAR is in
1960 memory. If it needs to be loaded into a pseudo, use a single
1961 pseudo for the entire insn in case there is a MATCH_DUP
1962 between two operands. We pass a pointer to the head of
1963 a list of struct fixup_replacements. If fixup_var_refs_1
1964 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1965 it will record them in this list.
1967 If it allocated a pseudo for any replacement, we copy into
1970 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1973 /* If this is last_parm_insn, and any instructions were output
1974 after it to fix it up, then we must set last_parm_insn to
1975 the last such instruction emitted. */
1976 if (insn == last_parm_insn)
1977 last_parm_insn = PREV_INSN (next_insn);
1979 while (replacements)
1981 if (GET_CODE (replacements->new) == REG)
1986 /* OLD might be a (subreg (mem)). */
1987 if (GET_CODE (replacements->old) == SUBREG)
1989 = fixup_memory_subreg (replacements->old, insn, 0);
1992 = fixup_stack_1 (replacements->old, insn);
1994 insert_before = insn;
1996 /* If we are changing the mode, do a conversion.
1997 This might be wasteful, but combine.c will
1998 eliminate much of the waste. */
2000 if (GET_MODE (replacements->new)
2001 != GET_MODE (replacements->old))
2004 convert_move (replacements->new,
2005 replacements->old, unsignedp);
2006 seq = gen_sequence ();
2010 seq = gen_move_insn (replacements->new,
2013 emit_insn_before (seq, insert_before);
2016 replacements = replacements->next;
2020 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
2021 But don't touch other insns referred to by reg-notes;
2022 we will get them elsewhere. */
2023 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2024 if (GET_CODE (note) != INSN_LIST)
2026 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
/* Hash-table driven walk: advance to the next recorded insn.  */
2033 insn = XEXP (insn_list, 0);
2034 insn_list = XEXP (insn_list, 1);
2041 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
2042 See if the rtx expression at *LOC in INSN needs to be changed.
2044 REPLACEMENTS is a pointer to a list head that starts out zero, but may
2045 contain a list of original rtx's and replacements. If we find that we need
2046 to modify this insn by replacing a memory reference with a pseudo or by
2047 making a new MEM to implement a SUBREG, we consult that list to see if
2048 we have already chosen a replacement. If none has already been allocated,
2049 we allocate it and update the list. fixup_var_refs_insns will copy VAR
2050 or the SUBREG, as appropriate, to the pseudo. */
2053 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
2055 enum machine_mode promoted_mode;
2058 struct fixup_replacement **replacements;
2061 register rtx x = *loc;
2062 RTX_CODE code = GET_CODE (x);
2064 register rtx tem, tem1;
2065 struct fixup_replacement *replacement;
2070 if (XEXP (x, 0) == var)
2072 /* Prevent sharing of rtl that might lose. */
2073 rtx sub = copy_rtx (XEXP (var, 0));
2075 if (! validate_change (insn, loc, sub, 0))
2077 rtx y = gen_reg_rtx (GET_MODE (sub));
2080 /* We should be able to replace with a register or all is lost.
2081 Note that we can't use validate_change to verify this, since
2082 we're not caring for replacing all dups simultaneously. */
2083 if (! validate_replace_rtx (*loc, y, insn))
2086 /* Careful! First try to recognize a direct move of the
2087 value, mimicking how things are done in gen_reload wrt
2088 PLUS. Consider what happens when insn is a conditional
2089 move instruction and addsi3 clobbers flags. */
2092 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
2093 seq = gen_sequence ();
2096 if (recog_memoized (new_insn) < 0)
2098 /* That failed. Fall back on force_operand and hope. */
2101 force_operand (sub, y);
2102 seq = gen_sequence ();
2107 /* Don't separate setter from user. */
2108 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
2109 insn = PREV_INSN (insn);
2112 emit_insn_before (seq, insn);
2120 /* If we already have a replacement, use it. Otherwise,
2121 try to fix up this address in case it is invalid. */
2123 replacement = find_fixup_replacement (replacements, var);
2124 if (replacement->new)
2126 *loc = replacement->new;
2130 *loc = replacement->new = x = fixup_stack_1 (x, insn);
2132 /* Unless we are forcing memory to register or we changed the mode,
2133 we can leave things the way they are if the insn is valid. */
2135 INSN_CODE (insn) = -1;
2136 if (! flag_force_mem && GET_MODE (x) == promoted_mode
2137 && recog_memoized (insn) >= 0)
2140 *loc = replacement->new = gen_reg_rtx (promoted_mode);
2144 /* If X contains VAR, we need to unshare it here so that we update
2145 each occurrence separately. But all identical MEMs in one insn
2146 must be replaced with the same rtx because of the possibility of
2149 if (reg_mentioned_p (var, x))
2151 replacement = find_fixup_replacement (replacements, x);
2152 if (replacement->new == 0)
2153 replacement->new = copy_most_rtx (x, var);
2155 *loc = x = replacement->new;
2171 /* Note that in some cases those types of expressions are altered
2172 by optimize_bit_field, and do not survive to get here. */
2173 if (XEXP (x, 0) == var
2174 || (GET_CODE (XEXP (x, 0)) == SUBREG
2175 && SUBREG_REG (XEXP (x, 0)) == var))
2177 /* Get TEM as a valid MEM in the mode presently in the insn.
2179 We don't worry about the possibility of MATCH_DUP here; it
2180 is highly unlikely and would be tricky to handle. */
2183 if (GET_CODE (tem) == SUBREG)
2185 if (GET_MODE_BITSIZE (GET_MODE (tem))
2186 > GET_MODE_BITSIZE (GET_MODE (var)))
2188 replacement = find_fixup_replacement (replacements, var);
2189 if (replacement->new == 0)
2190 replacement->new = gen_reg_rtx (GET_MODE (var));
2191 SUBREG_REG (tem) = replacement->new;
2194 tem = fixup_memory_subreg (tem, insn, 0);
2197 tem = fixup_stack_1 (tem, insn);
2199 /* Unless we want to load from memory, get TEM into the proper mode
2200 for an extract from memory. This can only be done if the
2201 extract is at a constant position and length. */
2203 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2204 && GET_CODE (XEXP (x, 2)) == CONST_INT
2205 && ! mode_dependent_address_p (XEXP (tem, 0))
2206 && ! MEM_VOLATILE_P (tem))
2208 enum machine_mode wanted_mode = VOIDmode;
2209 enum machine_mode is_mode = GET_MODE (tem);
2210 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2213 if (GET_CODE (x) == ZERO_EXTRACT)
2215 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
2216 if (wanted_mode == VOIDmode)
2217 wanted_mode = word_mode;
2221 if (GET_CODE (x) == SIGN_EXTRACT)
2223 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
2224 if (wanted_mode == VOIDmode)
2225 wanted_mode = word_mode;
2228 /* If we have a narrower mode, we can do something. */
2229 if (wanted_mode != VOIDmode
2230 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2232 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2233 rtx old_pos = XEXP (x, 2);
2236 /* If the bytes and bits are counted differently, we
2237 must adjust the offset. */
2238 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2239 offset = (GET_MODE_SIZE (is_mode)
2240 - GET_MODE_SIZE (wanted_mode) - offset);
2242 pos %= GET_MODE_BITSIZE (wanted_mode);
2244 newmem = gen_rtx_MEM (wanted_mode,
2245 plus_constant (XEXP (tem, 0), offset));
2246 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2247 MEM_COPY_ATTRIBUTES (newmem, tem);
2249 /* Make the change and see if the insn remains valid. */
2250 INSN_CODE (insn) = -1;
2251 XEXP (x, 0) = newmem;
2252 XEXP (x, 2) = GEN_INT (pos);
2254 if (recog_memoized (insn) >= 0)
2257 /* Otherwise, restore old position. XEXP (x, 0) will be
2259 XEXP (x, 2) = old_pos;
2263 /* If we get here, the bitfield extract insn can't accept a memory
2264 reference. Copy the input into a register. */
2266 tem1 = gen_reg_rtx (GET_MODE (tem));
2267 emit_insn_before (gen_move_insn (tem1, tem), insn);
2274 if (SUBREG_REG (x) == var)
2276 /* If this is a special SUBREG made because VAR was promoted
2277 from a wider mode, replace it with VAR and call ourself
2278 recursively, this time saying that the object previously
2279 had its current mode (by virtue of the SUBREG). */
2281 if (SUBREG_PROMOTED_VAR_P (x))
2284 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2288 /* If this SUBREG makes VAR wider, it has become a paradoxical
2289 SUBREG with VAR in memory, but these aren't allowed at this
2290 stage of the compilation. So load VAR into a pseudo and take
2291 a SUBREG of that pseudo. */
2292 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2294 replacement = find_fixup_replacement (replacements, var);
2295 if (replacement->new == 0)
2296 replacement->new = gen_reg_rtx (GET_MODE (var));
2297 SUBREG_REG (x) = replacement->new;
2301 /* See if we have already found a replacement for this SUBREG.
2302 If so, use it. Otherwise, make a MEM and see if the insn
2303 is recognized. If not, or if we should force MEM into a register,
2304 make a pseudo for this SUBREG. */
2305 replacement = find_fixup_replacement (replacements, x);
2306 if (replacement->new)
2308 *loc = replacement->new;
2312 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2314 INSN_CODE (insn) = -1;
2315 if (! flag_force_mem && recog_memoized (insn) >= 0)
2318 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2324 /* First do special simplification of bit-field references. */
2325 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2326 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2327 optimize_bit_field (x, insn, 0);
2328 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2329 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2330 optimize_bit_field (x, insn, NULL_PTR);
2332 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2333 into a register and then store it back out. */
2334 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2335 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2336 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2337 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2338 > GET_MODE_SIZE (GET_MODE (var))))
2340 replacement = find_fixup_replacement (replacements, var);
2341 if (replacement->new == 0)
2342 replacement->new = gen_reg_rtx (GET_MODE (var));
2344 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2345 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2348 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2349 insn into a pseudo and store the low part of the pseudo into VAR. */
2350 if (GET_CODE (SET_DEST (x)) == SUBREG
2351 && SUBREG_REG (SET_DEST (x)) == var
2352 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2353 > GET_MODE_SIZE (GET_MODE (var))))
2355 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2356 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2363 rtx dest = SET_DEST (x);
2364 rtx src = SET_SRC (x);
2366 rtx outerdest = dest;
2369 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2370 || GET_CODE (dest) == SIGN_EXTRACT
2371 || GET_CODE (dest) == ZERO_EXTRACT)
2372 dest = XEXP (dest, 0);
2374 if (GET_CODE (src) == SUBREG)
2375 src = XEXP (src, 0);
2377 /* If VAR does not appear at the top level of the SET
2378 just scan the lower levels of the tree. */
2380 if (src != var && dest != var)
2383 /* We will need to rerecognize this insn. */
2384 INSN_CODE (insn) = -1;
2387 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2389 /* Since this case will return, ensure we fixup all the
2391 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2392 insn, replacements);
2393 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2394 insn, replacements);
2395 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2396 insn, replacements);
2398 tem = XEXP (outerdest, 0);
2400 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2401 that may appear inside a ZERO_EXTRACT.
2402 This was legitimate when the MEM was a REG. */
2403 if (GET_CODE (tem) == SUBREG
2404 && SUBREG_REG (tem) == var)
2405 tem = fixup_memory_subreg (tem, insn, 0);
2407 tem = fixup_stack_1 (tem, insn);
2409 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2410 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2411 && ! mode_dependent_address_p (XEXP (tem, 0))
2412 && ! MEM_VOLATILE_P (tem))
2414 enum machine_mode wanted_mode;
2415 enum machine_mode is_mode = GET_MODE (tem);
2416 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2418 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2419 if (wanted_mode == VOIDmode)
2420 wanted_mode = word_mode;
2422 /* If we have a narrower mode, we can do something. */
2423 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2425 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2426 rtx old_pos = XEXP (outerdest, 2);
2429 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2430 offset = (GET_MODE_SIZE (is_mode)
2431 - GET_MODE_SIZE (wanted_mode) - offset);
2433 pos %= GET_MODE_BITSIZE (wanted_mode);
2435 newmem = gen_rtx_MEM (wanted_mode,
2436 plus_constant (XEXP (tem, 0), offset));
2437 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2438 MEM_COPY_ATTRIBUTES (newmem, tem);
2440 /* Make the change and see if the insn remains valid. */
2441 INSN_CODE (insn) = -1;
2442 XEXP (outerdest, 0) = newmem;
2443 XEXP (outerdest, 2) = GEN_INT (pos);
2445 if (recog_memoized (insn) >= 0)
2448 /* Otherwise, restore old position. XEXP (x, 0) will be
2450 XEXP (outerdest, 2) = old_pos;
2454 /* If we get here, the bit-field store doesn't allow memory
2455 or isn't located at a constant position. Load the value into
2456 a register, do the store, and put it back into memory. */
2458 tem1 = gen_reg_rtx (GET_MODE (tem));
2459 emit_insn_before (gen_move_insn (tem1, tem), insn);
2460 emit_insn_after (gen_move_insn (tem, tem1), insn);
2461 XEXP (outerdest, 0) = tem1;
2466 /* STRICT_LOW_PART is a no-op on memory references
2467 and it can cause combinations to be unrecognizable,
2470 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2471 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2473 /* A valid insn to copy VAR into or out of a register
2474 must be left alone, to avoid an infinite loop here.
2475 If the reference to VAR is by a subreg, fix that up,
2476 since SUBREG is not valid for a memref.
2477 Also fix up the address of the stack slot.
2479 Note that we must not try to recognize the insn until
2480 after we know that we have valid addresses and no
2481 (subreg (mem ...) ...) constructs, since these interfere
2482 with determining the validity of the insn. */
2484 if ((SET_SRC (x) == var
2485 || (GET_CODE (SET_SRC (x)) == SUBREG
2486 && SUBREG_REG (SET_SRC (x)) == var))
2487 && (GET_CODE (SET_DEST (x)) == REG
2488 || (GET_CODE (SET_DEST (x)) == SUBREG
2489 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2490 && GET_MODE (var) == promoted_mode
2491 && x == single_set (insn))
2495 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2496 if (replacement->new)
2497 SET_SRC (x) = replacement->new;
2498 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2499 SET_SRC (x) = replacement->new
2500 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2502 SET_SRC (x) = replacement->new
2503 = fixup_stack_1 (SET_SRC (x), insn);
2505 if (recog_memoized (insn) >= 0)
2508 /* INSN is not valid, but we know that we want to
2509 copy SET_SRC (x) to SET_DEST (x) in some way. So
2510 we generate the move and see whether it requires more
2511 than one insn. If it does, we emit those insns and
2512 delete INSN. Otherwise, we can just replace the pattern
2513 of INSN; we have already verified above that INSN has
2514 no function other than to do X. */
2516 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2517 if (GET_CODE (pat) == SEQUENCE)
2519 emit_insn_after (pat, insn);
2520 PUT_CODE (insn, NOTE);
2521 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2522 NOTE_SOURCE_FILE (insn) = 0;
2525 PATTERN (insn) = pat;
2530 if ((SET_DEST (x) == var
2531 || (GET_CODE (SET_DEST (x)) == SUBREG
2532 && SUBREG_REG (SET_DEST (x)) == var))
2533 && (GET_CODE (SET_SRC (x)) == REG
2534 || (GET_CODE (SET_SRC (x)) == SUBREG
2535 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2536 && GET_MODE (var) == promoted_mode
2537 && x == single_set (insn))
2541 if (GET_CODE (SET_DEST (x)) == SUBREG)
2542 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2544 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2546 if (recog_memoized (insn) >= 0)
2549 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2550 if (GET_CODE (pat) == SEQUENCE)
2552 emit_insn_after (pat, insn);
2553 PUT_CODE (insn, NOTE);
2554 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2555 NOTE_SOURCE_FILE (insn) = 0;
2558 PATTERN (insn) = pat;
2563 /* Otherwise, storing into VAR must be handled specially
2564 by storing into a temporary and copying that into VAR
2565 with a new insn after this one. Note that this case
2566 will be used when storing into a promoted scalar since
2567 the insn will now have different modes on the input
2568 and output and hence will be invalid (except for the case
2569 of setting it to a constant, which does not need any
2570 change if it is valid). We generate extra code in that case,
2571 but combine.c will eliminate it. */
2576 rtx fixeddest = SET_DEST (x);
2578 /* STRICT_LOW_PART can be discarded, around a MEM. */
2579 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2580 fixeddest = XEXP (fixeddest, 0);
2581 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2582 if (GET_CODE (fixeddest) == SUBREG)
2584 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2585 promoted_mode = GET_MODE (fixeddest);
2588 fixeddest = fixup_stack_1 (fixeddest, insn);
2590 temp = gen_reg_rtx (promoted_mode);
2592 emit_insn_after (gen_move_insn (fixeddest,
2593 gen_lowpart (GET_MODE (fixeddest),
2597 SET_DEST (x) = temp;
2605 /* Nothing special about this RTX; fix its operands. */
2607 fmt = GET_RTX_FORMAT (code);
2608 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2611 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2615 for (j = 0; j < XVECLEN (x, i); j++)
2616 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2617 insn, replacements);
2622 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2623 return an rtx (MEM:m1 newaddr) which is equivalent.
2624 If any insns must be emitted to compute NEWADDR, put them before INSN.
2626 UNCRITICAL nonzero means accept paradoxical subregs.
2627 This is used for subregs found inside REG_NOTES. */
2630 fixup_memory_subreg (x, insn, uncritical)
/* Convert X, a (SUBREG:m1 (MEM:m2 addr)), into an equivalent (MEM:m1 newaddr)
   per the block comment above; address-computation insns, if any, are emitted
   before INSN.  NOTE(review): several original source lines (parameter decls,
   braces, the paradoxical-SUBREG guard body) are elided from this excerpt.  */
2635   int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2636   rtx addr = XEXP (SUBREG_REG (x), 0);
2637   enum machine_mode mode = GET_MODE (x);
2640   /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
2641   if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
/* On big-endian targets the low-order part of a word sits at a higher byte
   address, so shift OFFSET to address the subword actually selected.  */
2645   if (BYTES_BIG_ENDIAN)
2646     offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2647 	       - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2648   addr = plus_constant (addr, offset);
2649   if (!flag_force_addr && memory_address_p (mode, addr))
2650     /* Shortcut if no insns need be emitted.  */
2651     return change_address (SUBREG_REG (x), mode, addr);
/* Otherwise change_address may have queued insns to legitimize ADDR;
   flush that sequence in front of INSN.  */
2653   result = change_address (SUBREG_REG (x), mode, addr);
2654   emit_insn_before (gen_sequence (), insn);
2659 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2660 Replace subexpressions of X in place.
2661 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2662 Otherwise return X, with its contents possibly altered.
2664 If any insns must be emitted to compute NEWADDR, put them before INSN.
2666 UNCRITICAL is as in fixup_memory_subreg. */
2669 walk_fixup_memory_subreg (x, insn, uncritical)
/* Recursively rewrite every (SUBREG (MEM ...)) inside X in place, per the
   block comment above.  Returns the replacement when X itself is such a
   SUBREG, else X.  NOTE(review): the null-check of X and the 'e'/'E' format
   tests appear to be elided from this excerpt.  */
2674   register enum rtx_code code;
2681   code = GET_CODE (x);
2683   if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2684     return fixup_memory_subreg (x, insn, uncritical);
2686   /* Nothing special about this RTX; fix its operands.  */
2688   fmt = GET_RTX_FORMAT (code);
2689   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2692 	XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2696 	  for (j = 0; j < XVECLEN (x, i); j++)
2698 	      = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2704 /* For each memory ref within X, if it refers to a stack slot
2705 with an out of range displacement, put the address in a temp register
2706 (emitting new insns before INSN to load these registers)
2707 and alter the memory ref to use that register.
2708 Replace each such MEM rtx with a copy, to avoid clobberage. */
2711 fixup_stack_1 (x, insn)
/* Walk X, replacing each MEM whose stack-slot address is out of range with a
   copy whose address has been computed into a temporary register (see block
   comment above).  Insns to load the temporaries go before INSN.  */
2716   register RTX_CODE code = GET_CODE (x);
2721       register rtx ad = XEXP (x, 0);
2722       /* If we have address of a stack slot but it's not valid
2723 	 (displacement is too large), compute the sum in a register.  */
/* Only addresses of the form (plus (frame/arg/stack/virtual reg) const_int)
   are candidates; anything else is left for the recursive walk below.  */
2724       if (GET_CODE (ad) == PLUS
2725 	  && GET_CODE (XEXP (ad, 0)) == REG
2726 	  && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2727 	       && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2728 	      || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2729 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2730 	      || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2732 	      || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2733 	      || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2734 	      || XEXP (ad, 0) == current_function_internal_arg_pointer)
2735 	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
/* Valid addresses need no fixup; invalid ones get copied into a reg and the
   MEM is rebuilt (a fresh rtx, to avoid clobbering shared structure).  */
2738 	  if (memory_address_p (GET_MODE (x), ad))
2742 	  temp = copy_to_reg (ad);
2743 	  seq = gen_sequence ();
2745 	  emit_insn_before (seq, insn);
2746 	  return change_address (x, VOIDmode, temp);
/* Not a fixable MEM at this level: recurse into all operands.  */
2751   fmt = GET_RTX_FORMAT (code);
2752   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2755 	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2759 	  for (j = 0; j < XVECLEN (x, i); j++)
2760 	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2766 /* Optimization: a bit-field instruction whose field
2767 happens to be a byte or halfword in memory
2768 can be changed to a move instruction.
2770 We call here when INSN is an insn to examine or store into a bit-field.
2771 BODY is the SET-rtx to be altered.
2773 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2774 (Currently this is called only from function.c, and EQUIV_MEM
2778 optimize_bit_field (body, insn, equiv_mem)
/* Try to turn the bit-field extract/insert in BODY (a SET inside INSN) into
   a plain move, when the field is a byte- or halfword-sized, suitably aligned
   piece of memory (see the block comment above).  All changes are queued with
   validate_change and committed only if apply_change_group succeeds.  */
2783   register rtx bitfield;
2786   enum machine_mode mode;
/* DESTFLAG distinguishes a bit-field store (field on the LHS) from a
   bit-field read (field on the RHS).  */
2788   if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2789       || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2790     bitfield = SET_DEST (body), destflag = 1;
2792     bitfield = SET_SRC (body), destflag = 0;
2794   /* First check that the field being stored has constant size and position
2795      and is in fact a byte or halfword suitably aligned.  */
2797   if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2798       && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2799       && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2801       && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2803       register rtx memref = 0;
2805       /* Now check that the containing word is memory, not a register,
2806 	 and that it is safe to change the machine mode.  */
/* A REG operand may still qualify if EQUIV_MEM records an equivalent MEM
   for it; likewise for a SUBREG of such a REG.  */
2808       if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2809 	memref = XEXP (bitfield, 0);
2810       else if (GET_CODE (XEXP (bitfield, 0)) == REG
2812 	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2813       else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2814 	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2815 	memref = SUBREG_REG (XEXP (bitfield, 0));
2816       else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2818 	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2819 	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2822 	  && ! mode_dependent_address_p (XEXP (memref, 0))
2823 	  && ! MEM_VOLATILE_P (memref))
2825 	  /* Now adjust the address, first for any subreg'ing
2826 	     that we are now getting rid of,
2827 	     and then for which byte of the word is wanted.  */
2829 	  HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2832 	  /* Adjust OFFSET to count bits from low-address byte.  */
2833 	  if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2834 	    offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2835 		      - offset - INTVAL (XEXP (bitfield, 1)));
2837 	  /* Adjust OFFSET to count bytes from low-address byte.  */
2838 	  offset /= BITS_PER_UNIT;
2839 	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2841 	      offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2842 	      if (BYTES_BIG_ENDIAN)
2843 		offset -= (MIN (UNITS_PER_WORD,
2844 				GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2845 			   - MIN (UNITS_PER_WORD,
2846 				  GET_MODE_SIZE (GET_MODE (memref))));
/* Rebuild the MEM in the field's own mode at the adjusted address; any insns
   needed to form the address were collected and go before INSN.  */
2850 	  memref = change_address (memref, mode,
2851 				   plus_constant (XEXP (memref, 0), offset));
2852 	  insns = get_insns ();
2854 	  emit_insns_before (insns, insn);
2856 	  /* Store this memory reference where
2857 	     we found the bit field reference.  */
/* Bit-field store: memref becomes the SET_DEST, and the source may need to
   be narrowed (gen_lowpart) to the field's mode.  */
2861 	      validate_change (insn, &SET_DEST (body), memref, 1);
2862 	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2864 		  rtx src = SET_SRC (body);
2865 		  while (GET_CODE (src) == SUBREG
2866 			 && SUBREG_WORD (src) == 0)
2867 		    src = SUBREG_REG (src);
2868 		  if (GET_MODE (src) != GET_MODE (memref))
2869 		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2870 		  validate_change (insn, &SET_SRC (body), src, 1);
2872 	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
2873 		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2874 		/* This shouldn't happen because anything that didn't have
2875 		   one of these modes should have got converted explicitly
2876 		   and then referenced through a subreg.
2877 		   This is so because the original bit-field was
2878 		   handled by agg_mode and so its tree structure had
2879 		   the same mode that memref now has.  */
/* Bit-field read: strip no-op SUBREGs off the destination, then either move
   memref directly (modes match) or extend it via convert_move.  */
2884 	      rtx dest = SET_DEST (body);
2886 	      while (GET_CODE (dest) == SUBREG
2887 		     && SUBREG_WORD (dest) == 0
2888 		     && (GET_MODE_CLASS (GET_MODE (dest))
2889 			 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2890 		     && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2892 		dest = SUBREG_REG (dest);
2894 	      validate_change (insn, &SET_DEST (body), dest, 1);
2896 	      if (GET_MODE (dest) == GET_MODE (memref))
2897 		validate_change (insn, &SET_SRC (body), memref, 1);
2900 		  /* Convert the mem ref to the destination mode.  */
2901 		  rtx newreg = gen_reg_rtx (GET_MODE (dest));
/* ZERO_EXTRACT reads are zero-extended, SIGN_EXTRACT sign-extended; the
   unsignedp flag passed to convert_move encodes that distinction.  */
2904 		  convert_move (newreg, memref,
2905 				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2909 		  validate_change (insn, &SET_SRC (body), newreg, 1);
2913 	  /* See if we can convert this extraction or insertion into
2914 	     a simple move insn.  We might not be able to do so if this
2915 	     was, for example, part of a PARALLEL.
2917 	     If we succeed, write out any needed conversions.  If we fail,
2918 	     it is hard to guess why we failed, so don't do anything
2919 	     special; just let the optimization be suppressed.  */
2921 	  if (apply_change_group () && seq)
2922 	    emit_insns_before (seq, insn);
2927 /* These routines are responsible for converting virtual register references
2928 to the actual hard register references once RTL generation is complete.
2930 The following four variables are used for communication between the
2931 routines. They contain the offsets of the virtual registers from their
2932 respective hard registers. */
2934 static int in_arg_offset;
2935 static int var_offset;
2936 static int dynamic_offset;
2937 static int out_arg_offset;
2938 static int cfa_offset;
2940 /* In most machines, the stack pointer register is equivalent to the bottom
2943 #ifndef STACK_POINTER_OFFSET
2944 #define STACK_POINTER_OFFSET 0
2947 /* If not defined, pick an appropriate default for the offset of dynamically
2948 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2949 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2951 #ifndef STACK_DYNAMIC_OFFSET
2953 #ifdef ACCUMULATE_OUTGOING_ARGS
2954 /* The bottom of the stack points to the actual arguments. If
2955 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2956 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
2957 stack space for register parameters is not pushed by the caller, but
2958 rather part of the fixed stack areas and hence not included in
2959 `current_function_outgoing_args_size'. Nevertheless, we must allow
2960 for it when allocating stack dynamic objects. */
2962 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2963 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2964 (current_function_outgoing_args_size \
2965 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2968 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2969 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2973 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2977 /* On a few machines, the CFA coincides with the arg pointer. */
2979 #ifndef ARG_POINTER_CFA_OFFSET
2980 #define ARG_POINTER_CFA_OFFSET 0
2984 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2985 its address taken. DECL is the decl for the object stored in the
2986 register, for later use if we do need to force REG into the stack.
2987 REG is overwritten by the MEM like in put_reg_into_stack. */
2990 gen_mem_addressof (reg, decl)
/* Rewrite REG in place as a (MEM (ADDRESSOF (new-REG))) for DECL, per the
   block comment above, then fix up existing references to REG.  */
2994   tree type = TREE_TYPE (decl);
/* R is the new ADDRESSOF; its operand is a fresh pseudo in REG's old mode,
   and ADDRESSOF_REGNO remembers the original register number.  */
2995   rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2996   SET_ADDRESSOF_DECL (r, decl);
2997   /* If the original REG was a user-variable, then so is the REG whose
2998      address is being taken.  */
2999   REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
/* Overwrite REG itself (it is shared rtl) so every existing reference now
   sees a MEM in DECL's mode, carrying DECL's volatility/aggregate/alias
   attributes.  */
3002   PUT_CODE (reg, MEM);
3003   PUT_MODE (reg, DECL_MODE (decl));
3004   MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
3005   MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
3006   MEM_ALIAS_SET (reg) = get_alias_set (decl);
/* Existing insns mentioning REG may have become invalid (mode change from
   the promoted register mode to DECL_MODE); repair them.  */
3008   if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
3009     fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
3014 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
3017 flush_addressof (decl)
/* If DECL (a PARM_DECL or VAR_DECL) is represented as a
   (MEM (ADDRESSOF (REG))), force that register into the stack now.  */
3020   if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
3021       && DECL_RTL (decl) != 0
3022       && GET_CODE (DECL_RTL (decl)) == MEM
3023       && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
3024       && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG
3025     put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
3028 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
3031 put_addressof_into_stack (r, ht)
/* Force the pseudo wrapped by R (an ADDRESSOF rtx) into a stack slot,
   using the DECL recorded on R for mode/volatility information.
   HT, if non-null, is the insns-for-mem hash table passed through to
   put_reg_into_stack.  */
3033      struct hash_table *ht;
3035   tree decl = ADDRESSOF_DECL (r);
3036   rtx reg = XEXP (r, 0);
/* NOTE(review): the non-REG case presumably aborts — the statement under
   this guard is elided from this excerpt; confirm against full source.  */
3038   if (GET_CODE (reg) != REG)
3041   put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
3042 		      DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
3043 		      ADDRESSOF_REGNO (r),
3044 		      TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
3047 /* List of replacements made below in purge_addressof_1 when creating
3048 bitfield insertions. */
3049 static rtx purge_bitfield_addressof_replacements;
3051 /* List of replacements made below in purge_addressof_1 for patterns
3052 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
3053 corresponding (ADDRESSOF (REG ...)) and value is a substitution for
3054 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
3055 enough in complex cases, e.g. when some field values can be
3056 extracted by using a MEM with a narrower mode. */
3057 static rtx purge_addressof_replacements;
3059 /* Helper function for purge_addressof. See if the rtx expression at *LOC
3060 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
3061 the stack. If the function returns FALSE then the replacement could not
3065 purge_addressof_1 (loc, insn, force, store, ht)
/* Eliminate ADDRESSOF rtxs at *LOC within INSN, per the block comment above.
   Returns true on success; false tells the caller the replacement could not
   be made (e.g. inside a REG_NOTE).  STORE nonzero means *LOC is being
   written.  NOTE(review): many original lines (declarations, braces,
   else-arms, goto labels) are elided from this excerpt.  */
3069      struct hash_table *ht;
3075   boolean result = true;
3077   /* Re-start here to avoid recursion in common cases.  */
3084   code = GET_CODE (x);
/* Case 1: (ADDRESSOF (MEM ...)) — the register was already forced into the
   stack, so the ADDRESSOF is just the MEM's address expression.  */
3086   if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
3089       /* We must create a copy of the rtx because it was created by
3090 	 overwriting a REG rtx which is always shared.  */
3091       rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3093       if (validate_change (insn, loc, sub, 0)
3094 	  || validate_replace_rtx (x, sub, insn))
/* The raw address wasn't acceptable to the insn; compute it into a register
   with force_operand and try again.  */
3098       sub = force_operand (sub, NULL_RTX);
3099       if (! validate_change (insn, loc, sub, 0)
3100 	  && ! validate_replace_rtx (x, sub, insn))
3103       insns = gen_sequence ();
3105       emit_insn_before (insns, insn);
/* Case 2: (MEM (ADDRESSOF reg-or-mem)) — the common "dereference of a taken
   address" form; try to collapse it back to the underlying object.  */
3108   else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3110       rtx sub = XEXP (XEXP (x, 0), 0);
3113       if (GET_CODE (sub) == MEM)
3115 	  sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
3116 	  MEM_COPY_ATTRIBUTES (sub2, sub);
3117 	  RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
/* A volatile or BLKmode access cannot live in a register: give up and
   put the ADDRESSOF's register into the stack.  */
3121       if (GET_CODE (sub) == REG
3122 	  && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3124 	  put_addressof_into_stack (XEXP (x, 0), ht);
/* Mode mismatch between the MEM and the register: the access reads/writes
   only part (or a differently-typed view) of the register.  */
3127       else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3129 	  int size_x, size_sub;
3133 	      /* When processing REG_NOTES look at the list of
3134 		 replacements done on the insn to find the register that X
3138 	      for (tem = purge_bitfield_addressof_replacements;
3140 		   tem = XEXP (XEXP (tem, 1), 1))
3141 		if (rtx_equal_p (x, XEXP (tem, 0)))
3143 		    *loc = XEXP (XEXP (tem, 1), 0);
3147 	      /* See comment for purge_addressof_replacements.  */
3148 	      for (tem = purge_addressof_replacements;
3150 		   tem = XEXP (XEXP (tem, 1), 1))
3151 		if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3153 		    rtx z = XEXP (XEXP (tem, 1), 0);
3155 		    if (GET_MODE (x) == GET_MODE (z)
3156 			|| (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3157 			    && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3160 			/* It can happen that the note may speak of things
3161 			   in a wider (or just different) mode than the
3162 			   code did.  This is especially true of
3165 			if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
3168 			else if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3169 				 && (GET_MODE_SIZE (GET_MODE (x))
3170 				     > GET_MODE_SIZE (GET_MODE (z))))
3172 			    /* This can occur as a result in invalid
3173 			       pointer casts, e.g. float f; ...
3174 			       *(long long int *)&f.
3175 			       ??? We could emit a warning here, but
3176 			       without a line number that wouldn't be
3178 			    z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3181 			  z = gen_lowpart (GET_MODE (x), z);
3187 	      /* Sometimes we may not be able to find the replacement.  For
3188 		 example when the original insn was a MEM in a wider mode,
3189 		 and the note is part of a sign extension of a narrowed
3190 		 version of that MEM.  Gcc testcase compile/990829-1.c can
3191 		 generate an example of this situation.  Rather than complain
3192 		 we return false, which will prompt our caller to remove the
3197 	  size_x = GET_MODE_BITSIZE (GET_MODE (x));
3198 	  size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3200 	  /* Don't even consider working with paradoxical subregs,
3201 	     or the moral equivalent seen here.  */
3202 	  if (size_x <= size_sub
3203 	      && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3205 	      /* Do a bitfield insertion to mirror what would happen
/* Store path: emit a store_bit_field of VAL into the register, then keep
   the insns-for-mem table (HT) in sync with the newly emitted insns.  */
3212 		  rtx p = PREV_INSN (insn);
3215 		  val = gen_reg_rtx (GET_MODE (x));
3216 		  if (! validate_change (insn, loc, val, 0))
3218 		      /* Discard the current sequence and put the
3219 			 ADDRESSOF on stack.  */
3223 		  seq = gen_sequence ();
3225 		  emit_insn_before (seq, insn);
3226 		  compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3230 		  store_bit_field (sub, size_x, 0, GET_MODE (x),
3231 				   val, GET_MODE_SIZE (GET_MODE (sub)),
3232 				   GET_MODE_SIZE (GET_MODE (sub)));
3234 		  /* Make sure to unshare any shared rtl that store_bit_field
3235 		     might have created.  */
3236 		  unshare_all_rtl_again (get_insns ());
3238 		  seq = gen_sequence ();
3240 		  p = emit_insn_after (seq, insn);
3241 		  if (NEXT_INSN (insn))
3242 		    compute_insns_for_mem (NEXT_INSN (insn),
3243 					   p ? NEXT_INSN (p) : NULL_RTX,
/* Load path: extract_bit_field pulls the needed piece out of the register.  */
3248 		  rtx p = PREV_INSN (insn);
3251 		  val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3252 					   GET_MODE (x), GET_MODE (x),
3253 					   GET_MODE_SIZE (GET_MODE (sub)),
3254 					   GET_MODE_SIZE (GET_MODE (sub)));
3256 		  if (! validate_change (insn, loc, val, 0))
3258 		      /* Discard the current sequence and put the
3259 			 ADDRESSOF on stack.  */
3264 		  seq = gen_sequence ();
3266 		  emit_insn_before (seq, insn);
3267 		  compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3271 	      /* Remember the replacement so that the same one can be done
3272 		 on the REG_NOTES.  */
3273 	      purge_bitfield_addressof_replacements
3274 		= gen_rtx_EXPR_LIST (VOIDmode, x,
3277 				      purge_bitfield_addressof_replacements));
3279 	      /* We replaced with a reg -- all done.  */
/* Modes matched: try substituting the register itself for the MEM.  */
3283       else if (validate_change (insn, loc, sub, 0))
3285 	  /* Remember the replacement so that the same one can be done
3286 	     on the REG_NOTES.  */
3287 	  if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3291 	      for (tem = purge_addressof_replacements;
3293 		   tem = XEXP (XEXP (tem, 1), 1))
3294 		if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3296 		    XEXP (XEXP (tem, 1), 0) = sub;
3299 	      purge_addressof_replacements
3300 		= gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3301 			   gen_rtx_EXPR_LIST (VOIDmode, sub,
3302 					      purge_addressof_replacements));
3308       /* else give up and put it into the stack */
/* Case 3: a bare ADDRESSOF with FORCE set (or otherwise unhandled) — the
   register must live in memory after all.  */
3310   else if (code == ADDRESSOF)
3312       put_addressof_into_stack (x, ht);
3315   else if (code == SET)
/* Process dest and src separately so STORE is 1 only for the destination.  */
3317       result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3318       result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3322   /* Scan all subexpressions.  */
3323   fmt = GET_RTX_FORMAT (code);
3324   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3327       result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3328     else if (*fmt == 'E')
3329       for (j = 0; j < XVECLEN (x, i); j++)
3330 	result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3336 /* Return a new hash table entry in HT. */
3338 static struct hash_entry *
3339 insns_for_mem_newfunc (he, ht, k)
3340      struct hash_entry *he;
3341      struct hash_table *ht;
3342      hash_table_key k ATTRIBUTE_UNUSED;
/* Allocation callback for the insns-for-mem hash table: allocate an entry
   out of the table's own storage and start it with an empty insn list.  */
3344   struct insns_for_mem_entry *ifmhe;
3348   ifmhe = ((struct insns_for_mem_entry *)
3349 	   hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3350   ifmhe->insns = NULL_RTX;
3355 /* Return a hash value for K, a REG. */
/* NOTE(review): parameter declaration and braces are missing from this
   listing.  REG rtxes are shared objects in this compiler, so the pointer
   value itself identifies the register -- presumably why the address is a
   valid hash; confirm against the full source.  */
3357 static unsigned long
3358 insns_for_mem_hash (k)
3361 /* K is really a RTX. Just use the address as the hash value. */
3362 return (unsigned long) k;
3365 /* Return non-zero if K1 and K2 (two REGs) are the same. */
/* NOTE(review): the return type, parameter declarations, and entire body
   of this comparison callback are missing from this listing -- restore
   from the full source before relying on this text.  */
3368 insns_for_mem_comp (k1, k2)
/* Bookkeeping passed (as the DATA pointer) to insns_for_mem_walk via
   for_each_rtx.  NOTE(review): some member declarations (the insn and
   pass fields) are missing from this listing.  */
3375 struct insns_for_mem_walk_info {
3376 /* The hash table that we are using to record which INSNs use which
3378 struct hash_table *ht;
3380 /* The INSN we are currently processing. */
3383 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3384 to find the insns that use the REGs in the ADDRESSOFs. */
3388 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3389 that might be used in an ADDRESSOF expression, record this INSN in
3390 the hash table given by DATA (which is really a pointer to an
3391 insns_for_mem_walk_info structure). */
/* NOTE(review): return type, parameter declarations, braces and the
   closing statements are missing from this listing.  */
3394 insns_for_mem_walk (r, data)
3398 struct insns_for_mem_walk_info *ifmwi
3399 = (struct insns_for_mem_walk_info *) data;
/* Pass 0: seed the table with every REG that appears under an
   ADDRESSOF.  */
3401 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3402 && GET_CODE (XEXP (*r, 0)) == REG)
3403 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0)
/* Pass 1: for every use of such a REG, record the containing INSN.  */
3404 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3406 /* Lookup this MEM in the hashtable, creating it if necessary. */
3407 struct insns_for_mem_entry *ifme
3408 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3413 /* If we have not already recorded this INSN, do so now. Since
3414 we process the INSNs in order, we know that if we have
3415 recorded it it must be at the front of the list. */
3416 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3418 /* We do the allocation on the same obstack as is used for
3419 the hash table since this memory will not be used once
3420 the hash table is deallocated. */
3421 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3422 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3431 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3432 which REGs in HT. */
/* NOTE(review): return type, remaining parameter declarations, and
   braces are missing from this listing.  */
3435 compute_insns_for_mem (insns, last_insn, ht)
3438 struct hash_table *ht;
3441 struct insns_for_mem_walk_info ifmwi;
/* Two passes over the insn stream: pass 0 collects the ADDRESSOF'd
   REGs, pass 1 records which insns mention them; see
   insns_for_mem_walk.  */
3444 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3445 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
/* Only real insns (rtx class 'i') can mention a REG.  */
3446 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3449 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3453 /* Helper function for purge_addressof called through for_each_rtx.
3454 Returns true iff the rtl is an ADDRESSOF. */
/* NOTE(review): return type, first parameter declaration, and braces are
   missing from this listing.  RTL is presumably an `rtx *' (the usual
   for_each_rtx callback signature) -- confirm.  */
3456 is_addressof (rtl, data)
3458 void * data ATTRIBUTE_UNUSED;
3460 return GET_CODE (* rtl) == ADDRESSOF;
3463 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3464 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
/* NOTE(review): this listing is missing lines throughout the function
   (the header's tail, local declarations, braces, and several
   statements).  */
3468 purge_addressof (insns)
3472 struct hash_table ht;
3474 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3475 requires a fixup pass over the instruction stream to correct
3476 INSNs that depended on the REG being a REG, and not a MEM. But,
3477 these fixup passes are slow. Furthermore, most MEMs are not
3478 mentioned in very many instructions. So, we speed up the process
3479 by pre-calculating which REGs occur in which INSNs; that allows
3480 us to perform the fixup passes much more quickly. */
3481 hash_table_init (&ht,
3482 insns_for_mem_newfunc,
3484 insns_for_mem_comp);
3485 compute_insns_for_mem (insns, NULL_RTX, &ht);
3487 for (insn = insns; insn; insn = NEXT_INSN (insn))
3488 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3489 || GET_CODE (insn) == CALL_INSN)
/* For an asm, FORCE is non-zero so every ADDRESSOF must be resolved.  */
3491 if (! purge_addressof_1 (&PATTERN (insn), insn,
3492 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3493 /* If we could not replace the ADDRESSOFs in the insn,
3494 something is wrong. */
/* NOTE(review): "®_NOTES" below is a mojibake of "&REG_NOTES" --
   repair in the real source.  */
3497 if (! purge_addressof_1 (®_NOTES (insn), NULL_RTX, 0, 0, &ht))
3499 /* If we could not replace the ADDRESSOFs in the insn's notes,
3500 we can just remove the offending notes instead. */
3503 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3505 /* If we find a REG_RETVAL note then the insn is a libcall.
3506 Such insns must have REG_EQUAL notes as well, in order
3507 for later passes of the compiler to work. So it is not
3508 safe to delete the notes here, and instead we abort. */
3509 if (REG_NOTE_KIND (note) == REG_RETVAL)
3511 if (for_each_rtx (& note, is_addressof, NULL))
3512 remove_note (insn, note);
/* Reset the per-function replacement lists for the next compilation.  */
3518 hash_table_free (&ht);
3519 purge_bitfield_addressof_replacements = 0;
3520 purge_addressof_replacements = 0;
3522 /* REGs are shared. purge_addressof will destructively replace a REG
3523 with a MEM, which creates shared MEMs.
3525 Unfortunately, the children of put_reg_into_stack assume that MEMs
3526 referring to the same stack slot are shared (fixup_var_refs and
3527 the associated hash table code).
3529 So, we have to do another unsharing pass after we have flushed any
3530 REGs that had their address taken into the stack.
3532 It may be worth tracking whether or not we converted any REGs into
3533 MEMs to avoid this overhead when it is not needed. */
3534 unshare_all_rtl_again (get_insns ());
3537 /* Pass through the INSNS of function FNDECL and convert virtual register
3538 references to hard register references. */
/* NOTE(review): return type, parameter declarations, local declarations,
   and braces are missing from this listing.  */
3541 instantiate_virtual_regs (fndecl, insns)
3548 /* Compute the offsets to use for this function. */
3549 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3550 var_offset = STARTING_FRAME_OFFSET;
3551 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3552 out_arg_offset = STACK_POINTER_OFFSET;
3553 cfa_offset = ARG_POINTER_CFA_OFFSET;
3555 /* Scan all variables and parameters of this function. For each that is
3556 in memory, instantiate all virtual registers if the result is a valid
3557 address. If not, we do it later. That will handle most uses of virtual
3558 regs on many machines. */
3559 instantiate_decls (fndecl, 1);
3561 /* Initialize recognition, indicating that volatile is OK. */
3564 /* Scan through all the insns, instantiating every virtual register still
3566 for (insn = insns; insn; insn = NEXT_INSN (insn))
3567 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3568 || GET_CODE (insn) == CALL_INSN)
3570 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
/* NOTE(review): "®_NOTES" below is a mojibake of "&REG_NOTES" --
   repair in the real source.  */
3571 instantiate_virtual_regs_1 (®_NOTES (insn), NULL_RTX, 0);
3574 /* Instantiate the stack slots for the parm registers, for later use in
3575 addressof elimination. */
3576 for (i = 0; i < max_parm_reg; ++i)
3577 if (parm_reg_stack_loc[i])
3578 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3580 /* Now instantiate the remaining register equivalences for debugging info.
3581 These will not be valid addresses. */
3582 instantiate_decls (fndecl, 0);
3584 /* Indicate that, from now on, assign_stack_local should use
3585 frame_pointer_rtx. */
3586 virtuals_instantiated = 1;
3589 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3590 all virtual registers in their DECL_RTL's.
3592 If VALID_ONLY, do this only if the resulting address is still valid.
3593 Otherwise, always do it. */
/* NOTE(review): return type, parameter declarations, local declarations,
   and braces are missing from this listing.  */
3596 instantiate_decls (fndecl, valid_only)
3602 if (DECL_SAVED_INSNS (fndecl))
3603 /* When compiling an inline function, the obstack used for
3604 rtl allocation is the maybepermanent_obstack. Calling
3605 `resume_temporary_allocation' switches us back to that
3606 obstack while we process this function's parameters. */
3607 resume_temporary_allocation ();
3609 /* Process all parameters of the function. */
3610 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3612 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3614 instantiate_decl (DECL_RTL (decl), size, valid_only);
3616 /* If the parameter was promoted, then the incoming RTL mode may be
3617 larger than the declared type size. We must use the larger of
3619 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3620 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3623 /* Now process all variables defined in the function or its subblocks. */
3624 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3626 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3628 /* Save all rtl allocated for this function by raising the
3629 high-water mark on the maybepermanent_obstack. */
3631 /* All further rtl allocation is now done in the current_obstack. */
3632 rtl_in_current_obstack ();
3636 /* Subroutine of instantiate_decls: Process all decls in the given
3637 BLOCK node and all its subblocks. */
/* NOTE(review): return type, parameter declarations, braces, and the
   second argument of the first instantiate_decl call are missing from
   this listing.  Recurses depth-first through the BLOCK tree.  */
3640 instantiate_decls_1 (let, valid_only)
3646 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3647 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3650 /* Process all subblocks. */
3651 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3652 instantiate_decls_1 (t, valid_only);
3655 /* Subroutine of the preceding procedures: Given RTL representing a
3656 decl and the size of the object, do any instantiation required.
3658 If VALID_ONLY is non-zero, it means that the RTL should only be
3659 changed if the new address is valid. */
/* NOTE(review): return type, parameter declarations, several early
   returns, and braces are missing from this listing.  */
3662 instantiate_decl (x, size, valid_only)
3667 enum machine_mode mode;
3670 /* If this is not a MEM, no need to do anything. Similarly if the
3671 address is a constant or a register that is not a virtual register. */
3673 if (x == 0 || GET_CODE (x) != MEM)
3677 if (CONSTANT_P (addr)
3678 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3679 || (GET_CODE (addr) == REG
3680 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3681 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3684 /* If we should only do this if the address is valid, copy the address.
3685 We need to do this so we can undo any changes that might make the
3686 address invalid. This copy is unfortunate, but probably can't be
3690 addr = copy_rtx (addr);
3692 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3696 /* Now verify that the resulting address is valid for every integer or
3697 floating-point mode up to and including SIZE bytes long. We do this
3698 since the object might be accessed in any mode and frame addresses
3701 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3702 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3703 mode = GET_MODE_WIDER_MODE (mode))
3704 if (! memory_address_p (mode, addr))
3707 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3708 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3709 mode = GET_MODE_WIDER_MODE (mode))
3710 if (! memory_address_p (mode, addr))
3714 /* Put back the address now that we have updated it and we either know
3715 it is valid or we don't care whether it is valid. */
3720 /* Given a pointer to a piece of rtx and an optional pointer to the
3721 containing object, instantiate any virtual registers present in it.
3723 If EXTRA_INSNS, we always do the replacement and generate
3724 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3727 Return 1 if we either had nothing to do or if we were able to do the
3728 needed replacement. Return 0 otherwise; we only return zero if
3729 EXTRA_INSNS is zero.
3731 We first try some simple transformations to avoid the creation of extra
/* NOTE(review): large parts of this function are missing from this
   listing -- the return type, most declarations, the `switch (code)'
   statement and many of its case labels, braces, and numerous
   statements.  The annotations below describe only the visible code.  */
3735 instantiate_virtual_regs_1 (loc, object, extra_insns)
3743 HOST_WIDE_INT offset = 0;
3749 /* Re-start here to avoid recursion in common cases. */
3756 code = GET_CODE (x);
3758 /* Check for some special cases. */
/* Visible fragment of the SET case: a store *into* a virtual register
   becomes a store into the corresponding hard register with the offset
   subtracted from the source.  */
3775 /* We are allowed to set the virtual registers. This means that
3776 the actual register should receive the source minus the
3777 appropriate offset. This is used, for example, in the handling
3778 of non-local gotos. */
3779 if (SET_DEST (x) == virtual_incoming_args_rtx)
3780 new = arg_pointer_rtx, offset = - in_arg_offset;
3781 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3782 new = frame_pointer_rtx, offset = - var_offset;
3783 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3784 new = stack_pointer_rtx, offset = - dynamic_offset;
3785 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3786 new = stack_pointer_rtx, offset = - out_arg_offset;
3787 else if (SET_DEST (x) == virtual_cfa_rtx)
3788 new = arg_pointer_rtx, offset = - cfa_offset;
3792 /* The only valid sources here are PLUS or REG. Just do
3793 the simplest possible thing to handle them. */
3794 if (GET_CODE (SET_SRC (x)) != REG
3795 && GET_CODE (SET_SRC (x)) != PLUS)
3799 if (GET_CODE (SET_SRC (x)) != REG)
3800 temp = force_operand (SET_SRC (x), NULL_RTX);
3803 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3807 emit_insns_before (seq, object);
3810 if (! validate_change (object, &SET_SRC (x), temp, 0)
3817 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
/* Visible fragment of the PLUS case.  */
3822 /* Handle special case of virtual register plus constant. */
3823 if (CONSTANT_P (XEXP (x, 1)))
3825 rtx old, new_offset;
3827 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3828 if (GET_CODE (XEXP (x, 0)) == PLUS)
3830 rtx inner = XEXP (XEXP (x, 0), 0);
3832 if (inner == virtual_incoming_args_rtx)
3833 new = arg_pointer_rtx, offset = in_arg_offset;
3834 else if (inner == virtual_stack_vars_rtx)
3835 new = frame_pointer_rtx, offset = var_offset;
3836 else if (inner == virtual_stack_dynamic_rtx)
3837 new = stack_pointer_rtx, offset = dynamic_offset;
3838 else if (inner == virtual_outgoing_args_rtx)
3839 new = stack_pointer_rtx, offset = out_arg_offset;
3840 else if (inner == virtual_cfa_rtx)
3841 new = arg_pointer_rtx, offset = cfa_offset;
3848 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3850 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3853 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3854 new = arg_pointer_rtx, offset = in_arg_offset;
3855 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3856 new = frame_pointer_rtx, offset = var_offset;
3857 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3858 new = stack_pointer_rtx, offset = dynamic_offset;
3859 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3860 new = stack_pointer_rtx, offset = out_arg_offset;
3861 else if (XEXP (x, 0) == virtual_cfa_rtx)
3862 new = arg_pointer_rtx, offset = cfa_offset;
3865 /* We know the second operand is a constant. Unless the
3866 first operand is a REG (which has been already checked),
3867 it needs to be checked. */
3868 if (GET_CODE (XEXP (x, 0)) != REG)
3876 new_offset = plus_constant (XEXP (x, 1), offset);
3878 /* If the new constant is zero, try to replace the sum with just
3880 if (new_offset == const0_rtx
3881 && validate_change (object, loc, new, 0))
3884 /* Next try to replace the register and new offset.
3885 There are two changes to validate here and we can't assume that
3886 in the case of old offset equals new just changing the register
3887 will yield a valid insn. In the interests of a little efficiency,
3888 however, we only call validate change once (we don't queue up the
3889 changes and then call apply_change_group). */
3893 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3894 : (XEXP (x, 0) = new,
3895 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3903 /* Otherwise copy the new constant into a register and replace
3904 constant with that register. */
3905 temp = gen_reg_rtx (Pmode);
3907 if (validate_change (object, &XEXP (x, 1), temp, 0))
3908 emit_insn_before (gen_move_insn (temp, new_offset), object);
3911 /* If that didn't work, replace this expression with a
3912 register containing the sum. */
3915 new = gen_rtx_PLUS (Pmode, new, new_offset);
3918 temp = force_operand (new, NULL_RTX);
3922 emit_insns_before (seq, object);
3923 if (! validate_change (object, loc, temp, 0)
3924 && ! validate_replace_rtx (x, temp, object))
3932 /* Fall through to generic two-operand expression case. */
/* Binary/comparison operators: only operand 1 needs an explicit
   recursive visit here (operand 0 is handled by the shared code that
   follows in the full source).  */
3938 case DIV: case UDIV:
3939 case MOD: case UMOD:
3940 case AND: case IOR: case XOR:
3941 case ROTATERT: case ROTATE:
3942 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3944 case GE: case GT: case GEU: case GTU:
3945 case LE: case LT: case LEU: case LTU:
3946 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3947 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
/* Visible fragment of the MEM case.  */
3952 /* Most cases of MEM that convert to valid addresses have already been
3953 handled by our scan of decls. The only special handling we
3954 need here is to make a copy of the rtx to ensure it isn't being
3955 shared if we have to change it to a pseudo.
3957 If the rtx is a simple reference to an address via a virtual register,
3958 it can potentially be shared. In such cases, first try to make it
3959 a valid address, which can also be shared. Otherwise, copy it and
3962 First check for common cases that need no processing. These are
3963 usually due to instantiation already being done on a previous instance
3967 if (CONSTANT_ADDRESS_P (temp)
3968 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3969 || temp == arg_pointer_rtx
3971 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3972 || temp == hard_frame_pointer_rtx
3974 || temp == frame_pointer_rtx)
3977 if (GET_CODE (temp) == PLUS
3978 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3979 && (XEXP (temp, 0) == frame_pointer_rtx
3980 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3981 || XEXP (temp, 0) == hard_frame_pointer_rtx
3983 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3984 || XEXP (temp, 0) == arg_pointer_rtx
3989 if (temp == virtual_stack_vars_rtx
3990 || temp == virtual_incoming_args_rtx
3991 || (GET_CODE (temp) == PLUS
3992 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3993 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3994 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3996 /* This MEM may be shared. If the substitution can be done without
3997 the need to generate new pseudos, we want to do it in place
3998 so all copies of the shared rtx benefit. The call below will
3999 only make substitutions if the resulting address is still
4002 Note that we cannot pass X as the object in the recursive call
4003 since the insn being processed may not allow all valid
4004 addresses. However, if we were not passed an object, we can
4005 only modify X without copying it if X will have a valid
4008 ??? Also note that this can still lose if OBJECT is an insn that
4009 has less restrictions on an address than some other insn.
4010 In that case, we will modify the shared address. This case
4011 doesn't seem very likely, though. One case where this could
4012 happen is in the case of a USE or CLOBBER reference, but we
4013 take care of that below. */
4015 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4016 object ? object : x, 0))
4019 /* Otherwise make a copy and process that copy. We copy the entire
4020 RTL expression since it might be a PLUS which could also be
4022 *loc = x = copy_rtx (x);
4025 /* Fall through to generic unary operation case. */
4027 case STRICT_LOW_PART:
4029 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4030 case SIGN_EXTEND: case ZERO_EXTEND:
4031 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4032 case FLOAT: case FIX:
4033 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4037 /* These cases either have just one operand or we know that we need not
4038 check the rest of the operands. */
4044 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4045 go ahead and make the invalid one, but do it to a copy. For a REG,
4046 just make the recursive call, since there's no chance of a problem. */
4048 if ((GET_CODE (XEXP (x, 0)) == MEM
4049 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4051 || (GET_CODE (XEXP (x, 0)) == REG
4052 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4055 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
/* Visible fragment of the REG case: a bare virtual register becomes
   hard-register + offset.  */
4060 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4061 in front of this insn and substitute the temporary. */
4062 if (x == virtual_incoming_args_rtx)
4063 new = arg_pointer_rtx, offset = in_arg_offset;
4064 else if (x == virtual_stack_vars_rtx)
4065 new = frame_pointer_rtx, offset = var_offset;
4066 else if (x == virtual_stack_dynamic_rtx)
4067 new = stack_pointer_rtx, offset = dynamic_offset;
4068 else if (x == virtual_outgoing_args_rtx)
4069 new = stack_pointer_rtx, offset = out_arg_offset;
4070 else if (x == virtual_cfa_rtx)
4071 new = arg_pointer_rtx, offset = cfa_offset;
4075 temp = plus_constant (new, offset);
4076 if (!validate_change (object, loc, temp, 0))
4082 temp = force_operand (temp, NULL_RTX);
4086 emit_insns_before (seq, object);
4087 if (! validate_change (object, loc, temp, 0)
4088 && ! validate_replace_rtx (x, temp, object))
/* Visible fragment of the ADDRESSOF case.  */
4096 if (GET_CODE (XEXP (x, 0)) == REG)
4099 else if (GET_CODE (XEXP (x, 0)) == MEM)
4101 /* If we have a (addressof (mem ..)), do any instantiation inside
4102 since we know we'll be making the inside valid when we finally
4103 remove the ADDRESSOF. */
4104 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
/* Generic fallback: recurse into every operand per the rtx format
   string ('e' = expression, 'E' = vector of expressions).  */
4113 /* Scan all subexpressions. */
4114 fmt = GET_RTX_FORMAT (code);
4115 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4118 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4121 else if (*fmt == 'E')
4122 for (j = 0; j < XVECLEN (x, i); j++)
4123 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4130 /* Optimization: assuming this function does not receive nonlocal gotos,
4131 delete the handlers for such, as well as the insns to establish
4132 and disestablish them. */
/* NOTE(review): the function header is missing from this listing;
   presumably this is `delete_handlers' -- confirm against the full
   source.  Many interior lines (braces, delete calls) are also
   missing.  */
4138 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4140 /* Delete the handler by turning off the flag that would
4141 prevent jump_optimize from deleting it.
4142 Also permit deletion of the nonlocal labels themselves
4143 if nothing local refers to them. */
4144 if (GET_CODE (insn) == CODE_LABEL)
4148 LABEL_PRESERVE_P (insn) = 0;
4150 /* Remove it from the nonlocal_label list, to avoid confusing
4152 for (t = nonlocal_labels, last_t = 0; t;
4153 last_t = t, t = TREE_CHAIN (t))
4154 if (DECL_RTL (TREE_VALUE (t)) == insn)
/* Unlink the matching label: special-case the list head, otherwise
   splice around T.  */
4159 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4161 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4164 if (GET_CODE (insn) == INSN)
4168 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4169 if (reg_mentioned_p (t, PATTERN (insn)))
4175 || (nonlocal_goto_stack_level != 0
4176 && reg_mentioned_p (nonlocal_goto_stack_level,
4183 /* Output a USE for any register use in RTL.
4184 This is used with -noreg to mark the extent of lifespan
4185 of any registers used in a user-visible variable's DECL_RTL. */
/* NOTE(review): the function header is missing from this listing;
   presumably `use_variable (rtl)' (cf. use_variable_after below) --
   confirm against the full source.  */
4191 if (GET_CODE (rtl) == REG)
4192 /* This is a register variable. */
4193 emit_insn (gen_rtx_USE (VOIDmode, rtl));
/* A MEM whose address is a non-virtual pseudo (and not the internal arg
   pointer) holds a variable-sized object; keep its address register
   alive instead.  */
4194 else if (GET_CODE (rtl) == MEM
4195 && GET_CODE (XEXP (rtl, 0)) == REG
4196 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
4197 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
4198 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
4199 /* This is a variable-sized structure. */
4200 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
4203 /* Like use_variable except that it outputs the USEs after INSN
4204 instead of at the end of the insn-chain. */
/* NOTE(review): return type, parameter declarations, and braces are
   missing from this listing.  Mirrors use_variable but uses
   emit_insn_after to place the USE right after INSN.  */
4207 use_variable_after (rtl, insn)
4210 if (GET_CODE (rtl) == REG)
4211 /* This is a register variable. */
4212 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
4213 else if (GET_CODE (rtl) == MEM
4214 && GET_CODE (XEXP (rtl, 0)) == REG
4215 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
4216 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
4217 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
4218 /* This is a variable-sized structure. */
4219 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
4225 return max_parm_reg;
4228 /* Return the first insn following those generated by `assign_parms'. */
/* NOTE(review): the return type, braces, and the guarding
   `if (last_parm_insn)' line are missing from this listing -- as shown,
   the two returns appear unconditional; confirm against the full
   source.  */
4231 get_first_nonparm_insn ()
4234 return NEXT_INSN (last_parm_insn);
4235 return get_insns ();
4238 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4239 Crash if there is none. */
/* NOTE(review): return type, braces, and the `return searcher;' on a
   successful match are missing from this listing.  */
4242 get_first_block_beg ()
4244 register rtx searcher;
4245 register rtx insn = get_first_nonparm_insn ();
4247 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4248 if (GET_CODE (searcher) == NOTE
4249 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
/* Falling off the loop means no block-begin note exists; that is a
   caller error.  */
4252 abort (); /* Invalid call to this function. (See comments above.) */
4256 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4257 This means a type for which function calls must pass an address to the
4258 function or get an address back from the function.
4259 EXP may be a type node or an expression (whose type is tested). */
/* NOTE(review): return type, parameter declaration, braces, several
   `return 1;' statements after the positive tests, and the final
   `return 0;' are missing from this listing.  */
4262 aggregate_value_p (exp)
4265 int i, regno, nregs;
/* If EXP is a type node ('t' class), test it directly; otherwise test
   its type.  */
4268 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
4271 type = TREE_TYPE (exp);
4273 if (RETURN_IN_MEMORY (type))
4275 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4276 and thus can't be returned in registers. */
4277 if (TREE_ADDRESSABLE (type))
4279 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4281 /* Make sure we have suitable call-clobbered regs to return
4282 the value in; if not, we must return it in memory. */
4283 reg = hard_function_value (type, 0);
4285 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4287 if (GET_CODE (reg) != REG)
4290 regno = REGNO (reg);
4291 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4292 for (i = 0; i < nregs; i++)
4293 if (! call_used_regs[regno + i])
4298 /* Assign RTL expressions to the function's parameters.
4299 This may involve copying them into registers and using
4300 those registers as the RTL for them.
4302 If SECOND_TIME is non-zero it means that this function is being
4303 called a second time. This is done by integrate.c when a function's
4304 compilation is deferred. We need to come back here in case the
4305 FUNCTION_ARG macro computes items needed for the rest of the compilation
4306 (such as changing which registers are fixed or caller-saved). But suppress
4307 writing any insns or setting DECL_RTL of anything in this case. */
4310 assign_parms (fndecl, second_time)
4315 register rtx entry_parm = 0;
4316 register rtx stack_parm = 0;
4317 CUMULATIVE_ARGS args_so_far;
4318 enum machine_mode promoted_mode, passed_mode;
4319 enum machine_mode nominal_mode, promoted_nominal_mode;
4321 /* Total space needed so far for args on the stack,
4322 given as a constant and a tree-expression. */
4323 struct args_size stack_args_size;
4324 tree fntype = TREE_TYPE (fndecl);
4325 tree fnargs = DECL_ARGUMENTS (fndecl);
4326 /* This is used for the arg pointer when referring to stack args. */
4327 rtx internal_arg_pointer;
4328 /* This is a dummy PARM_DECL that we used for the function result if
4329 the function returns a structure. */
4330 tree function_result_decl = 0;
4331 #ifdef SETUP_INCOMING_VARARGS
4332 int varargs_setup = 0;
4334 rtx conversion_insns = 0;
4336 /* Nonzero if the last arg is named `__builtin_va_alist',
4337 which is used on some machines for old-fashioned non-ANSI varargs.h;
4338 this should be stuck onto the stack as if it had arrived there. */
4340 = (current_function_varargs
4342 && (parm = tree_last (fnargs)) != 0
4344 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4345 "__builtin_va_alist")));
4347 /* Nonzero if function takes extra anonymous args.
4348 This means the last named arg must be on the stack
4349 right before the anonymous ones. */
4351 = (TYPE_ARG_TYPES (fntype) != 0
4352 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4353 != void_type_node));
4355 current_function_stdarg = stdarg;
4357 /* If the reg that the virtual arg pointer will be translated into is
4358 not a fixed reg or is the stack pointer, make a copy of the virtual
4359 arg pointer, and address parms via the copy. The frame pointer is
4360 considered fixed even though it is not marked as such.
4362 The second time through, simply use ap to avoid generating rtx. */
4364 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4365 || ! (fixed_regs[ARG_POINTER_REGNUM]
4366 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
4368 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4370 internal_arg_pointer = virtual_incoming_args_rtx;
4371 current_function_internal_arg_pointer = internal_arg_pointer;
4373 stack_args_size.constant = 0;
4374 stack_args_size.var = 0;
4376 /* If struct value address is treated as the first argument, make it so. */
4377 if (aggregate_value_p (DECL_RESULT (fndecl))
4378 && ! current_function_returns_pcc_struct
4379 && struct_value_incoming_rtx == 0)
4381 tree type = build_pointer_type (TREE_TYPE (fntype));
4383 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4385 DECL_ARG_TYPE (function_result_decl) = type;
4386 TREE_CHAIN (function_result_decl) = fnargs;
4387 fnargs = function_result_decl;
4390 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4391 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4392 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
4394 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4395 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4397 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4400 /* We haven't yet found an argument that we must push and pretend the
4402 current_function_pretend_args_size = 0;
4404 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4406 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4407 struct args_size stack_offset;
4408 struct args_size arg_size;
4409 int passed_pointer = 0;
4410 int did_conversion = 0;
4411 tree passed_type = DECL_ARG_TYPE (parm);
4412 tree nominal_type = TREE_TYPE (parm);
4415 /* Set LAST_NAMED if this is last named arg before some
4417 int last_named = ((TREE_CHAIN (parm) == 0
4418 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4419 && (stdarg || current_function_varargs));
4420 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4421 most machines, if this is a varargs/stdarg function, then we treat
4422 the last named arg as if it were anonymous too. */
4423 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4425 if (TREE_TYPE (parm) == error_mark_node
4426 /* This can happen after weird syntax errors
4427 or if an enum type is defined among the parms. */
4428 || TREE_CODE (parm) != PARM_DECL
4429 || passed_type == NULL)
4431 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4432 = gen_rtx_MEM (BLKmode, const0_rtx);
4433 TREE_USED (parm) = 1;
4437 /* For varargs.h function, save info about regs and stack space
4438 used by the individual args, not including the va_alist arg. */
4439 if (hide_last_arg && last_named)
4440 current_function_args_info = args_so_far;
4442 /* Find mode of arg as it is passed, and mode of arg
4443 as it should be during execution of this function. */
4444 passed_mode = TYPE_MODE (passed_type);
4445 nominal_mode = TYPE_MODE (nominal_type);
4447 /* If the parm's mode is VOID, its value doesn't matter,
4448 and avoid the usual things like emit_move_insn that could crash. */
4449 if (nominal_mode == VOIDmode)
4451 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4455 /* If the parm is to be passed as a transparent union, use the
4456 type of the first field for the tests below. We have already
4457 verified that the modes are the same. */
4458 if (DECL_TRANSPARENT_UNION (parm)
4459 || TYPE_TRANSPARENT_UNION (passed_type))
4460 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4462 /* See if this arg was passed by invisible reference. It is if
4463 it is an object whose size depends on the contents of the
4464 object itself or if the machine requires these objects be passed
4467 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4468 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4469 || TREE_ADDRESSABLE (passed_type)
4470 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4471 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4472 passed_type, named_arg)
4476 passed_type = nominal_type = build_pointer_type (passed_type);
4478 passed_mode = nominal_mode = Pmode;
4481 promoted_mode = passed_mode;
4483 #ifdef PROMOTE_FUNCTION_ARGS
4484 /* Compute the mode in which the arg is actually extended to. */
4485 unsignedp = TREE_UNSIGNED (passed_type);
4486 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4489 /* Let machine desc say which reg (if any) the parm arrives in.
4490 0 means it arrives on the stack. */
4491 #ifdef FUNCTION_INCOMING_ARG
4492 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4493 passed_type, named_arg);
4495 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4496 passed_type, named_arg);
4499 if (entry_parm == 0)
4500 promoted_mode = passed_mode;
4502 #ifdef SETUP_INCOMING_VARARGS
4503 /* If this is the last named parameter, do any required setup for
4504 varargs or stdargs. We need to know about the case of this being an
4505 addressable type, in which case we skip the registers it
4506 would have arrived in.
4508 For stdargs, LAST_NAMED will be set for two parameters, the one that
4509 is actually the last named, and the dummy parameter. We only
4510 want to do this action once.
4512 Also, indicate when RTL generation is to be suppressed. */
4513 if (last_named && !varargs_setup)
4515 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4516 current_function_pretend_args_size,
4522 /* Determine parm's home in the stack,
4523 in case it arrives in the stack or we should pretend it did.
4525 Compute the stack position and rtx where the argument arrives
4528 There is one complexity here: If this was a parameter that would
4529 have been passed in registers, but wasn't only because it is
4530 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4531 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4532 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4533 0 as it was the previous time. */
4535 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4536 locate_and_pad_parm (promoted_mode, passed_type,
4537 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4540 #ifdef FUNCTION_INCOMING_ARG
4541 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4543 pretend_named) != 0,
4545 FUNCTION_ARG (args_so_far, promoted_mode,
4547 pretend_named) != 0,
4550 fndecl, &stack_args_size, &stack_offset, &arg_size);
4554 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4556 if (offset_rtx == const0_rtx)
4557 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4559 stack_parm = gen_rtx_MEM (promoted_mode,
4560 gen_rtx_PLUS (Pmode,
4561 internal_arg_pointer,
4564 /* If this is a memory ref that contains aggregate components,
4565 mark it as such for cse and loop optimize. Likewise if it
4567 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4568 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4569 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4572 /* If this parameter was passed both in registers and in the stack,
4573 use the copy on the stack. */
4574 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4577 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4578 /* If this parm was passed part in regs and part in memory,
4579 pretend it arrived entirely in memory
4580 by pushing the register-part onto the stack.
4582 In the special case of a DImode or DFmode that is split,
4583 we could put it together in a pseudoreg directly,
4584 but for now that's not worth bothering with. */
4588 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4589 passed_type, named_arg);
4593 current_function_pretend_args_size
4594 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4595 / (PARM_BOUNDARY / BITS_PER_UNIT)
4596 * (PARM_BOUNDARY / BITS_PER_UNIT));
4600 /* Handle calls that pass values in multiple non-contiguous
4601 locations. The Irix 6 ABI has examples of this. */
4602 if (GET_CODE (entry_parm) == PARALLEL)
4603 emit_group_store (validize_mem (stack_parm), entry_parm,
4604 int_size_in_bytes (TREE_TYPE (parm)),
4605 (TYPE_ALIGN (TREE_TYPE (parm))
4608 move_block_from_reg (REGNO (entry_parm),
4609 validize_mem (stack_parm), nregs,
4610 int_size_in_bytes (TREE_TYPE (parm)));
4612 entry_parm = stack_parm;
4617 /* If we didn't decide this parm came in a register,
4618 by default it came on the stack. */
4619 if (entry_parm == 0)
4620 entry_parm = stack_parm;
4622 /* Record permanently how this parm was passed. */
4624 DECL_INCOMING_RTL (parm) = entry_parm;
4626 /* If there is actually space on the stack for this parm,
4627 count it in stack_args_size; otherwise set stack_parm to 0
4628 to indicate there is no preallocated stack slot for the parm. */
4630 if (entry_parm == stack_parm
4631 || (GET_CODE (entry_parm) == PARALLEL
4632 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4633 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4634 /* On some machines, even if a parm value arrives in a register
4635 there is still an (uninitialized) stack slot allocated for it.
4637 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4638 whether this parameter already has a stack slot allocated,
4639 because an arg block exists only if current_function_args_size
4640 is larger than some threshold, and we haven't calculated that
4641 yet. So, for now, we just assume that stack slots never exist
4643 || REG_PARM_STACK_SPACE (fndecl) > 0
4647 stack_args_size.constant += arg_size.constant;
4649 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4652 /* No stack slot was pushed for this parm. */
4655 /* Update info on where next arg arrives in registers. */
4657 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4658 passed_type, named_arg);
4660 /* If this is our second time through, we are done with this parm. */
4664 /* If we can't trust the parm stack slot to be aligned enough
4665 for its ultimate type, don't use that slot after entry.
4666 We'll make another stack slot, if we need one. */
4668 int thisparm_boundary
4669 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4671 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4675 /* If parm was passed in memory, and we need to convert it on entry,
4676 don't store it back in that same slot. */
4678 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4682 /* Now adjust STACK_PARM to the mode and precise location
4683 where this parameter should live during execution,
4684 if we discover that it must live in the stack during execution.
4685 To make debuggers happier on big-endian machines, we store
4686 the value in the last bytes of the space available. */
4688 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4693 if (BYTES_BIG_ENDIAN
4694 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4695 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4696 - GET_MODE_SIZE (nominal_mode));
4698 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4699 if (offset_rtx == const0_rtx)
4700 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4702 stack_parm = gen_rtx_MEM (nominal_mode,
4703 gen_rtx_PLUS (Pmode,
4704 internal_arg_pointer,
4707 /* If this is a memory ref that contains aggregate components,
4708 mark it as such for cse and loop optimize. */
4709 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4714 /* We need this "use" info, because the gcc-register->stack-register
4715 converter in reg-stack.c needs to know which registers are active
4716 at the start of the function call. The actual parameter loading
4717 instructions are not always available then anymore, since they might
4718 have been optimised away. */
4720 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4721 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4724 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4725 in the mode in which it arrives.
4726 STACK_PARM is an RTX for a stack slot where the parameter can live
4727 during the function (in case we want to put it there).
4728 STACK_PARM is 0 if no stack slot was pushed for it.
4730 Now output code if necessary to convert ENTRY_PARM to
4731 the type in which this function declares it,
4732 and store that result in an appropriate place,
4733 which may be a pseudo reg, may be STACK_PARM,
4734 or may be a local stack slot if STACK_PARM is 0.
4736 Set DECL_RTL to that place. */
4738 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4740 /* If a BLKmode arrives in registers, copy it to a stack slot.
4741 Handle calls that pass values in multiple non-contiguous
4742 locations. The Irix 6 ABI has examples of this. */
4743 if (GET_CODE (entry_parm) == REG
4744 || GET_CODE (entry_parm) == PARALLEL)
4747 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4750 /* Note that we will be storing an integral number of words.
4751 So we have to be careful to ensure that we allocate an
4752 integral number of words. We do this below in the
4753 assign_stack_local if space was not allocated in the argument
4754 list. If it was, this will not work if PARM_BOUNDARY is not
4755 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4756 if it becomes a problem. */
4758 if (stack_parm == 0)
4761 = assign_stack_local (GET_MODE (entry_parm),
4764 /* If this is a memory ref that contains aggregate
4765 components, mark it as such for cse and loop optimize. */
4766 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4769 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4772 if (TREE_READONLY (parm))
4773 RTX_UNCHANGING_P (stack_parm) = 1;
4775 /* Handle calls that pass values in multiple non-contiguous
4776 locations. The Irix 6 ABI has examples of this. */
4777 if (GET_CODE (entry_parm) == PARALLEL)
4778 emit_group_store (validize_mem (stack_parm), entry_parm,
4779 int_size_in_bytes (TREE_TYPE (parm)),
4780 (TYPE_ALIGN (TREE_TYPE (parm))
4783 move_block_from_reg (REGNO (entry_parm),
4784 validize_mem (stack_parm),
4785 size_stored / UNITS_PER_WORD,
4786 int_size_in_bytes (TREE_TYPE (parm)));
4788 DECL_RTL (parm) = stack_parm;
4790 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4791 && ! DECL_INLINE (fndecl))
4792 /* layout_decl may set this. */
4793 || TREE_ADDRESSABLE (parm)
4794 || TREE_SIDE_EFFECTS (parm)
4795 /* If -ffloat-store specified, don't put explicit
4796 float variables into registers. */
4797 || (flag_float_store
4798 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4799 /* Always assign pseudo to structure return or item passed
4800 by invisible reference. */
4801 || passed_pointer || parm == function_result_decl)
4803 /* Store the parm in a pseudoregister during the function, but we
4804 may need to do it in a wider mode. */
4806 register rtx parmreg;
4807 int regno, regnoi = 0, regnor = 0;
4809 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4811 promoted_nominal_mode
4812 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4814 parmreg = gen_reg_rtx (promoted_nominal_mode);
4815 mark_user_reg (parmreg);
4817 /* If this was an item that we received a pointer to, set DECL_RTL
4822 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4823 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4826 DECL_RTL (parm) = parmreg;
4828 /* Copy the value into the register. */
4829 if (nominal_mode != passed_mode
4830 || promoted_nominal_mode != promoted_mode)
4833 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4834 mode, by the caller. We now have to convert it to
4835 NOMINAL_MODE, if different. However, PARMREG may be in
4836 a different mode than NOMINAL_MODE if it is being stored
4839 If ENTRY_PARM is a hard register, it might be in a register
4840 not valid for operating in its mode (e.g., an odd-numbered
4841 register for a DFmode). In that case, moves are the only
4842 thing valid, so we can't do a convert from there. This
4843 occurs when the calling sequence allow such misaligned
4846 In addition, the conversion may involve a call, which could
4847 clobber parameters which haven't been copied to pseudo
4848 registers yet. Therefore, we must first copy the parm to
4849 a pseudo reg here, and save the conversion until after all
4850 parameters have been moved. */
4852 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4854 emit_move_insn (tempreg, validize_mem (entry_parm));
4856 push_to_sequence (conversion_insns);
4857 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4859 /* TREE_USED gets set erroneously during expand_assignment. */
4860 save_tree_used = TREE_USED (parm);
4861 expand_assignment (parm,
4862 make_tree (nominal_type, tempreg), 0, 0);
4863 TREE_USED (parm) = save_tree_used;
4864 conversion_insns = get_insns ();
4869 emit_move_insn (parmreg, validize_mem (entry_parm));
4871 /* If we were passed a pointer but the actual value
4872 can safely live in a register, put it in one. */
4873 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4874 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4875 && ! DECL_INLINE (fndecl))
4876 /* layout_decl may set this. */
4877 || TREE_ADDRESSABLE (parm)
4878 || TREE_SIDE_EFFECTS (parm)
4879 /* If -ffloat-store specified, don't put explicit
4880 float variables into registers. */
4881 || (flag_float_store
4882 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4884 /* We can't use nominal_mode, because it will have been set to
4885 Pmode above. We must use the actual mode of the parm. */
4886 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4887 mark_user_reg (parmreg);
4888 emit_move_insn (parmreg, DECL_RTL (parm));
4889 DECL_RTL (parm) = parmreg;
4890 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4894 #ifdef FUNCTION_ARG_CALLEE_COPIES
4895 /* If we are passed an arg by reference and it is our responsibility
4896 to make a copy, do it now.
4897 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4898 original argument, so we must recreate them in the call to
4899 FUNCTION_ARG_CALLEE_COPIES. */
4900 /* ??? Later add code to handle the case that if the argument isn't
4901 modified, don't do the copy. */
4903 else if (passed_pointer
4904 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4905 TYPE_MODE (DECL_ARG_TYPE (parm)),
4906 DECL_ARG_TYPE (parm),
4908 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4911 tree type = DECL_ARG_TYPE (parm);
4913 /* This sequence may involve a library call perhaps clobbering
4914 registers that haven't been copied to pseudos yet. */
4916 push_to_sequence (conversion_insns);
4918 if (TYPE_SIZE (type) == 0
4919 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4920 /* This is a variable sized object. */
4921 copy = gen_rtx_MEM (BLKmode,
4922 allocate_dynamic_stack_space
4923 (expr_size (parm), NULL_RTX,
4924 TYPE_ALIGN (type)));
4926 copy = assign_stack_temp (TYPE_MODE (type),
4927 int_size_in_bytes (type), 1);
4928 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4929 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4931 store_expr (parm, copy, 0);
4932 emit_move_insn (parmreg, XEXP (copy, 0));
4933 if (current_function_check_memory_usage)
4934 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4935 XEXP (copy, 0), Pmode,
4936 GEN_INT (int_size_in_bytes (type)),
4937 TYPE_MODE (sizetype),
4938 GEN_INT (MEMORY_USE_RW),
4939 TYPE_MODE (integer_type_node));
4940 conversion_insns = get_insns ();
4944 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4946 /* In any case, record the parm's desired stack location
4947 in case we later discover it must live in the stack.
4949 If it is a COMPLEX value, store the stack location for both
4952 if (GET_CODE (parmreg) == CONCAT)
4953 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4955 regno = REGNO (parmreg);
4957 if (regno >= max_parm_reg)
4960 int old_max_parm_reg = max_parm_reg;
4962 /* It's slow to expand this one register at a time,
4963 but it's also rare and we need max_parm_reg to be
4964 precisely correct. */
4965 max_parm_reg = regno + 1;
4966 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4967 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4968 old_max_parm_reg * sizeof (rtx));
4969 bzero ((char *) (new + old_max_parm_reg),
4970 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4971 parm_reg_stack_loc = new;
4974 if (GET_CODE (parmreg) == CONCAT)
4976 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4978 regnor = REGNO (gen_realpart (submode, parmreg));
4979 regnoi = REGNO (gen_imagpart (submode, parmreg));
4981 if (stack_parm != 0)
4983 parm_reg_stack_loc[regnor]
4984 = gen_realpart (submode, stack_parm);
4985 parm_reg_stack_loc[regnoi]
4986 = gen_imagpart (submode, stack_parm);
4990 parm_reg_stack_loc[regnor] = 0;
4991 parm_reg_stack_loc[regnoi] = 0;
4995 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4997 /* Mark the register as eliminable if we did no conversion
4998 and it was copied from memory at a fixed offset,
4999 and the arg pointer was not copied to a pseudo-reg.
5000 If the arg pointer is a pseudo reg or the offset formed
5001 an invalid address, such memory-equivalences
5002 as we make here would screw up life analysis for it. */
5003 if (nominal_mode == passed_mode
5006 && GET_CODE (stack_parm) == MEM
5007 && stack_offset.var == 0
5008 && reg_mentioned_p (virtual_incoming_args_rtx,
5009 XEXP (stack_parm, 0)))
5011 rtx linsn = get_last_insn ();
5014 /* Mark complex types separately. */
5015 if (GET_CODE (parmreg) == CONCAT)
5016 /* Scan backwards for the set of the real and
5018 for (sinsn = linsn; sinsn != 0;
5019 sinsn = prev_nonnote_insn (sinsn))
5021 set = single_set (sinsn);
5023 && SET_DEST (set) == regno_reg_rtx [regnoi])
5025 = gen_rtx_EXPR_LIST (REG_EQUIV,
5026 parm_reg_stack_loc[regnoi],
5029 && SET_DEST (set) == regno_reg_rtx [regnor])
5031 = gen_rtx_EXPR_LIST (REG_EQUIV,
5032 parm_reg_stack_loc[regnor],
5035 else if ((set = single_set (linsn)) != 0
5036 && SET_DEST (set) == parmreg)
5038 = gen_rtx_EXPR_LIST (REG_EQUIV,
5039 stack_parm, REG_NOTES (linsn));
5042 /* For pointer data type, suggest pointer register. */
5043 if (POINTER_TYPE_P (TREE_TYPE (parm)))
5044 mark_reg_pointer (parmreg,
5045 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
5050 /* Value must be stored in the stack slot STACK_PARM
5051 during function execution. */
5053 if (promoted_mode != nominal_mode)
5055 /* Conversion is required. */
5056 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5058 emit_move_insn (tempreg, validize_mem (entry_parm));
5060 push_to_sequence (conversion_insns);
5061 entry_parm = convert_to_mode (nominal_mode, tempreg,
5062 TREE_UNSIGNED (TREE_TYPE (parm)));
5065 /* ??? This may need a big-endian conversion on sparc64. */
5066 stack_parm = change_address (stack_parm, nominal_mode,
5069 conversion_insns = get_insns ();
5074 if (entry_parm != stack_parm)
5076 if (stack_parm == 0)
5079 = assign_stack_local (GET_MODE (entry_parm),
5080 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
5081 /* If this is a memory ref that contains aggregate components,
5082 mark it as such for cse and loop optimize. */
5083 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
5086 if (promoted_mode != nominal_mode)
5088 push_to_sequence (conversion_insns);
5089 emit_move_insn (validize_mem (stack_parm),
5090 validize_mem (entry_parm));
5091 conversion_insns = get_insns ();
5095 emit_move_insn (validize_mem (stack_parm),
5096 validize_mem (entry_parm));
5098 if (current_function_check_memory_usage)
5100 push_to_sequence (conversion_insns);
5101 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
5102 XEXP (stack_parm, 0), Pmode,
5103 GEN_INT (GET_MODE_SIZE (GET_MODE
5105 TYPE_MODE (sizetype),
5106 GEN_INT (MEMORY_USE_RW),
5107 TYPE_MODE (integer_type_node));
5109 conversion_insns = get_insns ();
5112 DECL_RTL (parm) = stack_parm;
5115 /* If this "parameter" was the place where we are receiving the
5116 function's incoming structure pointer, set up the result. */
5117 if (parm == function_result_decl)
5119 tree result = DECL_RESULT (fndecl);
5120 tree restype = TREE_TYPE (result);
5123 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
5125 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
5126 AGGREGATE_TYPE_P (restype));
5129 if (TREE_THIS_VOLATILE (parm))
5130 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
5131 if (TREE_READONLY (parm))
5132 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
5135 /* Output all parameter conversion instructions (possibly including calls)
5136 now that all parameters have been copied out of hard registers. */
5137 emit_insns (conversion_insns);
5139 last_parm_insn = get_last_insn ();
5141 current_function_args_size = stack_args_size.constant;
5143 /* Adjust function incoming argument size for alignment and
5146 #ifdef REG_PARM_STACK_SPACE
5147 #ifndef MAYBE_REG_PARM_STACK_SPACE
5148 current_function_args_size = MAX (current_function_args_size,
5149 REG_PARM_STACK_SPACE (fndecl));
5153 #ifdef STACK_BOUNDARY
5154 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5156 current_function_args_size
5157 = ((current_function_args_size + STACK_BYTES - 1)
5158 / STACK_BYTES) * STACK_BYTES;
5161 #ifdef ARGS_GROW_DOWNWARD
5162 current_function_arg_offset_rtx
5163 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5164 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
5165 size_int (-stack_args_size.constant)),
5166 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
5168 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5171 /* See how many bytes, if any, of its args a function should try to pop
5174 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5175 current_function_args_size);
5177 /* For stdarg.h function, save info about
5178 regs and stack space used by the named args. */
5181 current_function_args_info = args_so_far;
5183 /* Set the rtx used for the function return value. Put this in its
5184 own variable so any optimizers that need this information don't have
5185 to include tree.h. Do this here so it gets done when an inlined
5186 function gets output. */
5188 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
5191 /* Indicate whether REGNO is an incoming argument to the current function
5192 that was promoted to a wider mode. If so, return the RTX for the
5193 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5194 that REGNO is promoted from and whether the promotion was signed or
5197 #ifdef PROMOTE_FUNCTION_ARGS
5200 promoted_input_arg (regno, pmode, punsignedp)
5202 enum machine_mode *pmode;
/* NOTE(review): the return type line, the declarations of REGNO and
   PUNSIGNEDP, and the opening brace are elided from this excerpt;
   presumably the function returns `rtx' — confirm against the full file.  */
5207 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5208 arg = TREE_CHAIN (arg))
/* Look for the parameter whose incoming RTL is hard register REGNO and
   whose passed type has the same mode as its declared type.  */
5209 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5210 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5211 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5213 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5214 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
/* Compute the mode this argument would be promoted to on entry.  */
5216 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
/* A promotion actually happened iff the incoming register carries the
   promoted mode and that mode differs from the declared mode.  */
5217 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5218 && mode != DECL_MODE (arg))
/* Report the unpromoted mode and signedness to the caller.  */
5220 *pmode = DECL_MODE (arg);
5221 *punsignedp = unsignedp;
5222 return DECL_INCOMING_RTL (arg);
5231 /* Compute the size and offset from the start of the stacked arguments for a
5232 parm passed in mode PASSED_MODE and with type TYPE.
5234 INITIAL_OFFSET_PTR points to the current offset into the stacked
5237 The starting offset and size for this parm are returned in *OFFSET_PTR
5238 and *ARG_SIZE_PTR, respectively.
5240 IN_REGS is non-zero if the argument will be passed in registers. It will
5241 never be set if REG_PARM_STACK_SPACE is not defined.
5243 FNDECL is the function in which the argument was defined.
5245 There are two types of rounding that are done. The first, controlled by
5246 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5247 list to be aligned to the specific boundary (in bits). This rounding
5248 affects the initial and starting offsets, but not the argument size.
5250 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5251 optionally rounds the size of the parm to PARM_BOUNDARY. The
5252 initial offset is not affected by this rounding, while the size always
5253 is and the starting offset may be. */
5255 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
5256 initial_offset_ptr is positive because locate_and_pad_parm's
5257 callers pass in the total size of args so far as
5258 initial_offset_ptr. arg_size_ptr is always positive.*/
5261 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5262 initial_offset_ptr, offset_ptr, arg_size_ptr)
5263 enum machine_mode passed_mode;
5266 tree fndecl ATTRIBUTE_UNUSED;
5267 struct args_size *initial_offset_ptr;
5268 struct args_size *offset_ptr;
5269 struct args_size *arg_size_ptr;
/* Size of the parm in bytes: taken from TYPE when one was supplied,
   otherwise from the machine mode alone.  */
5272 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5273 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5274 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5276 #ifdef REG_PARM_STACK_SPACE
5277 /* If we have found a stack parm before we reach the end of the
5278 area reserved for registers, skip that area. */
5281 int reg_parm_stack_space = 0;
5283 #ifdef MAYBE_REG_PARM_STACK_SPACE
5284 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5286 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5288 if (reg_parm_stack_space > 0)
/* Bump the running offset past the reserved register-save area.  The
   variable-size and constant-only cases are handled separately.  */
5290 if (initial_offset_ptr->var)
5292 initial_offset_ptr->var
5293 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5294 size_int (reg_parm_stack_space))
5295 initial_offset_ptr->constant = 0;
5297 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5298 initial_offset_ptr->constant = reg_parm_stack_space;
5301 #endif /* REG_PARM_STACK_SPACE */
5303 arg_size_ptr->var = 0;
5304 arg_size_ptr->constant = 0;
5306 #ifdef ARGS_GROW_DOWNWARD
/* Downward-growing args: the parm's offset is the negation of the
   accumulated size so far, minus the (rounded) parm size itself.  */
5307 if (initial_offset_ptr->var)
5309 offset_ptr->constant = 0;
5310 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
5311 initial_offset_ptr->var);
5315 offset_ptr->constant = - initial_offset_ptr->constant;
5316 offset_ptr->var = 0;
/* Round the size up to a multiple of PARM_BOUNDARY when padding is
   requested and the size is variable or not already a multiple.  */
5318 if (where_pad != none
5319 && (TREE_CODE (sizetree) != INTEGER_CST
5320 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5321 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5322 SUB_PARM_SIZE (*offset_ptr, sizetree);
5323 if (where_pad != downward)
5324 pad_to_arg_alignment (offset_ptr, boundary);
/* The effective size is the distance between the (negative) offset and
   the (negative of the) initial offset.  */
5325 if (initial_offset_ptr->var)
5327 arg_size_ptr->var = size_binop (MINUS_EXPR,
5328 size_binop (MINUS_EXPR,
5330 initial_offset_ptr->var),
5335 arg_size_ptr->constant = (- initial_offset_ptr->constant
5336 - offset_ptr->constant);
5338 #else /* !ARGS_GROW_DOWNWARD */
5340 #ifdef REG_PARM_STACK_SPACE
5341 || REG_PARM_STACK_SPACE (fndecl) > 0
5343 /* For the gcc-2_95-branch we want to make sure not to break something
5344 on platforms which pass argument in registers but don't define
5345 REG_PARM_STACK_SPACE. So we force the original behaviour here. */
5349 pad_to_arg_alignment (initial_offset_ptr, boundary);
5351 *offset_ptr = *initial_offset_ptr;
5353 #ifdef PUSH_ROUNDING
/* Let the target round the push size for non-BLKmode parms.  */
5354 if (passed_mode != BLKmode)
5355 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5358 /* Pad_below needs the pre-rounded size to know how much to pad below
5359 so this must be done before rounding up. */
5360 if (where_pad == downward
5361 /* However, BLKmode args passed in regs have their padding done elsewhere.
5362 The stack slot must be able to hold the entire register. */
5363 && !(in_regs && passed_mode == BLKmode))
5364 pad_below (offset_ptr, passed_mode, sizetree);
5366 if (where_pad != none
5367 && (TREE_CODE (sizetree) != INTEGER_CST
5368 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5369 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5371 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5372 #endif /* ARGS_GROW_DOWNWARD */
5375 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5376 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5379 pad_to_arg_alignment (offset_ptr, boundary)
5380 struct args_size *offset_ptr;
5383 int boundary_in_bytes = boundary / BITS_PER_UNIT;
/* Nothing to do for byte alignment; every offset is byte-aligned.  */
5385 if (boundary > BITS_PER_UNIT)
/* Variable-size offset: round with tree arithmetic.  (Parts of this
   branch are elided from this excerpt.)  */
5387 if (offset_ptr->var)
5390 #ifdef ARGS_GROW_DOWNWARD
5395 (ARGS_SIZE_TREE (*offset_ptr),
5396 boundary / BITS_PER_UNIT);
5397 offset_ptr->constant = 0; /*?*/
/* Constant offset: round toward the direction the args grow.  Offsets
   are negative when ARGS_GROW_DOWNWARD, hence FLOOR rather than CEIL.  */
5400 offset_ptr->constant =
5401 #ifdef ARGS_GROW_DOWNWARD
5402 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5404 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5409 #ifndef ARGS_GROW_DOWNWARD
/* Advance *OFFSET_PTR past any padding that goes below the parm, so the
   parm's data ends up in the last bytes of its rounded slot.  SIZETREE
   is the pre-rounded size of the parm.  */
5411 pad_below (offset_ptr, passed_mode, sizetree)
5412 struct args_size *offset_ptr;
5413 enum machine_mode passed_mode;
5416 if (passed_mode != BLKmode)
/* Non-BLK modes: pad by the difference between the mode's size and
   its size rounded up to PARM_BOUNDARY bits.  */
5418 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5419 offset_ptr->constant
5420 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5421 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5422 - GET_MODE_SIZE (passed_mode));
/* BLKmode: compute rounded-size minus size using tree arithmetic,
   since the size may not be a compile-time constant.  */
5426 if (TREE_CODE (sizetree) != INTEGER_CST
5427 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5429 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5430 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5432 ADD_PARM_SIZE (*offset_ptr, s2);
5433 SUB_PARM_SIZE (*offset_ptr, sizetree);
5439 #ifdef ARGS_GROW_DOWNWARD
/* Round VALUE down to the nearest multiple of DIVISOR, using tree
   (size_binop) arithmetic so VALUE may be a variable-size expression.
   NOTE(review): the return type and parameter declarations are elided
   from this excerpt.  */
5441 round_down (value, divisor)
5445 return size_binop (MULT_EXPR,
5446 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5447 size_int (divisor));
5451 /* Walk the tree of blocks describing the binding levels within a function
5452 and warn about uninitialized variables.
5453 This is done after calling flow_analysis and before global_alloc
5454 clobbers the pseudo-regs to hard regs. */
5457 uninitialized_vars_warning (block)
5460 register tree decl, sub;
5461 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5463 if (TREE_CODE (decl) == VAR_DECL
5464 /* These warnings are unreliable for aggregates
5465 because assigning the fields one by one can fail to convince
5466 flow.c that the entire aggregate was initialized.
5467 Unions are troublesome because members may be shorter. */
5468 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5469 && DECL_RTL (decl) != 0
5470 && GET_CODE (DECL_RTL (decl)) == REG
5471 /* Global optimizations can make it difficult to determine if a
5472 particular variable has been initialized. However, a VAR_DECL
5473 with a nonzero DECL_INITIAL had an initializer, so do not
5474 claim it is potentially uninitialized.
5476 We do not care about the actual value in DECL_INITIAL, so we do
5477 not worry that it may be a dangling pointer. */
5478 && DECL_INITIAL (decl) == NULL_TREE
5479 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5480 warning_with_decl (decl,
5481 "`%s' might be used uninitialized in this function");
/* Separately, warn about register variables whose value would be lost
   across a longjmp/vfork, per regno_clobbered_at_setjmp.  */
5482 if (TREE_CODE (decl) == VAR_DECL
5483 && DECL_RTL (decl) != 0
5484 && GET_CODE (DECL_RTL (decl)) == REG
5485 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5486 warning_with_decl (decl,
5487 "variable `%s' might be clobbered by `longjmp' or `vfork'");
/* Recurse into the nested binding levels.  */
5489 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5490 uninitialized_vars_warning (sub);
5493 /* Do the appropriate part of uninitialized_vars_warning
5494 but for arguments instead of local variables. */
5497 setjmp_args_warning ()
/* Walk the current function's parameter list and warn about any
   register-resident argument that setjmp/longjmp could clobber.  */
5500 for (decl = DECL_ARGUMENTS (current_function_decl);
5501 decl; decl = TREE_CHAIN (decl))
5502 if (DECL_RTL (decl) != 0
5503 && GET_CODE (DECL_RTL (decl)) == REG
5504 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5505 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5508 /* If this function calls setjmp, put all vars into the stack
5509 unless they were declared `register'. */
5512 setjmp_protect (block)
5515 register tree decl, sub;
5516 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
/* Select variables/parms currently living in a register, or in memory
   behind an ADDRESSOF (a not-yet-committed register).  */
5517 if ((TREE_CODE (decl) == VAR_DECL
5518 || TREE_CODE (decl) == PARM_DECL)
5519 && DECL_RTL (decl) != 0
5520 && (GET_CODE (DECL_RTL (decl)) == REG
5521 || (GET_CODE (DECL_RTL (decl)) == MEM
5522 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5523 /* If this variable came from an inline function, it must be
5524 that its life doesn't overlap the setjmp. If there was a
5525 setjmp in the function, it would already be in memory. We
5526 must exclude such variable because their DECL_RTL might be
5527 set to strange things such as virtual_stack_vars_rtx. */
5528 && ! DECL_FROM_INLINE (decl)
5530 #ifdef NON_SAVING_SETJMP
5531 /* If longjmp doesn't restore the registers,
5532 don't put anything in them. */
5536 ! DECL_REGISTER (decl)))
5537 put_var_into_stack (decl);
/* Recurse into the nested binding levels.  */
5538 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5539 setjmp_protect (sub);
/* setjmp_protect_args: same stack-forcing policy as setjmp_protect,
   but applied to the current function's arguments instead of a
   BLOCK's local variables.
   NOTE(review): gapped listing -- return type, braces, local decl and
   the #else/#endif arms around NON_SAVING_SETJMP (5560-5562) are
   missing; visible code kept byte-identical.  */
5542 /* Like the previous function, but for args instead of local variables. */
5545 setjmp_protect_args ()
5548 for (decl = DECL_ARGUMENTS (current_function_decl);
5549 decl; decl = TREE_CHAIN (decl))
5550 if ((TREE_CODE (decl) == VAR_DECL
5551 || TREE_CODE (decl) == PARM_DECL)
5552 && DECL_RTL (decl) != 0
5553 && (GET_CODE (DECL_RTL (decl)) == REG
5554 || (GET_CODE (DECL_RTL (decl)) == MEM
5555 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5557 /* If longjmp doesn't restore the registers,
5558 don't put anything in them. */
5559 #ifdef NON_SAVING_SETJMP
5563 ! DECL_REGISTER (decl)))
5564 put_var_into_stack (decl);
/* lookup_static_chain: map DECL's enclosing-function context to the rtx
   that addresses that frame: virtual_stack_vars_rtx when the context is
   the function being compiled (or the one being inlined into it),
   otherwise the display entry recorded in context_display.
   NOTE(review): gapped listing -- return type, braces, the `link'
   declaration, the first half of the early-exit condition (5575-5577),
   and the final return/abort lines (5591+) are missing.  */
5567 /* Return the context-pointer register corresponding to DECL,
5568 or 0 if it does not need one. */
5571 lookup_static_chain (decl)
5574 tree context = decl_function_context (decl);
5578 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5581 /* We treat inline_function_decl as an alias for the current function
5582 because that is the inline function whose vars, types, etc.
5583 are being merged into the current function.
5584 See expand_inline_function. */
5585 if (context == current_function_decl || context == inline_function_decl)
5586 return virtual_stack_vars_rtx;
/* Otherwise search the display of enclosing-function frames.  */
5588 for (link = context_display; link; link = TREE_CHAIN (link))
5589 if (TREE_PURPOSE (link) == context)
5590 return RTL_EXPR_RTL (TREE_VALUE (link));
/* fix_lexical_addr: rewrite ADDR, a stack-slot address belonging to
   VAR's defining (outer) function, into an address usable from the
   current (nested) function, by rebasing the displacement onto the
   outer frame reached through the static chain / display.
   NOTE(review): gapped listing -- parameter declarations, braces,
   several else/abort arms, and the #else of NEED_SEPARATE_AP are
   missing; visible code kept byte-identical.  */
5595 /* Convert a stack slot address ADDR for variable VAR
5596 (from a containing function)
5597 into an address valid in this function (using a static chain). */
5600 fix_lexical_addr (addr, var)
5605 HOST_WIDE_INT displacement;
5606 tree context = decl_function_context (var);
5607 struct function *fp;
5610 /* If this is the present function, we need not do anything. */
5611 if (context == current_function_decl || context == inline_function_decl)
/* Locate the `struct function' of the enclosing function.  */
5614 for (fp = outer_function_chain; fp; fp = fp->next)
5615 if (fp->decl == context)
/* Strip an ADDRESSOF wrapper down to the underlying MEM address.  */
5621 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5622 addr = XEXP (XEXP (addr, 0), 0);
5624 /* Decode given address as base reg plus displacement. */
5625 if (GET_CODE (addr) == REG)
5626 basereg = addr, displacement = 0;
5627 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5628 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5632 /* We accept vars reached via the containing function's
5633 incoming arg pointer and via its stack variables pointer. */
5634 if (basereg == fp->internal_arg_pointer)
5636 /* If reached via arg pointer, get the arg pointer value
5637 out of that function's stack frame.
5639 There are two cases: If a separate ap is needed, allocate a
5640 slot in the outer function for it and dereference it that way.
5641 This is correct even if the real ap is actually a pseudo.
5642 Otherwise, just adjust the offset from the frame pointer to
5645 #ifdef NEED_SEPARATE_AP
/* Lazily create a save slot for the outer function's arg pointer.  */
5648 if (fp->arg_pointer_save_area == 0)
5649 fp->arg_pointer_save_area
5650 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5652 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5653 addr = memory_address (Pmode, addr);
5655 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
/* NOTE(review): the #else arm follows -- rebase onto the frame pointer
   via the static chain (lines 5656/5659-5661 of the original, including
   the #endif, are missing from this listing).  */
5657 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5658 base = lookup_static_chain (var);
5662 else if (basereg == virtual_stack_vars_rtx)
5664 /* This is the same code as lookup_static_chain, duplicated here to
5665 avoid an extra call to decl_function_context. */
5668 for (link = context_display; link; link = TREE_CHAIN (link))
5669 if (TREE_PURPOSE (link) == context)
5671 base = RTL_EXPR_RTL (TREE_VALUE (link));
5679 /* Use same offset, relative to appropriate static chain or argument
5681 return plus_constant (base, displacement);
/* trampoline_address: return (and lazily create) the stack trampoline
   used to call nested function FUNCTION.  Reuses an existing trampoline
   from this function or any outer function when one is recorded;
   otherwise allocates space in the defining function's frame, records
   it on the appropriate trampoline_list, and returns its rounded
   address.  Initialization of the trampoline contents is deferred to
   expand_function_end.
   NOTE(review): gapped listing -- local declarations, braces, #else
   and #endif lines, and the pop_obstacks/exit of the outer-function
   branch are missing; visible code kept byte-identical.  */
5684 /* Return the address of the trampoline for entering nested fn FUNCTION.
5685 If necessary, allocate a trampoline (in the stack frame)
5686 and emit rtl to initialize its contents (at entry to this function). */
5689 trampoline_address (function)
5695 struct function *fp;
5698 /* Find an existing trampoline and return it. */
5699 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5700 if (TREE_PURPOSE (link) == function)
5702 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
/* Also search the trampoline lists of all containing functions; such a
   trampoline's address must be fixed up for use from this frame.  */
5704 for (fp = outer_function_chain; fp; fp = fp->next)
5705 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5706 if (TREE_PURPOSE (link) == function)
5708 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5710 return round_trampoline_addr (tramp);
5713 /* None exists; we must make one. */
5715 /* Find the `struct function' for the function containing FUNCTION. */
5717 fn_context = decl_function_context (function);
5718 if (fn_context != current_function_decl
5719 && fn_context != inline_function_decl)
5720 for (fp = outer_function_chain; fp; fp = fp->next)
5721 if (fp->decl == fn_context)
5724 /* Allocate run-time space for this trampoline
5725 (usually in the defining function's stack frame). */
5726 #ifdef ALLOCATE_TRAMPOLINE
5727 tramp = ALLOCATE_TRAMPOLINE (fp)
5729 /* If rounding needed, allocate extra space
5730 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5731 #ifdef TRAMPOLINE_ALIGNMENT
5732 #define TRAMPOLINE_REAL_SIZE \
5733 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5735 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
/* Allocate in the outer function's frame if FUNCTION is nested inside
   an outer function; otherwise in the current frame.  */
5738 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5740 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5743 /* Record the trampoline for reuse and note it for later initialization
5744 by expand_function_end. */
/* Outer-function case: record on FP's list, allocating the tree nodes
   on that function's maybepermanent obstack so they survive.  */
5747 push_obstacks (fp->function_maybepermanent_obstack,
5748 fp->function_maybepermanent_obstack);
5749 rtlexp = make_node (RTL_EXPR);
5750 RTL_EXPR_RTL (rtlexp) = tramp;
5751 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5756 /* Make the RTL_EXPR node temporary, not momentary, so that the
5757 trampoline_list doesn't become garbage. */
5758 int momentary = suspend_momentary ();
5759 rtlexp = make_node (RTL_EXPR);
5760 resume_momentary (momentary);
5762 RTL_EXPR_RTL (rtlexp) = tramp;
5763 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
/* Translate the slot's address into this function's frame and round.  */
5766 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5767 return round_trampoline_addr (tramp);
/* round_trampoline_addr: round TRAMP up to a TRAMPOLINE_ALIGNMENT
   boundary by adding (alignment-1) and masking with -alignment,
   using two expand_binop calls into a scratch pseudo.
   NOTE(review): gapped listing -- the parameter decl, braces, #endif
   and `return tramp;' lines are missing; code kept byte-identical.
   When TRAMPOLINE_ALIGNMENT is not defined, TRAMP is returned
   unchanged (the visible code is entirely inside the #ifdef).  */
5770 /* Given a trampoline address,
5771 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5774 round_trampoline_addr (tramp)
5777 #ifdef TRAMPOLINE_ALIGNMENT
5778 /* Round address up to desired boundary. */
5779 rtx temp = gen_reg_rtx (Pmode);
5780 temp = expand_binop (Pmode, add_optab, tramp,
5781 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5782 temp, 0, OPTAB_LIB_WIDEN);
5783 tramp = expand_binop (Pmode, and_optab, temp,
5784 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5785 temp, 0, OPTAB_LIB_WIDEN);
/* identify_blocks: snapshot the BLOCK tree into a malloc'd vector and
   stamp each BLOCK_BEG/BLOCK_END note with its block's vector index,
   so reorder_blocks can rebuild the tree after the RTL is reshuffled.
   NOTE(review): gapped listing -- return type, parameter decls, local
   decls (block_vector, block_stack, insn, depth, n_blocks), braces,
   and the abort() body after the consistency check are missing.  */
5790 /* The functions identify_blocks and reorder_blocks provide a way to
5791 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5792 duplicate portions of the RTL code. Call identify_blocks before
5793 changing the RTL, and call reorder_blocks after. */
5795 /* Put all this function's BLOCK nodes including those that are chained
5796 onto the first block into a vector, and return it.
5797 Also store in each NOTE for the beginning or end of a block
5798 the index of that block in the vector.
5799 The arguments are BLOCK, the chain of top-level blocks of the function,
5800 and INSNS, the insn chain of the function. */
5803 identify_blocks (block, insns)
5811 int next_block_number = 1;
5812 int current_block_number = 1;
/* First pass of all_blocks just counts; second pass fills the vector.  */
5818 n_blocks = all_blocks (block, 0);
5819 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5820 block_stack = (int *) alloca (n_blocks * sizeof (int));
5822 all_blocks (block, block_vector);
/* Walk the insns, tracking block nesting with an explicit stack and
   numbering blocks in the order their BEG notes appear.  */
5824 for (insn = insns; insn; insn = NEXT_INSN (insn))
5825 if (GET_CODE (insn) == NOTE)
5827 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5829 block_stack[depth++] = current_block_number;
5830 current_block_number = next_block_number;
5831 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5833 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5835 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5836 current_block_number = block_stack[--depth];
/* Sanity check: every counted block must have been numbered.  */
5840 if (n_blocks != next_block_number)
5843 return block_vector;
/* reorder_blocks: rebuild the BLOCK tree beneath BLOCK from the
   BLOCK_BEG/BLOCK_END notes in the (possibly reordered) insn chain,
   using the indices stamped by identify_blocks.  Blocks seen more than
   once (duplicated RTL) are copied; subblock chains are reversed at
   scope exit so they end up in source order.  Consumed notes have
   NOTE_SOURCE_FILE cleared.
   NOTE(review): gapped listing -- return type, parameter decls, braces
   and the early `return block;' for a null vector are missing.  */
5846 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5847 and a revised instruction chain, rebuild the tree structure
5848 of BLOCK nodes to correspond to the new order of RTL.
5849 The new block tree is inserted below TOP_BLOCK.
5850 Returns the current top-level block. */
5853 reorder_blocks (block_vector, block, insns)
5858 tree current_block = block;
5861 if (block_vector == 0)
5864 /* Prune the old trees away, so that it doesn't get in the way. */
5865 BLOCK_SUBBLOCKS (current_block) = 0;
5866 BLOCK_CHAIN (current_block) = 0;
5868 for (insn = insns; insn; insn = NEXT_INSN (insn))
5869 if (GET_CODE (insn) == NOTE)
5871 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5873 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5874 /* If we have seen this block before, copy it. */
5875 if (TREE_ASM_WRITTEN (block))
5876 block = copy_node (block);
5877 BLOCK_SUBBLOCKS (block) = 0;
5878 TREE_ASM_WRITTEN (block) = 1;
/* Splice this block in as the newest subblock of the current scope,
   then descend into it.  */
5879 BLOCK_SUPERCONTEXT (block) = current_block;
5880 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5881 BLOCK_SUBBLOCKS (current_block) = block;
5882 current_block = block;
5883 NOTE_SOURCE_FILE (insn) = 0;
5885 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
/* Scope exit: restore source order of subblocks, pop to parent.  */
5887 BLOCK_SUBBLOCKS (current_block)
5888 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5889 current_block = BLOCK_SUPERCONTEXT (current_block);
5890 NOTE_SOURCE_FILE (insn) = 0;
5894 BLOCK_SUBBLOCKS (current_block)
5895 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5896 return current_block;
/* blocks_nreverse: destructively reverse the BLOCK_CHAIN list T,
   returning the old last element as the new head -- the classic
   in-place linked-list reversal.
   NOTE(review): gapped listing -- the function's signature line
   (presumably `blocks_nreverse (t)'), braces, the `prev = decl;'
   step and the `return prev;' are missing; the name is taken from
   the preceding comment and from the caller in reorder_blocks.  */
5899 /* Reverse the order of elements in the chain T of blocks,
5900 and return the new head of the chain (old last element). */
5906 register tree prev = 0, decl, next;
5907 for (decl = t; decl; decl = next)
5909 next = BLOCK_CHAIN (decl);
5910 BLOCK_CHAIN (decl) = prev;
/* all_blocks: recursively count the blocks in the chain starting at
   BLOCK and, when VECTOR is non-null, store each one into VECTOR;
   also clears TREE_ASM_WRITTEN on every block visited.  Returns the
   number of blocks recorded (used by identify_blocks for a two-pass
   count-then-fill).
   NOTE(review): gapped listing -- signature's return type, parameter
   decls, braces, the n_blocks declaration/increment, the enclosing
   while loop header and the final return are missing.  */
5916 /* Count the subblocks of the list starting with BLOCK, and list them
5917 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5921 all_blocks (block, vector)
5929 TREE_ASM_WRITTEN (block) = 0;
5931 /* Record this block. */
5933 vector[n_blocks] = block;
5937 /* Record the subblocks, and their subblocks... */
5938 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5939 vector ? vector + n_blocks : 0);
5940 block = BLOCK_CHAIN (block);
/* init_function_start: reset all the per-function compiler state before
   RTL generation for SUBR begins -- clears the stack-slot, nonlocal-goto,
   trampoline and save-area lists, zeroes every current_function_* flag,
   emits the initial line note for FILENAME/LINE, and classifies the
   return value (struct vs. pointer vs. scalar) for final.c.
   NOTE(review): gapped listing -- parameter declarations, braces, and
   several statements (e.g. init_emit, init_expr, reg_renumber, the
   #else/#endif pairs around PCC_STATIC_STRUCT_RETURN) are absent per
   the embedded numbering; visible code kept byte-identical.  */
5946 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5947 and initialize static variables for generating RTL for the statements
5951 init_function_start (subr, filename, line)
5956 init_stmt_for_function ();
/* CSE is pointless without optimization.  */
5958 cse_not_expected = ! optimize;
5960 /* Caller save not needed yet. */
5961 caller_save_needed = 0;
5963 /* No stack slots have been made yet. */
5964 stack_slot_list = 0;
5966 /* There is no stack slot for handling nonlocal gotos. */
5967 nonlocal_goto_handler_slots = 0;
5968 nonlocal_goto_stack_level = 0;
5970 /* No labels have been declared for nonlocal use. */
5971 nonlocal_labels = 0;
5972 nonlocal_goto_handler_labels = 0;
5974 /* No function calls so far in this function. */
5975 function_call_count = 0;
5977 /* No parm regs have been allocated.
5978 (This is important for output_inline_function.) */
5979 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5981 /* Initialize the RTL mechanism. */
5984 /* Initialize the queue of pending postincrement and postdecrements,
5985 and some other info in expr.c. */
5988 /* We haven't done register allocation yet. */
5991 init_const_rtx_hash_table ();
5993 current_function_name = (*decl_printable_name) (subr, 2);
5995 /* Nonzero if this is a nested function that uses a static chain. */
5997 current_function_needs_context
5998 = (decl_function_context (current_function_decl) != 0
5999 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6001 /* Set if a call to setjmp is seen. */
6002 current_function_calls_setjmp = 0;
6004 /* Set if a call to longjmp is seen. */
6005 current_function_calls_longjmp = 0;
/* Reset the remaining per-function property flags.  */
6007 current_function_calls_alloca = 0;
6008 current_function_has_nonlocal_label = 0;
6009 current_function_has_nonlocal_goto = 0;
6010 current_function_contains_functions = 0;
6011 current_function_is_leaf = 0;
6012 current_function_sp_is_unchanging = 0;
6013 current_function_uses_only_leaf_regs = 0;
6014 current_function_has_computed_jump = 0;
6015 current_function_is_thunk = 0;
6017 current_function_returns_pcc_struct = 0;
6018 current_function_returns_struct = 0;
6019 current_function_epilogue_delay_list = 0;
6020 current_function_uses_const_pool = 0;
6021 current_function_uses_pic_offset_table = 0;
6022 current_function_cannot_inline = 0;
6024 /* We have not yet needed to make a label to jump to for tail-recursion. */
6025 tail_recursion_label = 0;
6027 /* We haven't had a need to make a save area for ap yet. */
6029 arg_pointer_save_area = 0;
6031 /* No stack slots allocated yet. */
6034 /* No SAVE_EXPRs in this function yet. */
6037 /* No RTL_EXPRs in this function yet. */
6040 /* Set up to allocate temporaries. */
6043 /* Within function body, compute a type's size as soon it is laid out. */
6044 immediate_size_expand++;
6046 /* We haven't made any trampolines for this function yet. */
6047 trampoline_list = 0;
6049 init_pending_stack_adjust ();
6050 inhibit_defer_pop = 0;
6052 current_function_outgoing_args_size = 0;
6054 /* Prevent ever trying to delete the first instruction of a function.
6055 Also tell final how to output a linenum before the function prologue.
6056 Note linenums could be missing, e.g. when compiling a Java .class file. */
6058 emit_line_note (filename, line);
6060 /* Make sure first insn is a note even if we don't want linenums.
6061 This makes sure the first insn will never be deleted.
6062 Also, final expects a note to appear there. */
6063 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6065 /* Set flags used by final.c. */
6066 if (aggregate_value_p (DECL_RESULT (subr)))
6068 #ifdef PCC_STATIC_STRUCT_RETURN
6069 current_function_returns_pcc_struct = 1;
6071 current_function_returns_struct = 1;
6074 /* Warn if this value is an aggregate type,
6075 regardless of which calling convention we are using for it. */
6076 if (warn_aggregate_return
6077 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6078 warning ("function returns an aggregate");
6080 current_function_returns_pointer
6081 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6083 /* Indicate that we need to distinguish between the return value of the
6084 present function and the return value of a function being called. */
6085 rtx_equal_function_value_matters = 1;
6087 /* Indicate that we have not instantiated virtual registers yet. */
6088 virtuals_instantiated = 0;
6090 /* Indicate we have no need of a frame pointer yet. */
6091 frame_pointer_needed = 0;
6093 /* By default assume not varargs or stdarg. */
6094 current_function_varargs = 0;
6095 current_function_stdarg = 0;
/* NOTE(review): lines 6096-6103 are missing here -- the visible
   assignment below belongs to the following mark_varargs function
   in the original source.  */
6098 /* Indicate that the current function uses extra args
6099 not explicitly mentioned in the argument list in any fashion. */
6104 current_function_varargs = 1;
/* expand_main_function and its support macros: when the target has no
   .init section doing the work (HAS_INIT_SECTION undefined, or
   INVOKE__main forces it), main() begins with a library call to
   __main, emitted via the GEN_CALL__MAIN macro.
   NOTE(review): gapped listing -- #endif lines, the continuation of
   the emit_library_call argument list, the function braces and the
   GEN_CALL__MAIN invocation are missing per the embedded numbering.  */
6107 /* Expand a call to __main at the beginning of a possible main function. */
6109 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6110 #undef HAS_INIT_SECTION
6111 #define HAS_INIT_SECTION
6114 #ifndef GEN_CALL__MAIN
6115 #define GEN_CALL__MAIN \
6117 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0, \
6123 expand_main_function ()
6125 #if defined(INVOKE__main) || !defined (HAS_INIT_SECTION)
6127 #endif /* not HAS_INIT_SECTION */
6130 extern struct obstack permanent_obstack;
/* expand_function_start: emit the RTL that opens the body of SUBR --
   stores the incoming static chain, creates cleanup/return labels,
   sets up DECL_RTL for the return value (memory for aggregates, a
   pseudo when cleanups/instrumentation must run first, the hard return
   register otherwise), runs assign_parms, builds the context display
   for nested-function access, optionally emits the entry-profiling
   call, places the tail-recursion reentry note, and evaluates pending
   argument-type sizes.
   NOTE(review): gapped listing -- the `tree subr;' parameter decl,
   braces, many #else/#endif arms and whole statements are missing per
   the embedded numbering; visible code kept byte-identical.  */
6132 /* Start the RTL for a new function, and set variables used for
6134 SUBR is the FUNCTION_DECL node.
6135 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6136 the function's parameters, which must be run at any return statement. */
6139 expand_function_start (subr, parms_have_cleanups)
6141 int parms_have_cleanups;
6145 rtx last_ptr = NULL_RTX;
6147 /* Make sure volatile mem refs aren't considered
6148 valid operands of arithmetic insns. */
6149 init_recog_no_volatile ();
6151 /* Set this before generating any memory accesses. */
6152 current_function_check_memory_usage
6153 = (flag_check_memory_usage
6154 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
6156 current_function_instrument_entry_exit
6157 = (flag_instrument_function_entry_exit
6158 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6160 /* If function gets a static chain arg, store it in the stack frame.
6161 Do this first, so it gets the first stack slot offset. */
6162 if (current_function_needs_context)
6164 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6166 /* Delay copying static chain if it is not a register to avoid
6167 conflicts with regs used for parameters. */
6168 if (! SMALL_REGISTER_CLASSES
6169 || GET_CODE (static_chain_incoming_rtx) == REG)
6170 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6173 /* If the parameters of this function need cleaning up, get a label
6174 for the beginning of the code which executes those cleanups. This must
6175 be done before doing anything with return_label. */
6176 if (parms_have_cleanups)
6177 cleanup_label = gen_label_rtx ();
6181 /* Make the label for return statements to jump to, if this machine
6182 does not have a one-instruction return and uses an epilogue,
6183 or if it returns a structure, or if it has parm cleanups. */
6185 if (cleanup_label == 0 && HAVE_return
6186 && ! current_function_instrument_entry_exit
6187 && ! current_function_returns_pcc_struct
6188 && ! (current_function_returns_struct && ! optimize))
6191 return_label = gen_label_rtx ();
6193 return_label = gen_label_rtx ();
6196 /* Initialize rtx used to return the value. */
6197 /* Do this before assign_parms so that we copy the struct value address
6198 before any library calls that assign parms might generate. */
6200 /* Decide whether to return the value in memory or in a register. */
6201 if (aggregate_value_p (DECL_RESULT (subr)))
6203 /* Returning something that won't go in a register. */
6204 register rtx value_address = 0;
6206 #ifdef PCC_STATIC_STRUCT_RETURN
6207 if (current_function_returns_pcc_struct)
6209 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6210 value_address = assemble_static_space (size);
6215 /* Expect to be passed the address of a place to store the value.
6216 If it is passed as an argument, assign_parms will take care of
6218 if (struct_value_incoming_rtx)
6220 value_address = gen_reg_rtx (Pmode);
6221 emit_move_insn (value_address, struct_value_incoming_rtx);
/* The DECL_RTL for an aggregate return is a MEM at the given address,
   marked in-struct when the type is an aggregate.  */
6226 DECL_RTL (DECL_RESULT (subr))
6227 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6228 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
6229 AGGREGATE_TYPE_P (TREE_TYPE
6234 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6235 /* If return mode is void, this decl rtl should not be used. */
6236 DECL_RTL (DECL_RESULT (subr)) = 0;
6237 else if (parms_have_cleanups || current_function_instrument_entry_exit)
6239 /* If function will end with cleanup code for parms,
6240 compute the return values into a pseudo reg,
6241 which we will copy into the true return register
6242 after the cleanups are done. */
6244 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
6246 #ifdef PROMOTE_FUNCTION_RETURN
6247 tree type = TREE_TYPE (DECL_RESULT (subr));
6248 int unsignedp = TREE_UNSIGNED (type);
6250 mode = promote_mode (type, mode, &unsignedp, 1);
6253 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
6256 /* Scalar, returned in a register. */
6258 #ifdef FUNCTION_OUTGOING_VALUE
6259 DECL_RTL (DECL_RESULT (subr))
6260 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6262 DECL_RTL (DECL_RESULT (subr))
6263 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6266 /* Mark this reg as the function's return value. */
6267 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6269 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6270 /* Needed because we may need to move this to memory
6271 in case it's a named return value whose address is taken. */
6272 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6276 /* Initialize rtx for parameters and local variables.
6277 In some cases this requires emitting insns. */
6279 assign_parms (subr, 0);
6281 /* Copy the static chain now if it wasn't a register. The delay is to
6282 avoid conflicts with the parameter passing registers. */
6284 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6285 if (GET_CODE (static_chain_incoming_rtx) != REG)
6286 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6288 /* The following was moved from init_function_start.
6289 The move is supposed to make sdb output more accurate. */
6290 /* Indicate the beginning of the function body,
6291 as opposed to parm setup. */
6292 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6294 /* If doing stupid allocation, mark parms as born here. */
6296 if (GET_CODE (get_last_insn ()) != NOTE)
6297 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6298 parm_birth_insn = get_last_insn ();
/* Under stupid allocation, keep all parm pseudos live from here.  */
6302 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6303 use_variable (regno_reg_rtx[i]);
6305 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6306 use_variable (current_function_internal_arg_pointer);
6309 context_display = 0;
6310 if (current_function_needs_context)
6312 /* Fetch static chain values for containing functions. */
6313 tem = decl_function_context (current_function_decl);
6314 /* If not doing stupid register allocation copy the static chain
6315 pointer into a pseudo. If we have small register classes, copy
6316 the value from memory if static_chain_incoming_rtx is a REG. If
6317 we do stupid register allocation, we use the stack address
6319 if (tem && ! obey_regdecls)
6321 /* If the static chain originally came in a register, put it back
6322 there, then move it out in the next insn. The reason for
6323 this peculiar code is to satisfy function integration. */
6324 if (SMALL_REGISTER_CLASSES
6325 && GET_CODE (static_chain_incoming_rtx) == REG)
6326 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6327 last_ptr = copy_to_reg (static_chain_incoming_rtx);
/* Record one display entry per enclosing function context.  */
6332 tree rtlexp = make_node (RTL_EXPR);
6334 RTL_EXPR_RTL (rtlexp) = last_ptr;
6335 context_display = tree_cons (tem, rtlexp, context_display);
6336 tem = decl_function_context (tem);
6339 /* Chain thru stack frames, assuming pointer to next lexical frame
6340 is found at the place we always store it. */
6341 #ifdef FRAME_GROWS_DOWNWARD
6342 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6344 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
6345 memory_address (Pmode, last_ptr)));
6347 /* If we are not optimizing, ensure that we know that this
6348 piece of context is live over the entire function. */
6350 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
/* Entry instrumentation: call the profiling hook with the function's
   address and the caller's return address.  */
6355 if (current_function_instrument_entry_exit)
6357 rtx fun = DECL_RTL (current_function_decl);
6358 if (GET_CODE (fun) == MEM)
6359 fun = XEXP (fun, 0);
6362 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6364 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6366 hard_frame_pointer_rtx),
6370 /* After the display initializations is where the tail-recursion label
6371 should go, if we end up needing one. Ensure we have a NOTE here
6372 since some things (like trampolines) get placed before this. */
6373 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6375 /* Evaluate now the sizes of any types declared among the arguments. */
6376 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6378 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6379 EXPAND_MEMORY_USE_BAD);
6380 /* Flush the queue in case this parameter declaration has
6385 /* Make sure there is a line number after the function entry setup code. */
6386 force_next_line_note ();
/* expand_function_end: emit everything that closes out the current
   function -- setjmp variable protection, saving the arg pointer,
   initializing recorded trampolines, optional stack-probe insertion,
   unused-parameter warnings, the FUNCTION_END note and final line
   number, the return label, leftover cleanups, exit instrumentation,
   stack save/restore around alloca, copying a pseudo return value to
   the hard return register, exposing the returned-structure address,
   __builtin_eh_return plumbing, and the optional `return' insn.
   NOTE(review): gapped listing -- parameter decls, braces, start/end
   of several sequences (start_sequence/end_sequence), #else/#endif
   arms and assorted statements are missing per the embedded numbering;
   visible code kept byte-identical.  */
6389 /* Generate RTL for the end of the current function.
6390 FILENAME and LINE are the current position in the source file.
6392 It is up to language-specific callers to do cleanups for parameters--
6393 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6396 expand_function_end (filename, line, end_bindings)
6404 #ifdef TRAMPOLINE_TEMPLATE
6405 static rtx initial_trampoline;
6408 #ifdef NON_SAVING_SETJMP
6409 /* Don't put any variables in registers if we call setjmp
6410 on a machine that fails to restore the registers. */
6411 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6413 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6414 setjmp_protect (DECL_INITIAL (current_function_decl));
6416 setjmp_protect_args ();
6420 /* Save the argument pointer if a save area was made for it. */
6421 if (arg_pointer_save_area)
6423 /* arg_pointer_save_area may not be a valid memory address, so we
6424 have to check it and fix it if necessary. */
6427 emit_move_insn (validize_mem (arg_pointer_save_area),
6428 virtual_incoming_args_rtx);
6429 seq = gen_sequence ();
6431 emit_insn_before (seq, tail_recursion_reentry);
6434 /* Initialize any trampolines required by this function. */
6435 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6437 tree function = TREE_PURPOSE (link);
6438 rtx context = lookup_static_chain (function);
6439 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6440 #ifdef TRAMPOLINE_TEMPLATE
6445 #ifdef TRAMPOLINE_TEMPLATE
6446 /* First make sure this compilation has a template for
6447 initializing trampolines. */
6448 if (initial_trampoline == 0)
/* The template rtx must be permanent: it is cached across functions.  */
6450 end_temporary_allocation ();
6452 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6453 resume_temporary_allocation ();
6457 /* Generate insns to initialize the trampoline. */
6459 tramp = round_trampoline_addr (XEXP (tramp, 0));
6460 #ifdef TRAMPOLINE_TEMPLATE
6461 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6462 emit_block_move (blktramp, initial_trampoline,
6463 GEN_INT (TRAMPOLINE_SIZE),
6464 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
6466 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6470 /* Put those insns at entry to the containing function (this one). */
6471 emit_insns_before (seq, tail_recursion_reentry);
6474 /* If we are doing stack checking and this function makes calls,
6475 do a stack probe at the start of the function to ensure we have enough
6476 space for another stack frame. */
6477 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6481 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6482 if (GET_CODE (insn) == CALL_INSN)
6485 probe_stack_range (STACK_CHECK_PROTECT,
6486 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6489 emit_insns_before (seq, tail_recursion_reentry);
6494 /* Warn about unused parms if extra warnings were specified. */
6495 if (warn_unused && extra_warnings)
6499 for (decl = DECL_ARGUMENTS (current_function_decl);
6500 decl; decl = TREE_CHAIN (decl))
6501 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6502 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6503 warning_with_decl (decl, "unused parameter `%s'");
6506 /* Delete handlers for nonlocal gotos if nothing uses them. */
6507 if (nonlocal_goto_handler_slots != 0
6508 && ! current_function_has_nonlocal_label)
6511 /* End any sequences that failed to be closed due to syntax errors. */
6512 while (in_sequence_p ())
6515 /* Outside function body, can't compute type's actual size
6516 until next function's body starts. */
6517 immediate_size_expand--;
6519 /* If doing stupid register allocation,
6520 mark register parms as dying here. */
6525 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6526 use_variable (regno_reg_rtx[i]);
6528 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
6530 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
6532 use_variable (XEXP (tem, 0));
6533 use_variable_after (XEXP (tem, 0), parm_birth_insn);
6536 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6537 use_variable (current_function_internal_arg_pointer);
6540 clear_pending_stack_adjust ();
6541 do_pending_stack_adjust ();
6543 /* Mark the end of the function body.
6544 If control reaches this insn, the function can drop through
6545 without returning a value. */
6546 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6548 /* Must mark the last line number note in the function, so that the test
6549 coverage code can avoid counting the last line twice. This just tells
6550 the code to ignore the immediately following line note, since there
6551 already exists a copy of this note somewhere above. This line number
6552 note is still needed for debugging though, so we can't delete it. */
6553 if (flag_test_coverage)
6554 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6556 /* Output a linenumber for the end of the function.
6557 SDB depends on this. */
6558 emit_line_note_force (filename, line);
6560 /* Output the label for the actual return from the function,
6561 if one is expected. This happens either because a function epilogue
6562 is used instead of a return instruction, or because a return was done
6563 with a goto in order to run local cleanups, or because of pcc-style
6564 structure returning. */
6567 emit_label (return_label);
6569 /* C++ uses this. */
6571 expand_end_bindings (0, 0, 0);
6573 /* Now handle any leftover exception regions that may have been
6574 created for the parameters. */
6576 rtx last = get_last_insn ();
6579 expand_leftover_cleanups ();
6581 /* If the above emitted any code, may sure we jump around it. */
6582 if (last != get_last_insn ())
6584 label = gen_label_rtx ();
6585 last = emit_jump_insn_after (gen_jump (label), last);
6586 last = emit_barrier_after (last);
/* Exit instrumentation mirrors the entry call emitted in
   expand_function_start.  */
6591 if (current_function_instrument_entry_exit)
6593 rtx fun = DECL_RTL (current_function_decl);
6594 if (GET_CODE (fun) == MEM)
6595 fun = XEXP (fun, 0);
6598 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6600 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6602 hard_frame_pointer_rtx),
6606 /* If we had calls to alloca, and this machine needs
6607 an accurate stack pointer to exit the function,
6608 insert some code to save and restore the stack pointer. */
6609 #ifdef EXIT_IGNORE_STACK
6610 if (! EXIT_IGNORE_STACK)
6612 if (current_function_calls_alloca)
6616 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6617 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6620 /* If scalar return value was computed in a pseudo-reg,
6621 copy that to the hard return register. */
6622 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6623 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6624 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6625 >= FIRST_PSEUDO_REGISTER))
6627 rtx real_decl_result;
6629 #ifdef FUNCTION_OUTGOING_VALUE
6631 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6632 current_function_decl);
6635 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6636 current_function_decl);
6638 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6639 /* If this is a BLKmode structure being returned in registers, then use
6640 the mode computed in expand_return. */
6641 if (GET_MODE (real_decl_result) == BLKmode)
6642 PUT_MODE (real_decl_result,
6643 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6644 emit_move_insn (real_decl_result,
6645 DECL_RTL (DECL_RESULT (current_function_decl)));
6646 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
6648 /* The delay slot scheduler assumes that current_function_return_rtx
6649 holds the hard register containing the return value, not a temporary
6651 current_function_return_rtx = real_decl_result;
6654 /* If returning a structure, arrange to return the address of the value
6655 in a place where debuggers expect to find it.
6657 If returning a structure PCC style,
6658 the caller also depends on this value.
6659 And current_function_returns_pcc_struct is not necessarily set. */
6660 if (current_function_returns_struct
6661 || current_function_returns_pcc_struct)
6663 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6664 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6665 #ifdef FUNCTION_OUTGOING_VALUE
6667 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6668 current_function_decl);
6671 = FUNCTION_VALUE (build_pointer_type (type),
6672 current_function_decl);
6675 /* Mark this as a function return value so integrate will delete the
6676 assignment and USE below when inlining this function. */
6677 REG_FUNCTION_VALUE_P (outgoing) = 1;
6679 emit_move_insn (outgoing, value_address);
6680 use_variable (outgoing);
6683 /* If this is an implementation of __throw, do what's necessary to
6684 communicate between __builtin_eh_return and the epilogue. */
6685 expand_eh_return ();
6687 /* Output a return insn if we are using one.
6688 Otherwise, let the rtl chain end here, to drop through
6689 into the epilogue. */
6694 emit_jump_insn (gen_return ());
6699 /* Fix up any gotos that jumped out to the outermost
6700 binding level of the function.
6701 Must follow emitting RETURN_LABEL. */
6703 /* If you have any cleanups to do at this point,
6704 and they need to create temporary variables,
6705 then you will lose. */
6706 expand_fixups (get_insns ());
6709 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
/* Both arrays are filled in by record_insns () below and scanned by
   contains (); the scan loops stop on a zero element, so each array is
   terminated by a 0 UID entry.  */
6711 static int *prologue;
6712 static int *epilogue;
6714 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6715 or a single insn). */
/* NOTE(review): elided listing -- the return type, parameter declaration,
   braces, the copy loop, and the trailing 0 terminator store are among
   the missing original lines.  The array is obstack-allocated with one
   extra slot, presumably for the 0 terminator that contains () relies
   on -- confirm against the full source.  */
6717 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6719 record_insns (insns)
/* A SEQUENCE gets one slot per element plus the terminator ...  */
6724 if (GET_CODE (insns) == SEQUENCE)
6726 int len = XVECLEN (insns, 0);
6727 vec = (int *) oballoc ((len + 1) * sizeof (int));
6730 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
/* ... a lone insn gets a two-slot array: its UID and the terminator.  */
6734 vec = (int *) oballoc (2 * sizeof (int));
6735 vec[0] = INSN_UID (insns);
6741 /* Determine how many INSN_UIDs in VEC are part of INSN. */
/* NOTE(review): elided listing -- return type, local declarations, and
   the per-branch counting/return statements are missing.  VEC is one of
   the 0-terminated UID arrays built by record_insns ().  */
6744 contains (insn, vec)
/* For an insn whose pattern is a SEQUENCE (e.g. a filled delay slot),
   check every element of the sequence against the UID list ...  */
6750 if (GET_CODE (insn) == INSN
6751 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6754 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6755 for (j = 0; vec[j]; j++)
6756 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
/* ... otherwise just look the single insn's UID up in the list.  */
6762 for (j = 0; vec[j]; j++)
6763 if (INSN_UID (insn) == vec[j])
6768 #endif /* HAVE_prologue || HAVE_epilogue */
6770 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6771 this into place with notes indicating where the prologue ends and where
6772 the epilogue begins. Update the basic block information when possible. */
/* NOTE(review): elided listing -- the `void' return type, braces, and
   many statements (sequence start/end, variable declarations, loop
   bodies) are missing between the numbered lines below.  */
6775 thread_prologue_and_epilogue_insns (f)
6776 rtx f ATTRIBUTE_UNUSED;
6779 #ifdef HAVE_prologue
6780 rtx prologue_end = NULL_RTX;
6784 #ifdef HAVE_prologue
/* Expand the target's prologue pattern and remember its insns so the
   note-repositioning pass below can find them later.  */
6790 seq = gen_prologue();
6793 /* Retain a map of the prologue insns. */
6794 if (GET_CODE (seq) != SEQUENCE)
6796 prologue = record_insns (seq);
/* Mark where the prologue ends; GDB keys breakpoints off this note
   (see the comment at line 6951 below).  */
6798 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6799 seq = gen_sequence ();
6802 /* If optimization is off, and perhaps in an empty function,
6803 the entry block will have no successors. */
6804 if (ENTRY_BLOCK_PTR->succ)
6806 /* Can't deal with multiple successors of the entry block. */
6807 if (ENTRY_BLOCK_PTR->succ->succ_next)
/* Preferred path: let the CFG machinery place the prologue on the
   entry edge; fallback (line 6814) just emits it after insn F.  */
6810 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
6814 emit_insn_after (seq, f);
6819 #ifdef HAVE_epilogue
6824 rtx tail = get_last_insn ();
6826 /* ??? This is ghastly. If function returns were not done via uses,
6827 but via mark_regs_live_at_end, we could use insert_insn_on_edge
6828 and all of this ugliness would go away. */
6833 /* If the exit block has no non-fake predecessors, we don't
6834 need an epilogue. Furthermore, only pay attention to the
6835 fallthru predecessors; if (conditional) return insns were
6836 generated, by definition we do not need to emit epilogue
/* Scan the exit block's predecessor edges for a real fallthru edge;
   the (elided) code after the loop acts on whether one was found.  */
6839 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6840 if ((e->flags & EDGE_FAKE) == 0
6841 && (e->flags & EDGE_FALLTHRU) != 0)
6846 /* We can't handle multiple epilogues -- if one is needed,
6847 we won't be able to place it multiple times.
6849 ??? Fix epilogue expanders to not assume they are the
6850 last thing done compiling the function. Either that
6851 or copy_rtx each insn.
6853 ??? Blah, it's not a simple expression to assert that
6854 we've exactly one fallthru exit edge. */
6859 /* ??? If the last insn of the basic block is a jump, then we
6860 are creating a new basic block. Wimp out and leave these
6861 insns outside any block. */
6862 if (GET_CODE (tail) == JUMP_INSN)
6868 rtx prev, seq, first_use;
6870 /* Move the USE insns at the end of a function onto a list. */
/* Skip trailing barriers/notes to find the last real insn ...  */
6872 if (GET_CODE (prev) == BARRIER
6873 || GET_CODE (prev) == NOTE)
6874 prev = prev_nonnote_insn (prev);
/* ... then peel off the run of trailing USE insns (the return-value
   uses) so they can be re-inserted before the return insn later.  */
6878 && GET_CODE (prev) == INSN
6879 && GET_CODE (PATTERN (prev)) == USE)
6881 /* If the end of the block is the use, grab hold of something
6882 else so that we emit barriers etc in the right place. */
6886 tail = PREV_INSN (tail);
6887 while (GET_CODE (tail) == INSN
6888 && GET_CODE (PATTERN (tail)) == USE);
6894 prev = prev_nonnote_insn (prev);
/* Link each detached USE onto the head of the first_use list.  */
6899 NEXT_INSN (use) = first_use;
6900 PREV_INSN (first_use) = use;
6903 NEXT_INSN (use) = NULL_RTX;
6907 && GET_CODE (prev) == INSN
6908 && GET_CODE (PATTERN (prev)) == USE);
6911 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6912 epilogue insns, the USE insns at the end of a function,
6913 the jump insn that returns, and then a BARRIER. */
/* Ensure a barrier terminates the chain before appending the
   epilogue sequence.  */
6915 if (GET_CODE (tail) != BARRIER)
6917 prev = next_nonnote_insn (tail);
6918 if (!prev || GET_CODE (prev) != BARRIER)
6919 emit_barrier_after (tail);
6922 seq = gen_epilogue ();
6924 tail = emit_jump_insn_after (seq, tail);
6926 /* Insert the USE insns immediately before the return insn, which
6927 must be the last instruction emitted in the sequence. */
6929 emit_insns_before (first_use, tail);
6930 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6932 /* Update the tail of the basic block. */
6936 /* Retain a map of the epilogue insns. */
6937 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
/* Flush any insns queued on edges (the prologue inserted above).  */
6944 commit_edge_insertions ();
6946 #ifdef HAVE_prologue
6951 /* GDB handles `break f' by setting a breakpoint on the first
6952 line note *after* the prologue. Which means (1) that if
6953 there are line number notes before where we inserted the
6954 prologue we should move them, and (2) if there is no such
6955 note, then we should generate one at the prologue. */
/* (1) Walk backwards from the prologue-end note, relocating any line
   notes found before it to just after the prologue.  */
6957 for (insn = prologue_end; insn ; insn = prev)
6959 prev = PREV_INSN (insn);
6960 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
6962 /* Note that we cannot reorder the first insn in the
6963 chain, since rest_of_compilation relies on that
6964 remaining constant. Do the next best thing. */
6967 emit_line_note_after (NOTE_SOURCE_FILE (insn),
6968 NOTE_LINE_NUMBER (insn),
6970 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
6973 reorder_insns (insn, insn, prologue_end);
/* (2) If no line note directly follows the prologue, synthesize one
   from the first line note found after the first active insn.  */
6977 insn = NEXT_INSN (prologue_end);
6978 if (! insn || GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) <= 0)
6980 for (insn = next_active_insn (f); insn ; insn = PREV_INSN (insn))
6982 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
6984 emit_line_note_after (NOTE_SOURCE_FILE (insn),
6985 NOTE_LINE_NUMBER (insn),
6995 /* Reposition the prologue-end and epilogue-begin notes after instruction
6996 scheduling and delayed branch scheduling. */
/* NOTE(review): elided listing -- the `void' return type, braces, the
   `len'/`next' declarations, and the guards around each half (presumably
   checking that prologue/epilogue are non-null) are among the missing
   original lines.  Uses the UID maps recorded by record_insns () and the
   contains () lookup above.  */
6999 reposition_prologue_and_epilogue_notes (f)
7000 rtx f ATTRIBUTE_UNUSED;
7002 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7003 /* Reposition the prologue and epilogue notes. */
7010 register rtx insn, note = 0;
7012 /* Scan from the beginning until we reach the last prologue insn.
7013 We apparently can't depend on basic_block_{head,end} after
/* Count the recorded prologue insns (the array is 0-terminated),
   then walk forward decrementing the count as each one is seen.  */
7015 for (len = 0; prologue[len]; len++)
7017 for (insn = f; len && insn; insn = NEXT_INSN (insn))
7019 if (GET_CODE (insn) == NOTE)
7021 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7024 else if ((len -= contains (insn, prologue)) == 0)
7027 /* Find the prologue-end note if we haven't already, and
7028 move it to just after the last prologue insn. */
7031 for (note = insn; (note = NEXT_INSN (note));)
7032 if (GET_CODE (note) == NOTE
7033 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7037 next = NEXT_INSN (note);
7039 /* Whether or not we can depend on BLOCK_HEAD,
7040 attempt to keep it up-to-date. */
7041 if (BLOCK_HEAD (0) == note)
7042 BLOCK_HEAD (0) = next;
/* Detach the note (elided remove_insn, presumably) and re-link it
   right after the last prologue insn.  */
7045 add_insn_after (note, insn);
7052 register rtx insn, note = 0;
7054 /* Scan from the end until we reach the first epilogue insn.
7055 We apparently can't depend on basic_block_{head,end} after
/* Mirror image of the prologue half: walk backwards from the last
   insn until all recorded epilogue insns have been accounted for.  */
7057 for (len = 0; epilogue[len]; len++)
7059 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
7061 if (GET_CODE (insn) == NOTE)
7063 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7066 else if ((len -= contains (insn, epilogue)) == 0)
7068 /* Find the epilogue-begin note if we haven't already, and
7069 move it to just before the first epilogue insn. */
7072 for (note = insn; (note = PREV_INSN (note));)
7073 if (GET_CODE (note) == NOTE
7074 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7078 /* Whether or not we can depend on BLOCK_HEAD,
7079 attempt to keep it up-to-date. */
7081 && BLOCK_HEAD (n_basic_blocks-1) == insn)
7082 BLOCK_HEAD (n_basic_blocks-1) = note;
7085 add_insn_before (note, insn);
7090 #endif /* HAVE_prologue or HAVE_epilogue */