1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* $FreeBSD: src/contrib/gcc/function.c,v 1.6.2.3 2002/06/20 23:12:27 obrien Exp $ */
23 /* $DragonFly: src/contrib/gcc/Attic/function.c,v 1.3 2003/12/10 22:25:04 dillon Exp $ */
26 /* This file handles the generation of rtl code from tree structure
27 at the level of the function as a whole.
28 It creates the rtl expressions for parameters and auto variables
29 and has full responsibility for allocating stack slots.
31 `expand_function_start' is called at the beginning of a function,
32 before the function body is parsed, and `expand_function_end' is
33 called after parsing the body.
35 Call `assign_stack_local' to allocate a stack slot for a local variable.
36 This is usually done during the RTL generation for the function body,
37 but it can also be done in the reload pass when a pseudo-register does
38 not get a hard register.
40 Call `put_var_into_stack' when you learn, belatedly, that a variable
41 previously given a pseudo-register must in fact go in the stack.
42 This function changes the DECL_RTL to be a stack slot instead of a reg
43 then scans all the RTL instructions so far generated to correct them. */
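/* As a rough sketch (illustrative only; the precise calls and their
   arguments are made by the language front end), compiling one function
   proceeds along the lines of

	expand_function_start (...);
	  ... expand the body, calling assign_stack_local for locals
	      and put_var_into_stack when a variable is later found to
	      need a memory home ...
	expand_function_end (...);  */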
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#include "protector.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif
74 /* Some systems use __main in a way incompatible with its use in gcc, in these
75 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
76 give the same symbol without quotes for an alternative entry point. You
77 must define both, or neither. */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
86 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
/* Similar, but round to the next highest integer that meets the
   alignment request.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
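/* For example, with ALIGN == 8 (a power of two) and two's-complement
   arithmetic:

	CEIL_ROUND (13, 8)   == (13 + 7) & ~7  ==  16
	FLOOR_ROUND (-13, 8) == (-13) & ~7     == -16

   Both forms avoid division, which would round toward zero for negative
   values.  */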
92 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
93 during rtl generation. If they are different register numbers, this is
94 always true. It may also be true if
95 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
96 generation. See fix_lexical_addr for details. */
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
102 /* Number of bytes of args popped by function being compiled on its return.
103 Zero if no bytes are to be popped.
104 May affect compilation of return insn or of function epilogue. */
106 int current_function_pops_args;
108 /* Nonzero if function being compiled needs to be given an address
109 where the value should be stored. */
111 int current_function_returns_struct;
113 /* Nonzero if function being compiled needs to
114 return the address of where it has put a structure value. */
116 int current_function_returns_pcc_struct;
118 /* Nonzero if function being compiled needs to be passed a static chain. */
120 int current_function_needs_context;
122 /* Nonzero if function being compiled can call setjmp. */
124 int current_function_calls_setjmp;
126 /* Nonzero if function being compiled can call longjmp. */
128 int current_function_calls_longjmp;
130 /* Nonzero if function being compiled receives nonlocal gotos
131 from nested functions. */
133 int current_function_has_nonlocal_label;
/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;
140 /* Nonzero if function being compiled contains nested functions. */
142 int current_function_contains_functions;
144 /* Nonzero if function being compiled doesn't contain any calls
145 (ignoring the prologue and epilogue). This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */

int current_function_is_leaf;
151 /* Nonzero if function being compiled doesn't modify the stack pointer
152 (ignoring the prologue and epilogue). This is only valid after
153 life_analysis has run. */
155 int current_function_sp_is_unchanging;
157 /* Nonzero if the function being compiled is a leaf function which only
158 uses leaf registers. This is valid after reload (specifically after
159 sched2) and is useful only if the port defines LEAF_REGISTERS. */
161 int current_function_uses_only_leaf_regs;
163 /* Nonzero if the function being compiled issues a computed jump. */
165 int current_function_has_computed_jump;
167 /* Nonzero if the current function is a thunk (a lightweight function that
168 just adjusts one of its arguments and forwards to another function), so
169 we should try to cut corners where we can. */
170 int current_function_is_thunk;
172 /* Nonzero if function being compiled can call alloca,
173 either as a subroutine or builtin. */
175 int current_function_calls_alloca;
177 /* Nonzero if the current function returns a pointer type */
179 int current_function_returns_pointer;
181 /* If some insns can be deferred to the delay slots of the epilogue, the
182 delay list for them is recorded here. */
184 rtx current_function_epilogue_delay_list;
/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */
190 int current_function_args_size;
192 /* # bytes the prologue should push and pretend that the caller pushed them.
193 The prologue must do this, but only if parms can be passed in registers. */
195 int current_function_pretend_args_size;
197 /* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
198 defined, the needed space is pushed by the prologue. */
200 int current_function_outgoing_args_size;
202 /* This is the offset from the arg pointer to the place where the first
203 anonymous arg can be found, if there is one. */
205 rtx current_function_arg_offset_rtx;
207 /* Nonzero if current function uses varargs.h or equivalent.
208 Zero for functions that use stdarg.h. */
210 int current_function_varargs;
212 /* Nonzero if current function uses stdarg.h or equivalent.
213 Zero for functions that use varargs.h. */
215 int current_function_stdarg;
217 /* Quantities of various kinds of registers
218 used for the current function's args. */
220 CUMULATIVE_ARGS current_function_args_info;
222 /* Name of function now being compiled. */
224 char *current_function_name;
226 /* If non-zero, an RTL expression for the location at which the current
227 function returns its result. If the current function returns its
228 result in a register, current_function_return_rtx will always be
229 the hard register containing the result. */
231 rtx current_function_return_rtx;
233 /* Nonzero if the current function uses the constant pool. */
235 int current_function_uses_const_pool;
237 /* Nonzero if the current function uses pic_offset_table_rtx. */
238 int current_function_uses_pic_offset_table;
240 /* The arg pointer hard register, or the pseudo into which it was copied. */
241 rtx current_function_internal_arg_pointer;
243 /* Language-specific reason why the current function cannot be made inline. */
244 char *current_function_cannot_inline;
/* Nonzero if instrumentation calls for function entry and exit should be
   generated.  */

int current_function_instrument_entry_exit;
/* Nonzero if memory access checking should be enabled in the current function.  */
251 int current_function_check_memory_usage;
253 /* The FUNCTION_DECL for an inline function currently being expanded. */
254 tree inline_function_decl;
256 /* Number of function calls seen so far in current function. */
258 int function_call_count;
260 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;
266 /* List (chain of EXPR_LIST) of stack slots that hold the current handlers
267 for nonlocal gotos. There is one for every nonlocal label in the function;
268 this list matches the one in nonlocal_labels.
269 Zero when function does not have nonlocal labels. */
271 rtx nonlocal_goto_handler_slots;
/* List (chain of EXPR_LIST) of labels heading the current handlers for
   nonlocal gotos.  */

rtx nonlocal_goto_handler_labels;
/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */
282 rtx nonlocal_goto_stack_level;
/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */

rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */

rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */

tree rtl_expr_chain;
307 /* Label to jump back to for tail recursion, or 0 if we have
308 not yet needed one for this function. */
309 rtx tail_recursion_label;
311 /* Place after which to insert the tail_recursion_label if we need one. */
312 rtx tail_recursion_reentry;
314 /* Location at which to save the argument pointer if it will need to be
315 referenced. There are two cases where this is done: if nonlocal gotos
316 exist, or if vars stored at an offset from the argument pointer will be
317 needed by inner routines. */
319 rtx arg_pointer_save_area;
321 /* Offset to end of allocated area of stack frame.
322 If stack grows down, this is the address of the last stack slot allocated.
323 If stack grows up, this is the address for the next slot. */
324 HOST_WIDE_INT frame_offset;
326 /* List (chain of TREE_LISTs) of static chains for containing functions.
327 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
328 in an RTL_EXPR in the TREE_VALUE. */
329 static tree context_display;
331 /* List (chain of TREE_LISTs) of trampolines for nested functions.
332 The trampoline sets up the static chain and jumps to the function.
333 We supply the trampoline's address when the function's address is requested.
335 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
336 in an RTL_EXPR in the TREE_VALUE. */
337 static tree trampoline_list;
339 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
340 static rtx parm_birth_insn;
343 /* Nonzero if a stack slot has been generated whose address is not
344 actually valid. It means that the generated rtl must all be scanned
345 to detect and correct the invalid addresses where they occur. */
346 static int invalid_stack_slot;
349 /* Last insn of those whose job was to put parms into their nominal homes. */
350 static rtx last_parm_insn;
/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */

int max_parm_reg;
356 /* Vector indexed by REGNO, containing location on stack in which
357 to put the parm which is nominally in pseudo register REGNO,
358 if we discover that that parm must go in the stack. The highest
359 element in this vector is one less than MAX_PARM_REG, above. */
360 rtx *parm_reg_stack_loc;
362 /* Nonzero once virtual register instantiation has been done.
363 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
364 static int virtuals_instantiated;
366 /* These variables hold pointers to functions to
367 save and restore machine-specific data,
368 in push_function_context and pop_function_context. */
369 void (*save_machine_status) PROTO((struct function *));
370 void (*restore_machine_status) PROTO((struct function *));
372 /* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */
376 extern int rtx_equal_function_value_matters;
377 extern tree sequence_rtl_expr;
379 /* In order to evaluate some expressions, such as function calls returning
380 structures in memory, we need to temporarily allocate stack locations.
381 We record each allocated temporary in the following structure.
383 Associated with each temporary slot is a nesting level. When we pop up
384 one level, all temporaries associated with the previous level are freed.
385 Normally, all temporaries are freed after the execution of the statement
386 in which they were created. However, if we are inside a ({...}) grouping,
387 the result may be in a temporary and hence must be preserved. If the
388 result could be in a temporary, we preserve it if we can determine which
389 one it is in. If we cannot determine which temporary may contain the
390 result, all temporaries are preserved. A temporary is preserved by
391 pretending it was allocated at the previous nesting level.
393 Automatic variables are also assigned temporary slots, at the nesting
394 level where they are defined. They are marked a "kept" so that
395 free_temp_slots will not free them. */
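/* A rough sketch of the intended usage pattern (illustrative only):

	push_temp_slots ();
	temp = assign_stack_temp (mode, size, 0);
	... emit code that uses TEMP ...
	free_temp_slots ();
	pop_temp_slots ();

   A slot freed at one level becomes a candidate for reuse by a later
   request of compatible mode, size and alignment.  */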
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inline function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
  /* Boundary mark separating a character array from the other slots.
     This info is for propolice.  */
  int boundary_mark;
};
441 /* List of all temporaries allocated, both available and in use. */
443 struct temp_slot *temp_slots;
/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;
/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */

int target_temp_slot_level;
459 /* Current boundary mark for character arrays. */
461 int temp_boundary_mark;
464 /* This structure is used to record MEMs or pseudos used to replace VAR, any
465 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
466 maintain this list in case two operands of an insn were required to match;
467 in that case we must ensure we use the same replacement. */
struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
476 struct insns_for_mem_entry {
477 /* The KEY in HE will be a MEM. */
478 struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};
483 /* Forward declarations. */
485 static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
486 int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
					      int, tree));
489 static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
				      enum machine_mode, enum machine_mode,
				      int, int, int,
				      struct hash_table *));
494 static void fixup_var_refs PROTO((rtx, enum machine_mode, int,
495 struct hash_table *));
496 static struct fixup_replacement
497 *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
498 static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
499 rtx, int, struct hash_table *));
500 static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
501 struct fixup_replacement **));
502 static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
503 static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
504 static rtx fixup_stack_1 PROTO((rtx, rtx));
505 static void optimize_bit_field PROTO((rtx, rtx, rtx *));
506 static void instantiate_decls PROTO((tree, int));
507 static void instantiate_decls_1 PROTO((tree, int));
508 static void instantiate_decl PROTO((rtx, int, int));
509 static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
510 static void delete_handlers PROTO((void));
511 static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
			     tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
519 static rtx round_trampoline_addr PROTO((rtx));
520 static tree blocks_nreverse PROTO((tree));
521 static int all_blocks PROTO((tree, tree *));
522 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
523 static int *record_insns PROTO((rtx));
524 static int contains PROTO((rtx, int *));
525 #endif /* HAVE_prologue || HAVE_epilogue */
526 static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
527 static boolean purge_addressof_1 PROTO((rtx *, rtx, int, int,
528 struct hash_table *));
529 static int is_addressof PROTO ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
						       struct hash_table *,
						       hash_table_key));
533 static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
534 static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
535 static int insns_for_mem_walk PROTO ((rtx *, void *));
536 static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
539 /* Pointer to chain of `struct function' for containing functions. */
540 struct function *outer_function_chain;
542 /* Given a function decl for a containing function,
543 return the `struct function' for it. */
struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
558 /* Save the current context for compilation of a nested function.
559 This is called from language-specific code.
560 The caller is responsible for saving any language-specific status,
561 since this function knows only about language-independent variables. */
void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));
569 p->next = outer_function_chain;
570 outer_function_chain = p;
572 p->name = current_function_name;
573 p->decl = current_function_decl;
574 p->pops_args = current_function_pops_args;
575 p->returns_struct = current_function_returns_struct;
576 p->returns_pcc_struct = current_function_returns_pcc_struct;
577 p->returns_pointer = current_function_returns_pointer;
578 p->needs_context = current_function_needs_context;
579 p->calls_setjmp = current_function_calls_setjmp;
580 p->calls_longjmp = current_function_calls_longjmp;
581 p->calls_alloca = current_function_calls_alloca;
582 p->has_nonlocal_label = current_function_has_nonlocal_label;
583 p->has_nonlocal_goto = current_function_has_nonlocal_goto;
584 p->contains_functions = current_function_contains_functions;
585 p->has_computed_jump = current_function_has_computed_jump;
586 p->is_thunk = current_function_is_thunk;
587 p->args_size = current_function_args_size;
588 p->pretend_args_size = current_function_pretend_args_size;
589 p->arg_offset_rtx = current_function_arg_offset_rtx;
590 p->varargs = current_function_varargs;
591 p->stdarg = current_function_stdarg;
592 p->uses_const_pool = current_function_uses_const_pool;
593 p->uses_pic_offset_table = current_function_uses_pic_offset_table;
594 p->internal_arg_pointer = current_function_internal_arg_pointer;
595 p->cannot_inline = current_function_cannot_inline;
596 p->max_parm_reg = max_parm_reg;
597 p->parm_reg_stack_loc = parm_reg_stack_loc;
598 p->outgoing_args_size = current_function_outgoing_args_size;
599 p->return_rtx = current_function_return_rtx;
600 p->nonlocal_goto_handler_slots = nonlocal_goto_handler_slots;
601 p->nonlocal_goto_handler_labels = nonlocal_goto_handler_labels;
602 p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
603 p->nonlocal_labels = nonlocal_labels;
604 p->cleanup_label = cleanup_label;
605 p->return_label = return_label;
606 p->save_expr_regs = save_expr_regs;
607 p->stack_slot_list = stack_slot_list;
608 p->parm_birth_insn = parm_birth_insn;
609 p->frame_offset = frame_offset;
610 p->tail_recursion_label = tail_recursion_label;
611 p->tail_recursion_reentry = tail_recursion_reentry;
612 p->arg_pointer_save_area = arg_pointer_save_area;
613 p->rtl_expr_chain = rtl_expr_chain;
614 p->last_parm_insn = last_parm_insn;
615 p->context_display = context_display;
616 p->trampoline_list = trampoline_list;
617 p->function_call_count = function_call_count;
618 p->temp_slots = temp_slots;
619 p->temp_slot_level = temp_slot_level;
620 p->target_temp_slot_level = target_temp_slot_level;
621 p->var_temp_slot_level = var_temp_slot_level;
622 p->fixup_var_refs_queue = 0;
623 p->epilogue_delay_list = current_function_epilogue_delay_list;
624 p->args_info = current_function_args_info;
625 p->check_memory_usage = current_function_check_memory_usage;
626 p->instrument_entry_exit = current_function_instrument_entry_exit;
628 save_tree_status (p, context);
629 save_storage_status (p);
630 save_emit_status (p);
631 save_expr_status (p);
632 save_stmt_status (p);
633 save_varasm_status (p, context);
634 if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
644 /* Restore the last saved context, at the end of a nested function.
645 This function is called from language-specific code. */
void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
652 struct var_refs_queue *queue;
654 outer_function_chain = p->next;
656 current_function_contains_functions
657 = p->contains_functions || p->inline_obstacks
658 || context == current_function_decl;
659 current_function_has_computed_jump = p->has_computed_jump;
660 current_function_name = p->name;
661 current_function_decl = p->decl;
662 current_function_pops_args = p->pops_args;
663 current_function_returns_struct = p->returns_struct;
664 current_function_returns_pcc_struct = p->returns_pcc_struct;
665 current_function_returns_pointer = p->returns_pointer;
666 current_function_needs_context = p->needs_context;
667 current_function_calls_setjmp = p->calls_setjmp;
668 current_function_calls_longjmp = p->calls_longjmp;
669 current_function_calls_alloca = p->calls_alloca;
670 current_function_has_nonlocal_label = p->has_nonlocal_label;
671 current_function_has_nonlocal_goto = p->has_nonlocal_goto;
672 current_function_is_thunk = p->is_thunk;
673 current_function_args_size = p->args_size;
674 current_function_pretend_args_size = p->pretend_args_size;
675 current_function_arg_offset_rtx = p->arg_offset_rtx;
676 current_function_varargs = p->varargs;
677 current_function_stdarg = p->stdarg;
678 current_function_uses_const_pool = p->uses_const_pool;
679 current_function_uses_pic_offset_table = p->uses_pic_offset_table;
680 current_function_internal_arg_pointer = p->internal_arg_pointer;
681 current_function_cannot_inline = p->cannot_inline;
682 max_parm_reg = p->max_parm_reg;
683 parm_reg_stack_loc = p->parm_reg_stack_loc;
684 current_function_outgoing_args_size = p->outgoing_args_size;
685 current_function_return_rtx = p->return_rtx;
686 nonlocal_goto_handler_slots = p->nonlocal_goto_handler_slots;
687 nonlocal_goto_handler_labels = p->nonlocal_goto_handler_labels;
688 nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
689 nonlocal_labels = p->nonlocal_labels;
690 cleanup_label = p->cleanup_label;
691 return_label = p->return_label;
692 save_expr_regs = p->save_expr_regs;
693 stack_slot_list = p->stack_slot_list;
694 parm_birth_insn = p->parm_birth_insn;
695 frame_offset = p->frame_offset;
696 tail_recursion_label = p->tail_recursion_label;
697 tail_recursion_reentry = p->tail_recursion_reentry;
698 arg_pointer_save_area = p->arg_pointer_save_area;
699 rtl_expr_chain = p->rtl_expr_chain;
700 last_parm_insn = p->last_parm_insn;
701 context_display = p->context_display;
702 trampoline_list = p->trampoline_list;
703 function_call_count = p->function_call_count;
704 temp_slots = p->temp_slots;
705 temp_slot_level = p->temp_slot_level;
706 target_temp_slot_level = p->target_temp_slot_level;
707 var_temp_slot_level = p->var_temp_slot_level;
708 current_function_epilogue_delay_list = p->epilogue_delay_list;
710 current_function_args_info = p->args_info;
711 current_function_check_memory_usage = p->check_memory_usage;
712 current_function_instrument_entry_exit = p->instrument_entry_exit;
714 restore_tree_status (p, context);
715 restore_storage_status (p);
716 restore_expr_status (p);
717 restore_emit_status (p);
718 restore_stmt_status (p);
719 restore_varasm_status (p);
721 if (restore_machine_status)
722 (*restore_machine_status) (p);
724 /* Finish doing put_var_into_stack for any of our variables
725 which became addressable during the nested function. */
726 for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
727 fixup_var_refs (queue->modified, queue->promoted_mode,
728 queue->unsignedp, 0);
732 /* Reset variables that have known state during rtx generation. */
733 rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
742 /* Allocate fixed slots in the stack frame of the current function. */
744 /* Return size needed for stack frame based on slots so far allocated.
745 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
746 the caller may have to do that. */
HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}
758 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
759 with machine mode MODE.
761 ALIGN controls the amount of alignment for the address of the slot:
762 0 means according to MODE,
763 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
764 positive specifies alignment boundary in bits.
766 We do not round to stack_boundary here. */
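/* For instance (illustrative only), a word-sized slot with the mode's
   natural alignment could be requested as

	assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   while passing -1 for ALIGN asks for BIGGEST_ALIGNMENT and rounds the
   size up to a multiple of it.  */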
rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;
802 #ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif
806 /* Round frame offset to that alignment.
807 We must be careful here, since FRAME_OFFSET might be negative and
808 division with a negative dividend isn't as well defined as we might
809 like. So we instead assume that ALIGNMENT is a power of two and
810 use logical operations which are unambiguous. */
811 #ifdef FRAME_GROWS_DOWNWARD
812 frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif
817 /* On a big-endian machine, if we are allocating more space than we will use,
818 use the least significant bytes of those that are allocated. */
819 if (BYTES_BIG_ENDIAN && mode != BLKmode)
820 bigend_correction = size - GET_MODE_SIZE (mode);
822 /* If we have already instantiated virtual registers, return the actual
823 address relative to the frame pointer. */
824 if (virtuals_instantiated)
825 addr = plus_constant (frame_pointer_rtx,
826 (frame_offset + bigend_correction
827 + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
830 frame_offset + bigend_correction);
832 #ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif
836 x = gen_rtx_MEM (mode, addr);
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
843 /* Assign a stack slot in a containing function.
844 First three arguments are same as in preceding function.
845 The last argument specifies the function to allocate in. */
static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
860 push_obstacks (function->function_obstack,
861 function->function_maybepermanent_obstack);
  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;
887 #ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
891 /* Round frame offset to that alignment. */
892 #ifdef FRAME_GROWS_DOWNWARD
893 function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif
898 /* On a big-endian machine, if we are allocating more space than we will use,
899 use the least significant bytes of those that are allocated. */
900 if (BYTES_BIG_ENDIAN && mode != BLKmode)
901 bigend_correction = size - GET_MODE_SIZE (mode);
903 addr = plus_constant (virtual_stack_vars_rtx,
904 function->frame_offset + bigend_correction);
905 #ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif
909 x = gen_rtx_MEM (mode, addr);
911 function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.
922 MODE is the machine mode to be given to the returned rtx.
924 SIZE is the size in units of the space required. We do no rounding here
925 since assign_stack_local will do any required rounding.
927 KEEP is 1 if this slot is to be retained after a call to
928 free_temp_slots. Automatic variables for a block are allocated
929 with this flag. KEEP is 2 if we allocate a longer term temporary,
930 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
931 if we are to allocate something at an inner level to be treated as
932 a variable in the block (e.g., a SAVE_EXPR).
   KEEP is 5 if we allocate a place to return a structure.
935 TYPE is the type that will be used for the stack slot. */
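/* For example (illustrative only), an automatic BLKmode variable for a
   block could be given a slot with

	assign_stack_temp_for_type (BLKmode, int_size_in_bytes (type), 1, type);

   KEEP == 1 marks the slot so that free_temp_slots at the end of each
   statement does not release it.  */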
static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  int alias_set;
  struct temp_slot *p, *best_p = 0;
947 int char_array = (flag_propolice_protection
948 && keep == 1 && search_string_def (type));
950 /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();
955 /* If we know the alias set for the memory that will be used, use
956 it. If there's no TYPE, then we don't know anything about the
957 alias set for the memory. */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;
  align = GET_MODE_ALIGNMENT (mode);
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;

  if (! type)
    type = type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);
972 /* Try to find an available, already-allocated temporary of the proper
973 mode which meets the size and alignment requirements. Choose the
974 smallest one with the closest alignment. */
975 for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
978 && (!flag_strict_aliasing
979 || (alias_set && p->alias_set == alias_set))
980 && (best_p == 0 || best_p->size > p->size
981 || (best_p->size == p->size && best_p->align > p->align))
982 && (! char_array || p->boundary_mark != 0))
984 if (p->align == align && p->size == size)
992 /* Make our best, if any, the one to use. */
995 /* If there are enough aligned bytes left over, make them into a new
996 temp_slot so that the extra bytes don't get wasted. Do this only
997 for BLKmode slots, so that we can be sure of the alignment. */
998 if (GET_MODE (best_p->slot) == BLKmode
999 /* We can't split slots if -fstrict-aliasing because the
	     information about the alias set for the new slot will be
	     lost.  */
1002 && !flag_strict_aliasing)
1004 int alignment = best_p->align / BITS_PER_UNIT;
1005 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
1007 if (best_p->size - rounded_size >= alignment)
1009 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
1010 p->in_use = p->addr_taken = 0;
1011 p->size = best_p->size - rounded_size;
1012 p->base_offset = best_p->base_offset + rounded_size;
1013 p->full_size = best_p->full_size - rounded_size;
1014 p->slot = gen_rtx_MEM (BLKmode,
				 plus_constant (XEXP (best_p->slot, 0),
						rounded_size));
1017 p->align = best_p->align;
1020 p->boundary_mark = best_p->boundary_mark;
	  p->next = temp_slots;
	  temp_slots = p;

	  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
					       stack_slot_list);
1027 best_p->size = rounded_size;
1028 best_p->full_size = rounded_size;
  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;
1040 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
1042 /* We are passing an explicit alignment request to assign_stack_local.
1043 One side effect of that is assign_stack_local will not round SIZE
1044 to ensure the frame offset remains suitably aligned.
1046 So for requests which depended on the rounding of SIZE, we go ahead
1047 and round it now. We also make sure ALIGNMENT is at least
1048 BIGGEST_ALIGNMENT. */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
	align = BIGGEST_ALIGNMENT;

      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
				     : size),
				    align);
1058 p->alias_set = alias_set;
1060 /* The following slot size computation is necessary because we don't
1061 know the actual size of the temporary slot until assign_stack_local
1062 has performed all the frame alignment and size rounding for the
1063 requested temporary. Note that extra space added for alignment
1064 can be either above or below this stack slot depending on which
1065 way the frame grows. We include the extra space if and only if it
1066 is above this slot. */
1067 #ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif
1073 /* Now define the fields used by combine_temp_slots. */
1074 #ifdef FRAME_GROWS_DOWNWARD
1075 p->base_offset = frame_offset;
1076 p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->boundary_mark = char_array ? ++temp_boundary_mark : 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
1089 p->rtl_expr = sequence_rtl_expr;
1093 p->level = target_temp_slot_level;
1098 p->level = var_temp_slot_level;
1103 p->level = temp_slot_level;
  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
1109 RTX_UNCHANGING_P (p->slot) = 0;
1110 MEM_IN_STRUCT_P (p->slot) = 0;
1111 MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;

  return p->slot;
}
1116 /* Allocate a temporary stack slot and record it for possible later
1117 reuse. First three arguments are same as in preceding function. */
rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
1128 /* Assign a temporary of given TYPE.
1129 KEEP is as for assign_stack_temp.
1130 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
1131 it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
1142 enum machine_mode mode = TYPE_MODE (type);
1143 int unsignedp = TREE_UNSIGNED (type);
  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;
1150 /* Unfortunately, we don't yet know how to allocate variable-sized
1151 temporaries. However, sometimes we have a fixed upper limit on
1152 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
1153 instead. This is the case for Chill variable-sized strings. */
1154 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
1155 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
1156 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
1157 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
1159 tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }
#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
1172 /* Combine temporary stack slots which are adjacent on the stack.
1174 This allows for better use of already allocated stack space. This is only
1175 done for BLKmode slots because we can be sure that we won't have alignment
1176 problems in this case. */
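/* For example (illustrative only), if one free BLKmode slot occupies
   frame bytes [0, 16) and another occupies [16, 48), the first slot's
   base_offset + full_size equals the second slot's base_offset, so the
   two can be merged into a single 48-byte slot, provided their propolice
   boundary marks match.  */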
void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;
1185 /* We can't combine slots, because the information about which slot
1186 is in which alias set will be lost. */
  if (flag_strict_aliasing)
    return;
  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;
1197 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
1201 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
1202 for (q = p->next, prev_q = p; q; q = prev_q->next)
1205 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
1207 if (p->base_offset + p->full_size == q->base_offset &&
1208 p->boundary_mark == q->boundary_mark)
1210 /* Q comes after P; combine Q into P. */
1212 p->full_size += q->full_size;
1215 else if (q->base_offset + q->full_size == p->base_offset &&
1216 p->boundary_mark == q->boundary_mark)
1218 /* P comes after Q; combine P into Q. */
1220 q->full_size += p->full_size;
1225 /* Either delete Q or advance past it. */
1227 prev_q->next = q->next;
1231 /* Either delete P or advance past it. */
1235 prev_p->next = p->next;
1237 temp_slots = p->next;
1244 /* Find the temp slot corresponding to the object at address X. */
1246 static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;
1253 for (p = temp_slots; p; p = p->next)
1258 else if (XEXP (p->slot, 0) == x
1260 || (GET_CODE (x) == PLUS
1261 && XEXP (x, 0) == virtual_stack_vars_rtx
1262 && GET_CODE (XEXP (x, 1)) == CONST_INT
1263 && INTVAL (XEXP (x, 1)) >= p->base_offset
1264 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
1267 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
1268 for (next = p->address; next; next = XEXP (next, 1))
1269 if (XEXP (next, 0) == x)
1276 /* Indicate that NEW is an alternate way of referring to the temp slot
1277 that previously was known by OLD. */
void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);
1285 /* If none, return. Else add NEW as an alias. */
1288 else if (p->address == 0)
1292 if (GET_CODE (p->address) != EXPR_LIST)
1293 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1295 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1299 /* If X could be a reference to a temporary slot, mark the fact that its
1300 address was taken. */
void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;
1311 /* If X is not in memory or is at a constant address, it cannot be in
1312 a temporary slot. */
1313 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
1321 /* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
1323 matched one of our slots, just mark that one. Otherwise, we can't
1324 easily predict which it is, so upgrade all of them. Kept slots
1325 need not be touched.
1327 This is called when an ({...}) construct occurs and a statement
1328 returns a value in memory. */
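/* For instance (illustrative only), in

	y = ({ struct S tmp = f (); g (&tmp); tmp; });

   the value of the ({...}) grouping may live in a stack temporary, and
   preserve_temp_slots keeps that temporary from being freed along with
   the other temporaries of the statement that computed it.  */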
void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;
1336 /* If there is no result, we still might have some objects whose address
1337 were taken, so we need to make sure they stay around. */
1340 for (p = temp_slots; p; p = p->next)
1341 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1347 /* If X is a register that is being used as a pointer, see if we have
1348 a temporary slot we know it points to. To be consistent with
1349 the code below, we really should preserve all non-kept slots
1350 if we can't find a match, but that seems to be much too costly. */
1351 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1352 p = find_temp_slot_from_address (x);
1354 /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
1357 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1359 for (p = temp_slots; p; p = p->next)
1360 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1366 /* First see if we can find a match. */
1368 p = find_temp_slot_from_address (XEXP (x, 0));
1372 /* Move everything at our level whose address was taken to our new
1373 level in case we used its address. */
1374 struct temp_slot *q;
1376 if (p->level == temp_slot_level)
1378 for (q = temp_slots; q; q = q->next)
1379 if (q != p && q->addr_taken && q->level == p->level)
1388 /* Otherwise, preserve all non-kept slots at this level. */
1389 for (p = temp_slots; p; p = p->next)
1390 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1394 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1395 with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
1403 struct temp_slot *p;
1405 /* If X is not in memory or is at a constant address, it cannot be in
1406 a temporary slot. */
1407 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
  /* If we can find a match, move it to our level unless it is already at
     a higher level.  */
1412 p = find_temp_slot_from_address (XEXP (x, 0));
1415 p->level = MIN (p->level, temp_slot_level);
1422 /* Free all temporaries used so far. This is normally called at the end
1423 of generating code for a statement. Don't free any temporaries
1424 currently in use for an RTL_EXPR that hasn't yet been emitted.
1425 We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worth while.  */

void
free_temp_slots ()
{
  struct temp_slot *p;
1434 for (p = temp_slots; p; p = p->next)
1435 if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
1442 /* Free all temporary slots used in T, an RTL_EXPR node. */
void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;
1450 for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
1453 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1454 needs to be preserved. This can happen if a temporary in
1455 the RTL_EXPR was addressed; preserve_temp_slots will move
1456 the temporary into a higher level. */
	if (temp_slot_level <= p->level)
	  p->in_use = 0;
	else
	  p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}
1466 /* Mark all temporaries ever allocated in this function as not suitable
1467 for reuse until the current level is exited. */
void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}
/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}
1500 /* Likewise, but save the new level as the place to allocate temporaries
1501 for TARGET_EXPRs. */
void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}
1511 /* Set and get the value of target_temp_slot_level. The only
1512 permitted use of these functions is to save and restore this value. */
int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}
void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;
1535 for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
1556 /* Retroactively move an auto variable from a register to a stack slot.
1557 This is done when an address-reference to the variable is seen. */
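/* For example (illustrative only), given

	int i = 0;
	int *p = &i;

   the address-taken use of I means I can no longer live only in a
   pseudo register, so its DECL_RTL is rewritten to a stack slot (or
   first wrapped in an ADDRESSOF when that optimization is possible).  */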
void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
1564 enum machine_mode promoted_mode, decl_mode;
1565 struct function *function = 0;
  tree context;
  int can_use_addressof;
1569 context = decl_function_context (decl);
1571 /* Get the current rtl used for this object and its original mode. */
1572 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1574 /* No need to do anything if decl has no rtx yet
1575 since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;
1580 /* Get the declared mode for this object. */
1581 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1582 : DECL_MODE (decl));
1583 /* Get the mode it's actually stored in. */
1584 promoted_mode = GET_MODE (reg);
1586 /* If this variable comes from an outer function,
1587 find that function's saved context. */
1588 if (context != current_function_decl && context != inline_function_decl)
1589 for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;
1593 /* If this is a variable-size object with a pseudo to address it,
1594 put that pseudo into the stack, if the var is nonlocal. */
1595 if (DECL_NONLOCAL (decl)
1596 && GET_CODE (reg) == MEM
1597 && GET_CODE (XEXP (reg, 0)) == REG
1598 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1600 reg = XEXP (reg, 0);
1601 decl_mode = promoted_mode = GET_MODE (reg);
  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );
  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
1616 if (! can_use_addressof && GET_CODE (reg) == MEM
1617 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1618 reg = XEXP (XEXP (reg, 0), 0);
1620 /* Now we should have a value that resides in one or more pseudo regs. */
1622 if (GET_CODE (reg) == REG)
1624 /* If this variable lives in the current function and we don't need
1625 to put things in the stack for the sake of setjmp, try to keep it
1626 in a register until we know we actually need the address. */
1627 if (can_use_addressof)
1628 gen_mem_addressof (reg, decl);
1630 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1631 promoted_mode, decl_mode,
1632 TREE_SIDE_EFFECTS (decl), 0,
1633 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1636 else if (GET_CODE (reg) == CONCAT)
1638 /* A CONCAT contains two pseudos; put them both in the stack.
1639 We do it so they end up consecutive. */
1640 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1641 tree part_type = TREE_TYPE (TREE_TYPE (decl));
1642 #ifdef FRAME_GROWS_DOWNWARD
1643 /* Since part 0 should have a lower address, do it second. */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#endif
1663 /* Change the CONCAT into a combined MEM for both parts. */
1664 PUT_CODE (reg, MEM);
1665 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
1666 MEM_ALIAS_SET (reg) = get_alias_set (decl);
1668 /* The two parts are in memory order already.
1669 Use the lower parts address as ours. */
1670 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1671 /* Prevent sharing of rtl that might lose. */
1672 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1673 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1678 if (current_function_check_memory_usage)
1679 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1680 XEXP (reg, 0), Pmode,
1681 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1682 TYPE_MODE (sizetype),
1683 GEN_INT (MEMORY_USE_RW),
1684 TYPE_MODE (integer_type_node));
1687 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1688 into the stack frame of FUNCTION (0 means the current function).
1689 DECL_MODE is the machine mode of the user-level data type.
1690 PROMOTED_MODE is the machine mode of the register.
1691 VOLATILE_P is nonzero if this is for a "volatile" decl.
1692 USED_P is nonzero if this reg might have already been used in an insn. */
static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
     struct hash_table *ht;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);
  if (function)
    {
      if (regno < function->max_parm_reg)
	new = function->parm_reg_stack_loc[regno];
      if (new == 0)
	new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
					0, function);
    }
  else
    {
      if (regno < max_parm_reg)
	new = parm_reg_stack_loc[regno];
      if (new == 0)
	new = assign_stack_local_for_pseudo_reg (decl_mode,
						 GET_MODE_SIZE (decl_mode), 0);
    }
1728 PUT_MODE (reg, decl_mode);
1729 XEXP (reg, 0) = XEXP (new, 0);
1730 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1731 MEM_VOLATILE_P (reg) = volatile_p;
1732 PUT_CODE (reg, MEM);
1734 /* If this is a memory ref that contains aggregate components,
1735 mark it as such for cse and loop optimize. If we are reusing a
1736 previously generated stack slot, then we need to copy the bit in
1737 case it was set for other reasons. For instance, it is set for
1738 __builtin_va_alist. */
1739 MEM_SET_IN_STRUCT_P (reg,
1740 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1741 MEM_ALIAS_SET (reg) = get_alias_set (type);
1743 /* Now make sure that all refs to the variable, previously made
1744 when it was a register, are fixed up to be valid again. */
  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;
1750 /* Variable is inherited; fix it up when we get back to its function. */
1751 push_obstacks (function->function_obstack,
1752 function->function_maybepermanent_obstack);
1754 /* See comment in restore_tree_status in tree.c for why this needs to be
1755 on saveable obstack. */
      temp
	= (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
1758 temp->modified = reg;
1759 temp->promoted_mode = promoted_mode;
1760 temp->unsignedp = TREE_UNSIGNED (type);
1761 temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
1779 struct sequence_stack *stack = sequence_stack;
1780 tree rtl_exps = rtl_expr_chain;
1782 /* Must scan all insns for stack-refs that exceed the limit. */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
			stack == 0, ht);
  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    return;
1789 /* Scan all pending sequences too. */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
1793 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1794 stack->first, stack->next != 0, 0);
1795 /* Update remembered end of sequence
1796 in case we added an insn at the end. */
      stack->last = get_last_insn ();
      end_sequence ();
    }
1801 /* Scan all waiting RTL_EXPRs too. */
1802 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1804 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1805 if (seq != const0_rtx && seq != 0)
1807 push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
				0);
	  end_sequence ();
1814 /* Scan the catch clauses for exception handling too. */
1815 push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
			0, 0);
  end_sequence ();
}
1821 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1822 some part of an insn. Return a struct fixup_replacement whose OLD
1823 value is equal to X. Allocate a new structure if no such entry exists. */
1825 static struct fixup_replacement *
1826 find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
1830 struct fixup_replacement *p;
1832 /* See if we have already replaced this. */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
1838 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
1848 /* Scan the insn-chain starting with INSN for refs to VAR
1849 and fix them up. TOPLEVEL is nonzero if this chain is the
1850 main chain of insns for the current function. */
static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
     struct hash_table *ht;
{
  rtx call_dest = 0;
  rtx insn_list = NULL_RTX;
1864 /* If we already know which INSNs reference VAR there's no need
1865 to walk the entire instruction chain. */
  if (ht)
    {
      insn_list = ((struct insns_for_mem_entry *)
		   hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
      insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
      insn_list = XEXP (insn_list, 1);
    }

  while (insn)
    {
1876 rtx next = NEXT_INSN (insn);
1877 rtx set, prev, prev_set;
1880 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1882 /* If this is a CLOBBER of VAR, delete it.
1884 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1885 and REG_RETVAL notes too. */
1886 if (GET_CODE (PATTERN (insn)) == CLOBBER
1887 && (XEXP (PATTERN (insn), 0) == var
1888 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1889 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1890 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1892 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1893 /* The REG_LIBCALL note will go away since we are going to
1894 turn INSN into a NOTE, so just delete the
1895 corresponding REG_RETVAL note. */
1896 remove_note (XEXP (note, 0),
			 find_reg_note (XEXP (note, 0), REG_RETVAL,
					NULL_RTX));
1900 /* In unoptimized compilation, we shouldn't call delete_insn
1901 except in jump.c doing warnings. */
1902 PUT_CODE (insn, NOTE);
1903 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1904 NOTE_SOURCE_FILE (insn) = 0;
1907 /* The insn to load VAR from a home in the arglist
1908 is now a no-op. When we see it, just delete it.
1909 Similarly if this is storing VAR from a register from which
1910 it was loaded in the previous insn. This will occur
1911 when an ADDRESSOF was made for an arglist slot. */
1913 && (set = single_set (insn)) != 0
1914 && SET_DEST (set) == var
1915 /* If this represents the result of an insn group,
1916 don't delete the insn. */
1917 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1918 && (rtx_equal_p (SET_SRC (set), var)
1919 || (GET_CODE (SET_SRC (set)) == REG
1920 && (prev = prev_nonnote_insn (insn)) != 0
1921 && (prev_set = single_set (prev)) != 0
1922 && SET_DEST (prev_set) == SET_SRC (set)
1923 && rtx_equal_p (SET_SRC (prev_set), var))))
1925 /* In unoptimized compilation, we shouldn't call delete_insn
1926 except in jump.c doing warnings. */
1927 PUT_CODE (insn, NOTE);
1928 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1929 NOTE_SOURCE_FILE (insn) = 0;
1930 if (insn == last_parm_insn)
1931 last_parm_insn = PREV_INSN (next);
1935 struct fixup_replacement *replacements = 0;
1936 rtx next_insn = NEXT_INSN (insn);
1938 if (SMALL_REGISTER_CLASSES)
1940 /* If the insn that copies the results of a CALL_INSN
1941 into a pseudo now references VAR, we have to use an
1942 intermediate pseudo since we want the life of the
1943 return value register to be only a single insn.
1945 If we don't use an intermediate pseudo, such things as
1946 address computations needed to make the address of VAR valid
1947 (if it is not already) can be placed between the CALL_INSN and INSN.
1949 To make sure this doesn't happen, we record the destination
1950 of the CALL_INSN and see if the next insn uses both that and VAR. */
1953 if (call_dest != 0 && GET_CODE (insn) == INSN
1954 && reg_mentioned_p (var, PATTERN (insn))
1955 && reg_mentioned_p (call_dest, PATTERN (insn)))
1957 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1959 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1961 PATTERN (insn) = replace_rtx (PATTERN (insn), call_dest, temp);
1965 if (GET_CODE (insn) == CALL_INSN
1966 && GET_CODE (PATTERN (insn)) == SET)
1967 call_dest = SET_DEST (PATTERN (insn));
1968 else if (GET_CODE (insn) == CALL_INSN
1969 && GET_CODE (PATTERN (insn)) == PARALLEL
1970 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1971 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1976 /* See if we have to do anything to INSN now that VAR is in
1977 memory. If it needs to be loaded into a pseudo, use a single
1978 pseudo for the entire insn in case there is a MATCH_DUP
1979 between two operands. We pass a pointer to the head of
1980 a list of struct fixup_replacements. If fixup_var_refs_1
1981 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1982 it will record them in this list.
1984 If it allocated a pseudo for any replacement, we copy into
1987 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1990 /* If this is last_parm_insn, and any instructions were output
1991 after it to fix it up, then we must set last_parm_insn to
1992 the last such instruction emitted. */
1993 if (insn == last_parm_insn)
1994 last_parm_insn = PREV_INSN (next_insn);
1996 while (replacements)
1998 if (GET_CODE (replacements->new) == REG)
2003 /* OLD might be a (subreg (mem)). */
2004 if (GET_CODE (replacements->old) == SUBREG)
2006 = fixup_memory_subreg (replacements->old, insn, 0);
2009 = fixup_stack_1 (replacements->old, insn);
2011 insert_before = insn;
2013 /* If we are changing the mode, do a conversion.
2014 This might be wasteful, but combine.c will
2015 eliminate much of the waste. */
2017 if (GET_MODE (replacements->new)
2018 != GET_MODE (replacements->old))
2021 convert_move (replacements->new,
2022 replacements->old, unsignedp);
2023 seq = gen_sequence ();
2027 seq = gen_move_insn (replacements->new,
2030 emit_insn_before (seq, insert_before);
2033 replacements = replacements->next;
2037 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
2038 But don't touch other insns referred to by reg-notes;
2039 we will get them elsewhere. */
2040 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2041 if (GET_CODE (note) != INSN_LIST)
2043 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
2050 insn = XEXP (insn_list, 0);
2051 insn_list = XEXP (insn_list, 1);
2058 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
2059 See if the rtx expression at *LOC in INSN needs to be changed.
2061 REPLACEMENTS is a pointer to a list head that starts out zero, but may
2062 contain a list of original rtx's and replacements. If we find that we need
2063 to modify this insn by replacing a memory reference with a pseudo or by
2064 making a new MEM to implement a SUBREG, we consult that list to see if
2065 we have already chosen a replacement. If none has already been allocated,
2066 we allocate it and update the list. fixup_var_refs_insns will copy VAR
2067 or the SUBREG, as appropriate, to the pseudo. */
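/* Illustrative example: suppose VAR is the MEM that replaced a former
   SImode pseudo and some insn still contains (subreg:QI VAR 0).  Such a
   (SUBREG (MEM)) is not valid rtl at this stage, so the code below either
   rewrites it as a narrower MEM via fixup_memory_subreg or records a
   replacement whose NEW is a fresh pseudo; fixup_var_refs_insns then
   emits a move from the fixed-up reference into that pseudo just before
   the insn, as described above.  */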
2070 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
2072 enum machine_mode promoted_mode;
2075 struct fixup_replacement **replacements;
2078 register rtx x = *loc;
2079 RTX_CODE code = GET_CODE (x);
2081 register rtx tem, tem1;
2082 struct fixup_replacement *replacement;
2087 if (XEXP (x, 0) == var)
2089 /* Prevent sharing of rtl that might lose. */
2090 rtx sub = copy_rtx (XEXP (var, 0));
2092 if (! validate_change (insn, loc, sub, 0))
2094 rtx y = gen_reg_rtx (GET_MODE (sub));
2097 /* We should be able to replace with a register or all is lost.
2098 Note that we can't use validate_change to verify this, since
2099 we don't take care to replace all dups simultaneously. */
2100 if (! validate_replace_rtx (*loc, y, insn))
2103 /* Careful! First try to recognize a direct move of the
2104 value, mimicking how things are done in gen_reload wrt
2105 PLUS. Consider what happens when insn is a conditional
2106 move instruction and addsi3 clobbers flags. */
2109 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
2110 seq = gen_sequence ();
2113 if (recog_memoized (new_insn) < 0)
2115 /* That failed. Fall back on force_operand and hope. */
2118 force_operand (sub, y);
2119 seq = gen_sequence ();
2124 /* Don't separate setter from user. */
2125 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
2126 insn = PREV_INSN (insn);
2129 emit_insn_before (seq, insn);
2137 /* If we already have a replacement, use it. Otherwise,
2138 try to fix up this address in case it is invalid. */
2140 replacement = find_fixup_replacement (replacements, var);
2141 if (replacement->new)
2143 *loc = replacement->new;
2147 *loc = replacement->new = x = fixup_stack_1 (x, insn);
2149 /* Unless we are forcing memory to register or we changed the mode,
2150 we can leave things the way they are if the insn is valid. */
2152 INSN_CODE (insn) = -1;
2153 if (! flag_force_mem && GET_MODE (x) == promoted_mode
2154 && recog_memoized (insn) >= 0)
2157 *loc = replacement->new = gen_reg_rtx (promoted_mode);
2161 /* If X contains VAR, we need to unshare it here so that we update
2162 each occurrence separately. But all identical MEMs in one insn
2163 must be replaced with the same rtx because of the possibility of MATCH_DUPs. */
2166 if (reg_mentioned_p (var, x))
2168 replacement = find_fixup_replacement (replacements, x);
2169 if (replacement->new == 0)
2170 replacement->new = copy_most_rtx (x, var);
2172 *loc = x = replacement->new;
2188 /* Note that in some cases those types of expressions are altered
2189 by optimize_bit_field, and do not survive to get here. */
2190 if (XEXP (x, 0) == var
2191 || (GET_CODE (XEXP (x, 0)) == SUBREG
2192 && SUBREG_REG (XEXP (x, 0)) == var))
2194 /* Get TEM as a valid MEM in the mode presently in the insn.
2196 We don't worry about the possibility of MATCH_DUP here; it
2197 is highly unlikely and would be tricky to handle. */
2200 if (GET_CODE (tem) == SUBREG)
2202 if (GET_MODE_BITSIZE (GET_MODE (tem))
2203 > GET_MODE_BITSIZE (GET_MODE (var)))
2205 replacement = find_fixup_replacement (replacements, var);
2206 if (replacement->new == 0)
2207 replacement->new = gen_reg_rtx (GET_MODE (var));
2208 SUBREG_REG (tem) = replacement->new;
2211 tem = fixup_memory_subreg (tem, insn, 0);
2214 tem = fixup_stack_1 (tem, insn);
2216 /* Unless we want to load from memory, get TEM into the proper mode
2217 for an extract from memory. This can only be done if the
2218 extract is at a constant position and length. */
2220 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2221 && GET_CODE (XEXP (x, 2)) == CONST_INT
2222 && ! mode_dependent_address_p (XEXP (tem, 0))
2223 && ! MEM_VOLATILE_P (tem))
2225 enum machine_mode wanted_mode = VOIDmode;
2226 enum machine_mode is_mode = GET_MODE (tem);
2227 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2230 if (GET_CODE (x) == ZERO_EXTRACT)
2232 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
2233 if (wanted_mode == VOIDmode)
2234 wanted_mode = word_mode;
2238 if (GET_CODE (x) == SIGN_EXTRACT)
2240 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
2241 if (wanted_mode == VOIDmode)
2242 wanted_mode = word_mode;
2245 /* If we have a narrower mode, we can do something. */
2246 if (wanted_mode != VOIDmode
2247 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2249 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2250 rtx old_pos = XEXP (x, 2);
2253 /* If the bytes and bits are counted differently, we
2254 must adjust the offset. */
2255 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2256 offset = (GET_MODE_SIZE (is_mode)
2257 - GET_MODE_SIZE (wanted_mode) - offset);
2259 pos %= GET_MODE_BITSIZE (wanted_mode);
2261 newmem = gen_rtx_MEM (wanted_mode,
2262 plus_constant (XEXP (tem, 0), offset));
2263 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2264 MEM_COPY_ATTRIBUTES (newmem, tem);
2266 /* Make the change and see if the insn remains valid. */
2267 INSN_CODE (insn) = -1;
2268 XEXP (x, 0) = newmem;
2269 XEXP (x, 2) = GEN_INT (pos);
2271 if (recog_memoized (insn) >= 0)
2274 /* Otherwise, restore old position. XEXP (x, 0) will be
2276 XEXP (x, 2) = old_pos;
2280 /* If we get here, the bitfield extract insn can't accept a memory
2281 reference. Copy the input into a register. */
2283 tem1 = gen_reg_rtx (GET_MODE (tem));
2284 emit_insn_before (gen_move_insn (tem1, tem), insn);
2291 if (SUBREG_REG (x) == var)
2293 /* If this is a special SUBREG made because VAR was promoted
2294 from a wider mode, replace it with VAR and call ourself
2295 recursively, this time saying that the object previously
2296 had its current mode (by virtue of the SUBREG). */
2298 if (SUBREG_PROMOTED_VAR_P (x))
2301 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2305 /* If this SUBREG makes VAR wider, it has become a paradoxical
2306 SUBREG with VAR in memory, but these aren't allowed at this
2307 stage of the compilation. So load VAR into a pseudo and take
2308 a SUBREG of that pseudo. */
2309 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2311 replacement = find_fixup_replacement (replacements, var);
2312 if (replacement->new == 0)
2313 replacement->new = gen_reg_rtx (GET_MODE (var));
2314 SUBREG_REG (x) = replacement->new;
2318 /* See if we have already found a replacement for this SUBREG.
2319 If so, use it. Otherwise, make a MEM and see if the insn
2320 is recognized. If not, or if we should force MEM into a register,
2321 make a pseudo for this SUBREG. */
2322 replacement = find_fixup_replacement (replacements, x);
2323 if (replacement->new)
2325 *loc = replacement->new;
2329 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2331 INSN_CODE (insn) = -1;
2332 if (! flag_force_mem && recog_memoized (insn) >= 0)
2335 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2341 /* First do special simplification of bit-field references. */
2342 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2343 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2344 optimize_bit_field (x, insn, 0);
2345 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2346 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2347 optimize_bit_field (x, insn, NULL_PTR);
2349 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2350 into a register and then store it back out. */
2351 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2352 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2353 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2354 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2355 > GET_MODE_SIZE (GET_MODE (var))))
2357 replacement = find_fixup_replacement (replacements, var);
2358 if (replacement->new == 0)
2359 replacement->new = gen_reg_rtx (GET_MODE (var));
2361 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2362 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2365 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2366 insn into a pseudo and store the low part of the pseudo into VAR. */
2367 if (GET_CODE (SET_DEST (x)) == SUBREG
2368 && SUBREG_REG (SET_DEST (x)) == var
2369 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2370 > GET_MODE_SIZE (GET_MODE (var))))
2372 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2373 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2380 rtx dest = SET_DEST (x);
2381 rtx src = SET_SRC (x);
2383 rtx outerdest = dest;
2386 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2387 || GET_CODE (dest) == SIGN_EXTRACT
2388 || GET_CODE (dest) == ZERO_EXTRACT)
2389 dest = XEXP (dest, 0);
2391 if (GET_CODE (src) == SUBREG)
2392 src = XEXP (src, 0);
2394 /* If VAR does not appear at the top level of the SET
2395 just scan the lower levels of the tree. */
2397 if (src != var && dest != var)
2400 /* We will need to rerecognize this insn. */
2401 INSN_CODE (insn) = -1;
2404 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2406 /* Since this case will return, ensure we fixup all the
2408 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2409 insn, replacements);
2410 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2411 insn, replacements);
2412 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2413 insn, replacements);
2415 tem = XEXP (outerdest, 0);
2417 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2418 that may appear inside a ZERO_EXTRACT.
2419 This was legitimate when the MEM was a REG. */
2420 if (GET_CODE (tem) == SUBREG
2421 && SUBREG_REG (tem) == var)
2422 tem = fixup_memory_subreg (tem, insn, 0);
2424 tem = fixup_stack_1 (tem, insn);
2426 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2427 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2428 && ! mode_dependent_address_p (XEXP (tem, 0))
2429 && ! MEM_VOLATILE_P (tem))
2431 enum machine_mode wanted_mode;
2432 enum machine_mode is_mode = GET_MODE (tem);
2433 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2435 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2436 if (wanted_mode == VOIDmode)
2437 wanted_mode = word_mode;
2439 /* If we have a narrower mode, we can do something. */
2440 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2442 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2443 rtx old_pos = XEXP (outerdest, 2);
2446 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2447 offset = (GET_MODE_SIZE (is_mode)
2448 - GET_MODE_SIZE (wanted_mode) - offset);
2450 pos %= GET_MODE_BITSIZE (wanted_mode);
2452 newmem = gen_rtx_MEM (wanted_mode,
2453 plus_constant (XEXP (tem, 0), offset));
2454 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2455 MEM_COPY_ATTRIBUTES (newmem, tem);
2457 /* Make the change and see if the insn remains valid. */
2458 INSN_CODE (insn) = -1;
2459 XEXP (outerdest, 0) = newmem;
2460 XEXP (outerdest, 2) = GEN_INT (pos);
2462 if (recog_memoized (insn) >= 0)
2465 /* Otherwise, restore old position. XEXP (x, 0) will be
2467 XEXP (outerdest, 2) = old_pos;
2471 /* If we get here, the bit-field store doesn't allow memory
2472 or isn't located at a constant position. Load the value into
2473 a register, do the store, and put it back into memory. */
2475 tem1 = gen_reg_rtx (GET_MODE (tem));
2476 emit_insn_before (gen_move_insn (tem1, tem), insn);
2477 emit_insn_after (gen_move_insn (tem, tem1), insn);
2478 XEXP (outerdest, 0) = tem1;
2483 /* STRICT_LOW_PART is a no-op on memory references
2484 and it can cause combinations to be unrecognizable,
2487 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2488 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2490 /* A valid insn to copy VAR into or out of a register
2491 must be left alone, to avoid an infinite loop here.
2492 If the reference to VAR is by a subreg, fix that up,
2493 since SUBREG is not valid for a memref.
2494 Also fix up the address of the stack slot.
2496 Note that we must not try to recognize the insn until
2497 after we know that we have valid addresses and no
2498 (subreg (mem ...) ...) constructs, since these interfere
2499 with determining the validity of the insn. */
2501 if ((SET_SRC (x) == var
2502 || (GET_CODE (SET_SRC (x)) == SUBREG
2503 && SUBREG_REG (SET_SRC (x)) == var))
2504 && (GET_CODE (SET_DEST (x)) == REG
2505 || (GET_CODE (SET_DEST (x)) == SUBREG
2506 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2507 && GET_MODE (var) == promoted_mode
2508 && x == single_set (insn))
2512 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2513 if (replacement->new)
2514 SET_SRC (x) = replacement->new;
2515 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2516 SET_SRC (x) = replacement->new
2517 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2519 SET_SRC (x) = replacement->new
2520 = fixup_stack_1 (SET_SRC (x), insn);
2522 if (recog_memoized (insn) >= 0)
2525 /* INSN is not valid, but we know that we want to
2526 copy SET_SRC (x) to SET_DEST (x) in some way. So
2527 we generate the move and see whether it requires more
2528 than one insn. If it does, we emit those insns and
2529 delete INSN. Otherwise, we can just replace the pattern
2530 of INSN; we have already verified above that INSN has
2531 no other function than to do X. */
2533 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2534 if (GET_CODE (pat) == SEQUENCE)
2536 emit_insn_after (pat, insn);
2537 PUT_CODE (insn, NOTE);
2538 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2539 NOTE_SOURCE_FILE (insn) = 0;
2542 PATTERN (insn) = pat;
2547 if ((SET_DEST (x) == var
2548 || (GET_CODE (SET_DEST (x)) == SUBREG
2549 && SUBREG_REG (SET_DEST (x)) == var))
2550 && (GET_CODE (SET_SRC (x)) == REG
2551 || (GET_CODE (SET_SRC (x)) == SUBREG
2552 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2553 && GET_MODE (var) == promoted_mode
2554 && x == single_set (insn))
2558 if (GET_CODE (SET_DEST (x)) == SUBREG)
2559 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2561 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2563 if (recog_memoized (insn) >= 0)
2566 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2567 if (GET_CODE (pat) == SEQUENCE)
2569 emit_insn_after (pat, insn);
2570 PUT_CODE (insn, NOTE);
2571 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2572 NOTE_SOURCE_FILE (insn) = 0;
2575 PATTERN (insn) = pat;
2580 /* Otherwise, storing into VAR must be handled specially
2581 by storing into a temporary and copying that into VAR
2582 with a new insn after this one. Note that this case
2583 will be used when storing into a promoted scalar since
2584 the insn will now have different modes on the input
2585 and output and hence will be invalid (except for the case
2586 of setting it to a constant, which does not need any
2587 change if it is valid). We generate extra code in that case,
2588 but combine.c will eliminate it. */
2593 rtx fixeddest = SET_DEST (x);
2595 /* STRICT_LOW_PART can be discarded, around a MEM. */
2596 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2597 fixeddest = XEXP (fixeddest, 0);
2598 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2599 if (GET_CODE (fixeddest) == SUBREG)
2601 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2602 promoted_mode = GET_MODE (fixeddest);
2605 fixeddest = fixup_stack_1 (fixeddest, insn);
2607 temp = gen_reg_rtx (promoted_mode);
2609 emit_insn_after (gen_move_insn (fixeddest,
2610 gen_lowpart (GET_MODE (fixeddest),
2614 SET_DEST (x) = temp;
2622 /* Nothing special about this RTX; fix its operands. */
2624 fmt = GET_RTX_FORMAT (code);
2625 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2628 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2632 for (j = 0; j < XVECLEN (x, i); j++)
2633 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2634 insn, replacements);
2639 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2640 return an rtx (MEM:m1 newaddr) which is equivalent.
2641 If any insns must be emitted to compute NEWADDR, put them before INSN.
2643 UNCRITICAL nonzero means accept paradoxical subregs.
2644 This is used for subregs found inside REG_NOTES. */
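/* Worked example (illustrative; assumes 4-byte words): for
   (subreg:QI (mem:SI addr) 0) the result is (mem:QI addr) on a
   little-endian target, whereas with BYTES_BIG_ENDIAN the low-order byte
   sits at the highest address, so the offset below becomes
   MIN (4, 4) - MIN (4, 1) == 3 and the result is (mem:QI (plus addr 3)).  */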
2647 fixup_memory_subreg (x, insn, uncritical)
2652 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2653 rtx addr = XEXP (SUBREG_REG (x), 0);
2654 enum machine_mode mode = GET_MODE (x);
2657 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2658 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2662 if (BYTES_BIG_ENDIAN)
2663 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2664 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2665 addr = plus_constant (addr, offset);
2666 if (!flag_force_addr && memory_address_p (mode, addr))
2667 /* Shortcut if no insns need be emitted. */
2668 return change_address (SUBREG_REG (x), mode, addr);
2670 result = change_address (SUBREG_REG (x), mode, addr);
2671 emit_insn_before (gen_sequence (), insn);
2676 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2677 Replace subexpressions of X in place.
2678 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2679 Otherwise return X, with its contents possibly altered.
2681 If any insns must be emitted to compute NEWADDR, put them before INSN.
2683 UNCRITICAL is as in fixup_memory_subreg. */
2686 walk_fixup_memory_subreg (x, insn, uncritical)
2691 register enum rtx_code code;
2698 code = GET_CODE (x);
2700 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2701 return fixup_memory_subreg (x, insn, uncritical);
2703 /* Nothing special about this RTX; fix its operands. */
2705 fmt = GET_RTX_FORMAT (code);
2706 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2709 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2713 for (j = 0; j < XVECLEN (x, i); j++)
2715 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2721 /* For each memory ref within X, if it refers to a stack slot
2722 with an out of range displacement, put the address in a temp register
2723 (emitting new insns before INSN to load these registers)
2724 and alter the memory ref to use that register.
2725 Replace each such MEM rtx with a copy, to avoid clobberage. */
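/* Illustrative example (the displacement is made up): given
   (mem:SI (plus (reg frame-pointer) (const_int 40000))) on a target whose
   addressing modes cannot encode such a large offset, the sum is copied
   into a temporary register by insns emitted before INSN, and the MEM is
   rewritten to address through that temporary.  */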
2728 fixup_stack_1 (x, insn)
2733 register RTX_CODE code = GET_CODE (x);
2738 register rtx ad = XEXP (x, 0);
2739 /* If we have address of a stack slot but it's not valid
2740 (displacement is too large), compute the sum in a register. */
2741 if (GET_CODE (ad) == PLUS
2742 && GET_CODE (XEXP (ad, 0)) == REG
2743 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2744 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2745 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2746 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2747 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2749 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2750 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2751 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2752 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2755 if (memory_address_p (GET_MODE (x), ad))
2759 temp = copy_to_reg (ad);
2760 seq = gen_sequence ();
2762 emit_insn_before (seq, insn);
2763 return change_address (x, VOIDmode, temp);
2768 fmt = GET_RTX_FORMAT (code);
2769 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2772 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2776 for (j = 0; j < XVECLEN (x, i); j++)
2777 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2783 /* Optimization: a bit-field instruction whose field
2784 happens to be a byte or halfword in memory
2785 can be changed to a move instruction.
2787 We call here when INSN is an insn to examine or store into a bit-field.
2788 BODY is the SET-rtx to be altered.
2790 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2791 (Currently this is called only from function.c, and EQUIV_MEM is always 0.) */
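/* Worked example (illustrative; little-endian, with BITS_BIG_ENDIAN equal
   to BYTES_BIG_ENDIAN): the store
     (set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 16)) ...)
   hits an aligned byte, so the field gets QImode, the bit position 16
   becomes the byte offset 2, and the insn can be rewritten as a plain
   move into (mem:QI (plus addr 2)).  */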
2795 optimize_bit_field (body, insn, equiv_mem)
2800 register rtx bitfield;
2803 enum machine_mode mode;
2805 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2806 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2807 bitfield = SET_DEST (body), destflag = 1;
2809 bitfield = SET_SRC (body), destflag = 0;
2811 /* First check that the field being stored has constant size and position
2812 and is in fact a byte or halfword suitably aligned. */
2814 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2815 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2816 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2818 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2820 register rtx memref = 0;
2822 /* Now check that the containing word is memory, not a register,
2823 and that it is safe to change the machine mode. */
2825 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2826 memref = XEXP (bitfield, 0);
2827 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2829 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2830 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2831 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2832 memref = SUBREG_REG (XEXP (bitfield, 0));
2833 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2835 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2836 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2839 && ! mode_dependent_address_p (XEXP (memref, 0))
2840 && ! MEM_VOLATILE_P (memref))
2842 /* Now adjust the address, first for any subreg'ing
2843 that we are now getting rid of,
2844 and then for which byte of the word is wanted. */
2846 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2849 /* Adjust OFFSET to count bits from low-address byte. */
2850 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2851 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2852 - offset - INTVAL (XEXP (bitfield, 1)));
2854 /* Adjust OFFSET to count bytes from low-address byte. */
2855 offset /= BITS_PER_UNIT;
2856 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2858 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2859 if (BYTES_BIG_ENDIAN)
2860 offset -= (MIN (UNITS_PER_WORD,
2861 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2862 - MIN (UNITS_PER_WORD,
2863 GET_MODE_SIZE (GET_MODE (memref))));
2867 memref = change_address (memref, mode,
2868 plus_constant (XEXP (memref, 0), offset));
2869 insns = get_insns ();
2871 emit_insns_before (insns, insn);
2873 /* Store this memory reference where
2874 we found the bit field reference. */
2878 validate_change (insn, &SET_DEST (body), memref, 1);
2879 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2881 rtx src = SET_SRC (body);
2882 while (GET_CODE (src) == SUBREG
2883 && SUBREG_WORD (src) == 0)
2884 src = SUBREG_REG (src);
2885 if (GET_MODE (src) != GET_MODE (memref))
2886 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2887 validate_change (insn, &SET_SRC (body), src, 1);
2889 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2890 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2891 /* This shouldn't happen because anything that didn't have
2892 one of these modes should have got converted explicitly
2893 and then referenced through a subreg.
2894 This is so because the original bit-field was
2895 handled by agg_mode and so its tree structure had
2896 the same mode that memref now has. */
2901 rtx dest = SET_DEST (body);
2903 while (GET_CODE (dest) == SUBREG
2904 && SUBREG_WORD (dest) == 0
2905 && (GET_MODE_CLASS (GET_MODE (dest))
2906 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2907 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2909 dest = SUBREG_REG (dest);
2911 validate_change (insn, &SET_DEST (body), dest, 1);
2913 if (GET_MODE (dest) == GET_MODE (memref))
2914 validate_change (insn, &SET_SRC (body), memref, 1);
2917 /* Convert the mem ref to the destination mode. */
2918 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2921 convert_move (newreg, memref,
2922 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2926 validate_change (insn, &SET_SRC (body), newreg, 1);
2930 /* See if we can convert this extraction or insertion into
2931 a simple move insn. We might not be able to do so if this
2932 was, for example, part of a PARALLEL.
2934 If we succeed, write out any needed conversions. If we fail,
2935 it is hard to guess why we failed, so don't do anything
2936 special; just let the optimization be suppressed. */
2938 if (apply_change_group () && seq)
2939 emit_insns_before (seq, insn);
2944 /* These routines are responsible for converting virtual register references
2945 to the actual hard register references once RTL generation is complete.
2947 The following four variables are used for communication between the
2948 routines. They contain the offsets of the virtual registers from their
2949 respective hard registers. */
2951 static int in_arg_offset;
2952 static int var_offset;
2953 static int dynamic_offset;
2954 static int out_arg_offset;
2955 static int cfa_offset;
2957 /* In most machines, the stack pointer register is equivalent to the bottom of the stack. */
2960 #ifndef STACK_POINTER_OFFSET
2961 #define STACK_POINTER_OFFSET 0
2964 /* If not defined, pick an appropriate default for the offset of dynamically
2965 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2966 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2968 #ifndef STACK_DYNAMIC_OFFSET
2970 #ifdef ACCUMULATE_OUTGOING_ARGS
2971 /* The bottom of the stack points to the actual arguments. If
2972 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2973 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2974 stack space for register parameters is not pushed by the caller, but
2975 rather part of the fixed stack areas and hence not included in
2976 `current_function_outgoing_args_size'. Nevertheless, we must allow
2977 for it when allocating stack dynamic objects. */
2979 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2980 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2981 (current_function_outgoing_args_size \
2982 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2985 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2986 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2990 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2994 /* On a few machines, the CFA coincides with the arg pointer. */
2996 #ifndef ARG_POINTER_CFA_OFFSET
2997 #define ARG_POINTER_CFA_OFFSET 0
3001 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
3002 its address taken. DECL is the decl for the object stored in the
3003 register, for later use if we do need to force REG into the stack.
3004 REG is overwritten by the MEM like in put_reg_into_stack. */
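/* Illustrative sketch (register numbers are made up): for an `int'
   object living in (reg:SI 64), this builds
   (addressof:Pmode (reg:SI 103) 64) and then rewrites the original rtx
   in place into (mem:SI (addressof ...)), so every existing use of
   pseudo 64 now sees the MEM without any rescanning of the insns.  */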
3007 gen_mem_addressof (reg, decl)
3011 tree type = TREE_TYPE (decl);
3012 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
3013 SET_ADDRESSOF_DECL (r, decl);
3014 /* If the original REG was a user-variable, then so is the REG whose
3015 address is being taken. */
3016 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
3019 PUT_CODE (reg, MEM);
3020 PUT_MODE (reg, DECL_MODE (decl));
3021 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
3022 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
3023 MEM_ALIAS_SET (reg) = get_alias_set (decl);
3025 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
3026 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
3031 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
3034 flush_addressof (decl)
3037 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
3038 && DECL_RTL (decl) != 0
3039 && GET_CODE (DECL_RTL (decl)) == MEM
3040 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
3041 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
3042 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
3045 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
3048 put_addressof_into_stack (r, ht)
3050 struct hash_table *ht;
3052 tree decl = ADDRESSOF_DECL (r);
3053 rtx reg = XEXP (r, 0);
3055 if (GET_CODE (reg) != REG)
3058 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
3059 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
3060 ADDRESSOF_REGNO (r),
3061 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
3064 /* List of replacements made below in purge_addressof_1 when creating
3065 bitfield insertions. */
3066 static rtx purge_bitfield_addressof_replacements;
3068 /* List of replacements made below in purge_addressof_1 for patterns
3069 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
3070 corresponding (ADDRESSOF (REG ...)) and value is a substitution for
3071 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
3072 sufficient in complex cases, e.g. when some field values can be
3073 extracted by using a MEM with a narrower mode. */
3074 static rtx purge_addressof_replacements;
3076 /* Helper function for purge_addressof. See if the rtx expression at *LOC
3077 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
3078 the stack. If the function returns FALSE then the replacement could not be completed. */
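/* Illustrative cases (register numbers are made up): when the modes
   match, (mem:SI (addressof:SI (reg:SI 64) 63)) simply collapses back to
   the pseudo (reg:SI 64); a narrower access such as
   (mem:QI (addressof:SI (reg:SI 64) 63)) is instead rewritten as a
   bit-field extraction from the pseudo (or a bit-field insertion, when
   the MEM is a store destination).  */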
3082 purge_addressof_1 (loc, insn, force, store, ht)
3086 struct hash_table *ht;
3092 boolean result = true;
3094 /* Re-start here to avoid recursion in common cases. */
3101 code = GET_CODE (x);
3103 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
3106 /* We must create a copy of the rtx because it was created by
3107 overwriting a REG rtx which is always shared. */
3108 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3110 if (validate_change (insn, loc, sub, 0)
3111 || validate_replace_rtx (x, sub, insn))
3115 sub = force_operand (sub, NULL_RTX);
3116 if (! validate_change (insn, loc, sub, 0)
3117 && ! validate_replace_rtx (x, sub, insn))
3120 insns = gen_sequence ();
3122 emit_insn_before (insns, insn);
3125 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3127 rtx sub = XEXP (XEXP (x, 0), 0);
3130 if (GET_CODE (sub) == MEM)
3132 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
3133 MEM_COPY_ATTRIBUTES (sub2, sub);
3134 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
3138 if (GET_CODE (sub) == REG
3139 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3141 put_addressof_into_stack (XEXP (x, 0), ht);
3144 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3146 int size_x, size_sub;
3150 /* When processing REG_NOTES look at the list of
3151 replacements done on the insn to find the register that X was replaced by. */
3155 for (tem = purge_bitfield_addressof_replacements;
3157 tem = XEXP (XEXP (tem, 1), 1))
3158 if (rtx_equal_p (x, XEXP (tem, 0)))
3160 *loc = XEXP (XEXP (tem, 1), 0);
3164 /* See comment for purge_addressof_replacements. */
3165 for (tem = purge_addressof_replacements;
3167 tem = XEXP (XEXP (tem, 1), 1))
3168 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3170 rtx z = XEXP (XEXP (tem, 1), 0);
3172 if (GET_MODE (x) == GET_MODE (z)
3173 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3174 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3177 /* It can happen that the note may speak of things
3178 in a wider (or just different) mode than the
3179 code did. This is especially true of
3182 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
3185 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3186 && (GET_MODE_SIZE (GET_MODE (x))
3187 > GET_MODE_SIZE (GET_MODE (z))))
3189 /* This can occur as a result of invalid
3190 pointer casts, e.g. float f; ...
3191 *(long long int *)&f.
3192 ??? We could emit a warning here, but
3193 without a line number that wouldn't be very useful. */
3195 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3198 z = gen_lowpart (GET_MODE (x), z);
3204 /* Sometimes we may not be able to find the replacement. For
3205 example when the original insn was a MEM in a wider mode,
3206 and the note is part of a sign extension of a narrowed
3207 version of that MEM. Gcc testcase compile/990829-1.c can
3208 generate an example of this situation. Rather than complain,
3209 we return false, which will prompt our caller to remove the offending note. */
3214 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3215 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3217 /* Don't even consider working with paradoxical subregs,
3218 or the moral equivalent seen here. */
3219 if (size_x <= size_sub
3220 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3222 /* Do a bitfield insertion to mirror what would happen
3229 rtx p = PREV_INSN (insn);
3232 val = gen_reg_rtx (GET_MODE (x));
3233 if (! validate_change (insn, loc, val, 0))
3235 /* Discard the current sequence and put the
3236 ADDRESSOF on stack. */
3240 seq = gen_sequence ();
3242 emit_insn_before (seq, insn);
3243 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3247 store_bit_field (sub, size_x, 0, GET_MODE (x),
3248 val, GET_MODE_SIZE (GET_MODE (sub)),
3249 GET_MODE_SIZE (GET_MODE (sub)));
3251 /* Make sure to unshare any shared rtl that store_bit_field
3252 might have created. */
3253 unshare_all_rtl_again (get_insns ());
3255 seq = gen_sequence ();
3257 p = emit_insn_after (seq, insn);
3258 if (NEXT_INSN (insn))
3259 compute_insns_for_mem (NEXT_INSN (insn),
3260 p ? NEXT_INSN (p) : NULL_RTX,
3265 rtx p = PREV_INSN (insn);
3268 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3269 GET_MODE (x), GET_MODE (x),
3270 GET_MODE_SIZE (GET_MODE (sub)),
3271 GET_MODE_SIZE (GET_MODE (sub)));
3273 if (! validate_change (insn, loc, val, 0))
3275 /* Discard the current sequence and put the
3276 ADDRESSOF on stack. */
3281 seq = gen_sequence ();
3283 emit_insn_before (seq, insn);
3284 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3288 /* Remember the replacement so that the same one can be done
3289 on the REG_NOTES. */
3290 purge_bitfield_addressof_replacements
3291 = gen_rtx_EXPR_LIST (VOIDmode, x,
3294 purge_bitfield_addressof_replacements));
3296 /* We replaced with a reg -- all done. */
3300 else if (validate_change (insn, loc, sub, 0))
3302 /* Remember the replacement so that the same one can be done
3303 on the REG_NOTES. */
3304 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3308 for (tem = purge_addressof_replacements;
3310 tem = XEXP (XEXP (tem, 1), 1))
3311 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3313 XEXP (XEXP (tem, 1), 0) = sub;
3316 purge_addressof_replacements
3317 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3318 gen_rtx_EXPR_LIST (VOIDmode, sub,
3319 purge_addressof_replacements));
3325 /* else give up and put it into the stack */
3327 else if (code == ADDRESSOF)
3329 put_addressof_into_stack (x, ht);
3332 else if (code == SET)
3334 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3335 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3339 /* Scan all subexpressions. */
3340 fmt = GET_RTX_FORMAT (code);
3341 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3344 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3345 else if (*fmt == 'E')
3346 for (j = 0; j < XVECLEN (x, i); j++)
3347 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3353 /* Return a new hash table entry in HT. */
3355 static struct hash_entry *
3356 insns_for_mem_newfunc (he, ht, k)
3357 struct hash_entry *he;
3358 struct hash_table *ht;
3359 hash_table_key k ATTRIBUTE_UNUSED;
3361 struct insns_for_mem_entry *ifmhe;
3365 ifmhe = ((struct insns_for_mem_entry *)
3366 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3367 ifmhe->insns = NULL_RTX;
3372 /* Return a hash value for K, a REG. */
3374 static unsigned long
3375 insns_for_mem_hash (k)
3378 /* K is really a RTX. Just use the address as the hash value. */
3379 return (unsigned long) k;
3382 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3385 insns_for_mem_comp (k1, k2)
3392 struct insns_for_mem_walk_info {
3393 /* The hash table that we are using to record which INSNs use which
3395 struct hash_table *ht;
3397 /* The INSN we are currently processing. */
3400 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3401 to find the insns that use the REGs in the ADDRESSOFs. */
3405 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3406 that might be used in an ADDRESSOF expression, record this INSN in
3407 the hash table given by DATA (which is really a pointer to an
3408 insns_for_mem_walk_info structure). */
3411 insns_for_mem_walk (r, data)
3415 struct insns_for_mem_walk_info *ifmwi
3416 = (struct insns_for_mem_walk_info *) data;
3418 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3419 && GET_CODE (XEXP (*r, 0)) == REG)
3420 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3421 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3423 /* Lookup this MEM in the hashtable, creating it if necessary. */
3424 struct insns_for_mem_entry *ifme
3425 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3430 /* If we have not already recorded this INSN, do so now. Since
3431 we process the INSNs in order, we know that if we have
3432 recorded it, it must be at the front of the list. */
3433 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3435 /* We do the allocation on the same obstack as is used for
3436 the hash table since this memory will not be used once
3437 the hash table is deallocated. */
3438 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3439 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3448 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3449 which REGs in HT. */
3452 compute_insns_for_mem (insns, last_insn, ht)
3455 struct hash_table *ht;
3458 struct insns_for_mem_walk_info ifmwi;
3461 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3462 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3463 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3466 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3470 /* Helper function for purge_addressof called through for_each_rtx.
3471 Returns true iff the rtl is an ADDRESSOF. */
3473 is_addressof (rtl, data)
3475 void * data ATTRIBUTE_UNUSED;
3477 return GET_CODE (* rtl) == ADDRESSOF;
3480 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3481 (MEM (ADDRESSOF)) patterns, and force any needed registers into the stack. */
3485 purge_addressof (insns)
3489 struct hash_table ht;
3491 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3492 requires a fixup pass over the instruction stream to correct
3493 INSNs that depended on the REG being a REG, and not a MEM. But,
3494 these fixup passes are slow. Furthermore, most MEMs are not
3495 mentioned in very many instructions. So, we speed up the process
3496 by pre-calculating which REGs occur in which INSNs; that allows
3497 us to perform the fixup passes much more quickly. */
3498 hash_table_init (&ht,
3499 insns_for_mem_newfunc,
3501 insns_for_mem_comp);
3502 compute_insns_for_mem (insns, NULL_RTX, &ht);
3504 for (insn = insns; insn; insn = NEXT_INSN (insn))
3505 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3506 || GET_CODE (insn) == CALL_INSN)
3508 if (! purge_addressof_1 (&PATTERN (insn), insn,
3509 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3510 /* If we could not replace the ADDRESSOFs in the insn,
3511 something is wrong. */
3514 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3516 /* If we could not replace the ADDRESSOFs in the insn's notes,
3517 we can just remove the offending notes instead. */
3520 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3522 /* If we find a REG_RETVAL note then the insn is a libcall.
3523 Such insns must have REG_EQUAL notes as well, in order
3524 for later passes of the compiler to work. So it is not
3525 safe to delete the notes here, and instead we abort. */
3526 if (REG_NOTE_KIND (note) == REG_RETVAL)
3528 if (for_each_rtx (& note, is_addressof, NULL))
3529 remove_note (insn, note);
3535 hash_table_free (&ht);
3536 purge_bitfield_addressof_replacements = 0;
3537 purge_addressof_replacements = 0;
3539 /* REGs are shared. purge_addressof will destructively replace a REG
3540 with a MEM, which creates shared MEMs.
3542 Unfortunately, the children of put_reg_into_stack assume that MEMs
3543 referring to the same stack slot are shared (fixup_var_refs and
3544 the associated hash table code).
3546 So, we have to do another unsharing pass after we have flushed any
3547 REGs that had their address taken into the stack.
3549 It may be worth tracking whether or not we converted any REGs into
3550 MEMs to avoid this overhead when it is not needed. */
3551 unshare_all_rtl_again (get_insns ());
3554 /* Pass through the INSNS of function FNDECL and convert virtual register
3555 references to hard register references. */
3558 instantiate_virtual_regs (fndecl, insns)
3565 /* Compute the offsets to use for this function. */
3566 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3567 var_offset = STARTING_FRAME_OFFSET;
3568 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3569 out_arg_offset = STACK_POINTER_OFFSET;
3570 cfa_offset = ARG_POINTER_CFA_OFFSET;
3572 /* Scan all variables and parameters of this function. For each that is
3573 in memory, instantiate all virtual registers if the result is a valid
3574 address. If not, we do it later. That will handle most uses of virtual
3575 regs on many machines. */
3576 instantiate_decls (fndecl, 1);
3578 /* Initialize recognition, indicating that volatile is OK. */
3581 /* Scan through all the insns, instantiating every virtual register still present. */
3583 for (insn = insns; insn; insn = NEXT_INSN (insn))
3584 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3585 || GET_CODE (insn) == CALL_INSN)
3587 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3588 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3591 /* Instantiate the stack slots for the parm registers, for later use in
3592 addressof elimination. */
3593 for (i = 0; i < max_parm_reg; ++i)
3594 if (parm_reg_stack_loc[i])
3595 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3597 /* Now instantiate the remaining register equivalences for debugging info.
3598 These will not be valid addresses. */
3599 instantiate_decls (fndecl, 0);
3601 /* Indicate that, from now on, assign_stack_local should use
3602 frame_pointer_rtx. */
3603 virtuals_instantiated = 1;
3606 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3607 all virtual registers in their DECL_RTL's.
3609 If VALID_ONLY, do this only if the resulting address is still valid.
3610 Otherwise, always do it. */
3613 instantiate_decls (fndecl, valid_only)
3619 if (DECL_SAVED_INSNS (fndecl))
3620 /* When compiling an inline function, the obstack used for
3621 rtl allocation is the maybepermanent_obstack. Calling
3622 `resume_temporary_allocation' switches us back to that
3623 obstack while we process this function's parameters. */
3624 resume_temporary_allocation ();
3626 /* Process all parameters of the function. */
3627 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3629 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3631 instantiate_decl (DECL_RTL (decl), size, valid_only);
3633 /* If the parameter was promoted, then the incoming RTL mode may be
3634 larger than the declared type size. We must use the larger of the two sizes. */
3636 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3637 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3640 /* Now process all variables defined in the function or its subblocks. */
3641 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3643 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3645 /* Save all rtl allocated for this function by raising the
3646 high-water mark on the maybepermanent_obstack. */
3648 /* All further rtl allocation is now done in the current_obstack. */
3649 rtl_in_current_obstack ();
3653 /* Subroutine of instantiate_decls: Process all decls in the given
3654 BLOCK node and all its subblocks. */
3657 instantiate_decls_1 (let, valid_only)
3663 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3664 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3667 /* Process all subblocks. */
3668 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3669 instantiate_decls_1 (t, valid_only);
3672 /* Subroutine of the preceding procedures: Given RTL representing a
3673 decl and the size of the object, do any instantiation required.
3675 If VALID_ONLY is non-zero, it means that the RTL should only be
3676 changed if the new address is valid. */
3679 instantiate_decl (x, size, valid_only)
3684 enum machine_mode mode;
3687 /* If this is not a MEM, no need to do anything. Similarly if the
3688 address is a constant or a register that is not a virtual register. */
3690 if (x == 0 || GET_CODE (x) != MEM)
3694 if (CONSTANT_P (addr)
3695 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3696 || (GET_CODE (addr) == REG
3697 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3698 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3701 /* If we should only do this if the address is valid, copy the address.
3702 We need to do this so we can undo any changes that might make the
3703 address invalid. This copy is unfortunate, but probably can't be avoided. */
3707 addr = copy_rtx (addr);
3709 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3713 /* Now verify that the resulting address is valid for every integer or
3714 floating-point mode up to and including SIZE bytes long. We do this
3715 since the object might be accessed in any mode and frame addresses are shared. */
3718 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3719 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3720 mode = GET_MODE_WIDER_MODE (mode))
3721 if (! memory_address_p (mode, addr))
3724 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3725 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3726 mode = GET_MODE_WIDER_MODE (mode))
3727 if (! memory_address_p (mode, addr))
3731 /* Put back the address now that we have updated it and we either know
3732 it is valid or we don't care whether it is valid. */
3737 /* Given a pointer to a piece of rtx and an optional pointer to the
3738 containing object, instantiate any virtual registers present in it.
3740 If EXTRA_INSNS, we always do the replacement and generate
3741 any extra insns before OBJECT. If it is zero, we do nothing if the replacement is not valid.
3744 Return 1 if we either had nothing to do or if we were able to do the
3745 needed replacement. Return 0 otherwise; we only return zero if
3746 EXTRA_INSNS is zero.
3748 We first try some simple transformations to avoid the creation of extra pseudos. */
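/* Illustrative example (the -16 is a made-up frame offset): with
   var_offset == -16, the address
   (plus:SI (reg:SI virtual-stack-vars) (const_int 8)) is rewritten as
   (plus:SI (reg:SI frame-pointer) (const_int -8)); if the containing
   insn cannot accept that form directly, the sum is instead computed
   into a fresh pseudo by insns emitted in front of it.  */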
3752 instantiate_virtual_regs_1 (loc, object, extra_insns)
3760 HOST_WIDE_INT offset = 0;
3766 /* Re-start here to avoid recursion in common cases. */
3773 code = GET_CODE (x);
3775 /* Check for some special cases. */
3792 /* We are allowed to set the virtual registers. This means that
3793 the actual register should receive the source minus the
3794 appropriate offset. This is used, for example, in the handling
3795 of non-local gotos. */
3796 if (SET_DEST (x) == virtual_incoming_args_rtx)
3797 new = arg_pointer_rtx, offset = - in_arg_offset;
3798 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3799 new = frame_pointer_rtx, offset = - var_offset;
3800 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3801 new = stack_pointer_rtx, offset = - dynamic_offset;
3802 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3803 new = stack_pointer_rtx, offset = - out_arg_offset;
3804 else if (SET_DEST (x) == virtual_cfa_rtx)
3805 new = arg_pointer_rtx, offset = - cfa_offset;
3809 /* The only valid sources here are PLUS or REG. Just do
3810 the simplest possible thing to handle them. */
3811 if (GET_CODE (SET_SRC (x)) != REG
3812 && GET_CODE (SET_SRC (x)) != PLUS)
3816 if (GET_CODE (SET_SRC (x)) != REG)
3817 temp = force_operand (SET_SRC (x), NULL_RTX);
3820 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3824 emit_insns_before (seq, object);
3827 if (! validate_change (object, &SET_SRC (x), temp, 0)
3834 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3839 /* Handle special case of virtual register plus constant. */
3840 if (CONSTANT_P (XEXP (x, 1)))
3842 rtx old, new_offset;
3844 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3845 if (GET_CODE (XEXP (x, 0)) == PLUS)
3847 rtx inner = XEXP (XEXP (x, 0), 0);
3849 if (inner == virtual_incoming_args_rtx)
3850 new = arg_pointer_rtx, offset = in_arg_offset;
3851 else if (inner == virtual_stack_vars_rtx)
3852 new = frame_pointer_rtx, offset = var_offset;
3853 else if (inner == virtual_stack_dynamic_rtx)
3854 new = stack_pointer_rtx, offset = dynamic_offset;
3855 else if (inner == virtual_outgoing_args_rtx)
3856 new = stack_pointer_rtx, offset = out_arg_offset;
3857 else if (inner == virtual_cfa_rtx)
3858 new = arg_pointer_rtx, offset = cfa_offset;
3865 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3867 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3870 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3871 new = arg_pointer_rtx, offset = in_arg_offset;
3872 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3873 new = frame_pointer_rtx, offset = var_offset;
3874 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3875 new = stack_pointer_rtx, offset = dynamic_offset;
3876 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3877 new = stack_pointer_rtx, offset = out_arg_offset;
3878 else if (XEXP (x, 0) == virtual_cfa_rtx)
3879 new = arg_pointer_rtx, offset = cfa_offset;
3882 /* We know the second operand is a constant. Unless the
3883 first operand is a REG (which has already been checked),
3884 it needs to be checked. */
3885 if (GET_CODE (XEXP (x, 0)) != REG)
3893 new_offset = plus_constant (XEXP (x, 1), offset);
3895 /* If the new constant is zero, try to replace the sum with just the register. */
3897 if (new_offset == const0_rtx
3898 && validate_change (object, loc, new, 0))
3901 /* Next try to replace the register and new offset.
3902 There are two changes to validate here and we can't assume that
3903 in the case where the old offset equals the new one, just changing the register
3904 will yield a valid insn. In the interests of a little efficiency,
3905 however, we only call validate change once (we don't queue up the
3906 changes and then call apply_change_group). */
3910 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3911 : (XEXP (x, 0) = new,
3912 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3920 /* Otherwise copy the new constant into a register and replace
3921 the constant with that register. */
3922 temp = gen_reg_rtx (Pmode);
3924 if (validate_change (object, &XEXP (x, 1), temp, 0)
3925 && ! flag_propolice_protection)
3926 emit_insn_before (gen_move_insn (temp, new_offset), object);
3929 /* If that didn't work, replace this expression with a
3930 register containing the sum. */
3933 new = gen_rtx_PLUS (Pmode, new, new_offset);
3936 temp = force_operand (new, NULL_RTX);
3940 emit_insns_before (seq, object);
3941 if (! validate_change (object, loc, temp, 0)
3942 && ! validate_replace_rtx (x, temp, object))
3950 /* Fall through to generic two-operand expression case. */
3956 case DIV: case UDIV:
3957 case MOD: case UMOD:
3958 case AND: case IOR: case XOR:
3959 case ROTATERT: case ROTATE:
3960 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3962 case GE: case GT: case GEU: case GTU:
3963 case LE: case LT: case LEU: case LTU:
3964 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3965 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3970 /* Most cases of MEM that convert to valid addresses have already been
3971 handled by our scan of decls. The only special handling we
3972 need here is to make a copy of the rtx to ensure it isn't being
3973 shared if we have to change it to a pseudo.
3975 If the rtx is a simple reference to an address via a virtual register,
3976 it can potentially be shared. In such cases, first try to make it
3977 a valid address, which can also be shared. Otherwise, copy it and
3980 First check for common cases that need no processing. These are
3981 usually due to instantiation already being done on a previous instance
3985 if (CONSTANT_ADDRESS_P (temp)
3986 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3987 || temp == arg_pointer_rtx
3989 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3990 || temp == hard_frame_pointer_rtx
3992 || temp == frame_pointer_rtx)
3995 if (GET_CODE (temp) == PLUS
3996 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3997 && (XEXP (temp, 0) == frame_pointer_rtx
3998 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3999 || XEXP (temp, 0) == hard_frame_pointer_rtx
4001 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4002 || XEXP (temp, 0) == arg_pointer_rtx
4007 if (temp == virtual_stack_vars_rtx
4008 || temp == virtual_incoming_args_rtx
4009 || (GET_CODE (temp) == PLUS
4010 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4011 && (XEXP (temp, 0) == virtual_stack_vars_rtx
4012 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
4014 /* This MEM may be shared. If the substitution can be done without
4015 the need to generate new pseudos, we want to do it in place
4016 so all copies of the shared rtx benefit. The call below will
4017 only make substitutions if the resulting address is still
4020 Note that we cannot pass X as the object in the recursive call
4021 since the insn being processed may not allow all valid
4022 addresses. However, if we were not passed an object, we can
4023 only modify X without copying it if X will have a valid address.
4026 ??? Also note that this can still lose if OBJECT is an insn that
4027 has fewer restrictions on an address than some other insn.
4028 In that case, we will modify the shared address. This case
4029 doesn't seem very likely, though. One case where this could
4030 happen is in the case of a USE or CLOBBER reference, but we
4031 take care of that below. */
4033 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4034 object ? object : x, 0))
4037 /* Otherwise make a copy and process that copy. We copy the entire
4038 RTL expression since it might be a PLUS which could also be shared. */
4040 *loc = x = copy_rtx (x);
4043 /* Fall through to generic unary operation case. */
4045 case STRICT_LOW_PART:
4047 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4048 case SIGN_EXTEND: case ZERO_EXTEND:
4049 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4050 case FLOAT: case FIX:
4051 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4055 /* These cases either have just one operand or we know that we need not
4056 check the rest of the operands. */
4062 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4063 go ahead and make the invalid one, but do it to a copy. For a REG,
4064 just make the recursive call, since there's no chance of a problem. */
4066 if ((GET_CODE (XEXP (x, 0)) == MEM
4067 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4069 || (GET_CODE (XEXP (x, 0)) == REG
4070 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4073 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4078 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4079 in front of this insn and substitute the temporary. */
4080 if (x == virtual_incoming_args_rtx)
4081 new = arg_pointer_rtx, offset = in_arg_offset;
4082 else if (x == virtual_stack_vars_rtx)
4083 new = frame_pointer_rtx, offset = var_offset;
4084 else if (x == virtual_stack_dynamic_rtx)
4085 new = stack_pointer_rtx, offset = dynamic_offset;
4086 else if (x == virtual_outgoing_args_rtx)
4087 new = stack_pointer_rtx, offset = out_arg_offset;
4088 else if (x == virtual_cfa_rtx)
4089 new = arg_pointer_rtx, offset = cfa_offset;
4093 temp = plus_constant (new, offset);
4094 if (!validate_change (object, loc, temp, 0))
4100 temp = force_operand (temp, NULL_RTX);
4104 emit_insns_before (seq, object);
4105 if (! validate_change (object, loc, temp, 0)
4106 && ! validate_replace_rtx (x, temp, object))
4114 if (GET_CODE (XEXP (x, 0)) == REG)
4117 else if (GET_CODE (XEXP (x, 0)) == MEM)
4119 /* If we have a (addressof (mem ..)), do any instantiation inside
4120 since we know we'll be making the inside valid when we finally
4121 remove the ADDRESSOF. */
4122 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4131 /* Scan all subexpressions. */
4132 fmt = GET_RTX_FORMAT (code);
4133 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4136 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4139 else if (*fmt == 'E')
4140 for (j = 0; j < XVECLEN (x, i); j++)
4141 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
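/* Illustrative sketch: the loop above walks an arbitrary expression by
   consulting its per-code format string, where an 'e' slot is a single
   sub-expression and an 'E' slot is a vector of them.  The compiled-out toy
   below (every name in it is invented for the example, it is not from this
   file) applies the same format-driven dispatch to a minimal node type.  */
#if 0
#include <stdio.h>

struct node
{
  const char *fmt;          /* one letter per operand, 'e' or 'E' */
  struct node *op[2];       /* operands for 'e' slots */
  struct node **vec;        /* operand for an 'E' slot */
  int veclen;
  const char *name;
};

static void
walk (struct node *x)
{
  int i, j;

  printf ("visit %s\n", x->name);
  for (i = 0; x->fmt[i]; i++)
    if (x->fmt[i] == 'e')
      walk (x->op[i]);
    else if (x->fmt[i] == 'E')
      for (j = 0; j < x->veclen; j++)
        walk (x->vec[j]);
}

static struct node a = { "", { 0, 0 }, 0, 0, "a" };
static struct node b = { "", { 0, 0 }, 0, 0, "b" };
static struct node *elts[] = { &a, &b };
static struct node sum = { "ee", { &a, &b }, 0, 0, "sum" };
static struct node par = { "E", { 0, 0 }, elts, 2, "vector" };
static struct node top = { "ee", { &sum, &par }, 0, 0, "top" };

int
main (void)
{
  walk (&top);               /* visits top, sum, a, b, vector, a, b */
  return 0;
}
#endif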
4148 /* Optimization: assuming this function does not receive nonlocal gotos,
4149 delete the handlers for such, as well as the insns to establish
4150 and disestablish them. */
4156 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4158 /* Delete the handler by turning off the flag that would
4159 prevent jump_optimize from deleting it.
4160 Also permit deletion of the nonlocal labels themselves
4161 if nothing local refers to them. */
4162 if (GET_CODE (insn) == CODE_LABEL)
4166 LABEL_PRESERVE_P (insn) = 0;
4168 /* Remove it from the nonlocal_label list, to avoid confusing flow. */
4170 for (t = nonlocal_labels, last_t = 0; t;
4171 last_t = t, t = TREE_CHAIN (t))
4172 if (DECL_RTL (TREE_VALUE (t)) == insn)
4177 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4179 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4182 if (GET_CODE (insn) == INSN)
4186 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4187 if (reg_mentioned_p (t, PATTERN (insn)))
4193 || (nonlocal_goto_stack_level != 0
4194 && reg_mentioned_p (nonlocal_goto_stack_level,
4201 /* Output a USE for any register use in RTL.
4202 This is used with -noreg to mark the extent of the lifespan
4203 of any registers used in a user-visible variable's DECL_RTL. */
4209 if (GET_CODE (rtl) == REG)
4210 /* This is a register variable. */
4211 emit_insn (gen_rtx_USE (VOIDmode, rtl));
4212 else if (GET_CODE (rtl) == MEM
4213 && GET_CODE (XEXP (rtl, 0)) == REG
4214 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
4215 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
4216 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
4217 /* This is a variable-sized structure. */
4218 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
4221 /* Like use_variable except that it outputs the USEs after INSN
4222 instead of at the end of the insn-chain. */
4225 use_variable_after (rtl, insn)
4228 if (GET_CODE (rtl) == REG)
4229 /* This is a register variable. */
4230 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
4231 else if (GET_CODE (rtl) == MEM
4232 && GET_CODE (XEXP (rtl, 0)) == REG
4233 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
4234 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
4235 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
4236 /* This is a variable-sized structure. */
4237 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
4243 return max_parm_reg;
4246 /* Return the first insn following those generated by `assign_parms'. */
4249 get_first_nonparm_insn ()
4252 return NEXT_INSN (last_parm_insn);
4253 return get_insns ();
4256 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4257 Crash if there is none. */
4260 get_first_block_beg ()
4262 register rtx searcher;
4263 register rtx insn = get_first_nonparm_insn ();
4265 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4266 if (GET_CODE (searcher) == NOTE
4267 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4270 abort (); /* Invalid call to this function. (See comments above.) */
4274 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4275 This means a type for which function calls must pass an address to the
4276 function or get an address back from the function.
4277 EXP may be a type node or an expression (whose type is tested). */
4280 aggregate_value_p (exp)
4283 int i, regno, nregs;
4286 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
4289 type = TREE_TYPE (exp);
4291 if (RETURN_IN_MEMORY (type))
4293 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4294 and thus can't be returned in registers. */
4295 if (TREE_ADDRESSABLE (type))
4297 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4299 /* Make sure we have suitable call-clobbered regs to return
4300 the value in; if not, we must return it in memory. */
4301 reg = hard_function_value (type, 0);
4303 /* If we have something other than a REG (e.g. a PARALLEL), then assume it is OK. */
4305 if (GET_CODE (reg) != REG)
4308 regno = REGNO (reg);
4309 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4310 for (i = 0; i < nregs; i++)
4311 if (! call_used_regs[regno + i])
4316 /* Assign RTL expressions to the function's parameters.
4317 This may involve copying them into registers and using
4318 those registers as the RTL for them.
4320 If SECOND_TIME is non-zero it means that this function is being
4321 called a second time. This is done by integrate.c when a function's
4322 compilation is deferred. We need to come back here in case the
4323 FUNCTION_ARG macro computes items needed for the rest of the compilation
4324 (such as changing which registers are fixed or caller-saved). But suppress
4325 writing any insns or setting DECL_RTL of anything in this case. */
4328 assign_parms (fndecl, second_time)
4333 register rtx entry_parm = 0;
4334 register rtx stack_parm = 0;
4335 CUMULATIVE_ARGS args_so_far;
4336 enum machine_mode promoted_mode, passed_mode;
4337 enum machine_mode nominal_mode, promoted_nominal_mode;
4339 /* Total space needed so far for args on the stack,
4340 given as a constant and a tree-expression. */
4341 struct args_size stack_args_size;
4342 tree fntype = TREE_TYPE (fndecl);
4343 tree fnargs = DECL_ARGUMENTS (fndecl);
4344 /* This is used for the arg pointer when referring to stack args. */
4345 rtx internal_arg_pointer;
4346 /* This is a dummy PARM_DECL that we used for the function result if
4347 the function returns a structure. */
4348 tree function_result_decl = 0;
4349 #ifdef SETUP_INCOMING_VARARGS
4350 int varargs_setup = 0;
4352 rtx conversion_insns = 0;
4354 /* Nonzero if the last arg is named `__builtin_va_alist',
4355 which is used on some machines for old-fashioned non-ANSI varargs.h;
4356 this should be stuck onto the stack as if it had arrived there. */
4358 = (current_function_varargs
4360 && (parm = tree_last (fnargs)) != 0
4362 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4363 "__builtin_va_alist")));
4365 /* Nonzero if function takes extra anonymous args.
4366 This means the last named arg must be on the stack
4367 right before the anonymous ones. */
4369 = (TYPE_ARG_TYPES (fntype) != 0
4370 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4371 != void_type_node));
4373 current_function_stdarg = stdarg;
4375 /* If the reg that the virtual arg pointer will be translated into is
4376 not a fixed reg or is the stack pointer, make a copy of the virtual
4377 arg pointer, and address parms via the copy. The frame pointer is
4378 considered fixed even though it is not marked as such.
4380 The second time through, simply use ap to avoid generating rtx. */
4382 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4383 || ! (fixed_regs[ARG_POINTER_REGNUM]
4384 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
4386 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4388 internal_arg_pointer = virtual_incoming_args_rtx;
4389 current_function_internal_arg_pointer = internal_arg_pointer;
4391 stack_args_size.constant = 0;
4392 stack_args_size.var = 0;
4394 /* If struct value address is treated as the first argument, make it so. */
4395 if (aggregate_value_p (DECL_RESULT (fndecl))
4396 && ! current_function_returns_pcc_struct
4397 && struct_value_incoming_rtx == 0)
4399 tree type = build_pointer_type (TREE_TYPE (fntype));
4401 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4403 DECL_ARG_TYPE (function_result_decl) = type;
4404 TREE_CHAIN (function_result_decl) = fnargs;
4405 fnargs = function_result_decl;
4408 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4409 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4410 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
4412 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4413 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4415 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4418 /* We haven't yet found an argument that we must push and pretend the caller did. */
4420 current_function_pretend_args_size = 0;
4422 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4424 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4425 struct args_size stack_offset;
4426 struct args_size arg_size;
4427 int passed_pointer = 0;
4428 int did_conversion = 0;
4429 tree passed_type = DECL_ARG_TYPE (parm);
4430 tree nominal_type = TREE_TYPE (parm);
4433 /* Set LAST_NAMED if this is the last named arg before some anonymous args. */
4435 int last_named = ((TREE_CHAIN (parm) == 0
4436 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4437 && (stdarg || current_function_varargs));
4438 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4439 most machines, if this is a varargs/stdarg function, then we treat
4440 the last named arg as if it were anonymous too. */
4441 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4443 if (TREE_TYPE (parm) == error_mark_node
4444 /* This can happen after weird syntax errors
4445 or if an enum type is defined among the parms. */
4446 || TREE_CODE (parm) != PARM_DECL
4447 || passed_type == NULL)
4449 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4450 = gen_rtx_MEM (BLKmode, const0_rtx);
4451 TREE_USED (parm) = 1;
4455 /* For varargs.h function, save info about regs and stack space
4456 used by the individual args, not including the va_alist arg. */
4457 if (hide_last_arg && last_named)
4458 current_function_args_info = args_so_far;
4460 /* Find mode of arg as it is passed, and mode of arg
4461 as it should be during execution of this function. */
4462 passed_mode = TYPE_MODE (passed_type);
4463 nominal_mode = TYPE_MODE (nominal_type);
4465 /* If the parm's mode is VOID, its value doesn't matter,
4466 so avoid the usual things like emit_move_insn that could crash. */
4467 if (nominal_mode == VOIDmode)
4469 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4473 /* If the parm is to be passed as a transparent union, use the
4474 type of the first field for the tests below. We have already
4475 verified that the modes are the same. */
4476 if (DECL_TRANSPARENT_UNION (parm)
4477 || TYPE_TRANSPARENT_UNION (passed_type))
4478 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4480 /* See if this arg was passed by invisible reference. It is if
4481 it is an object whose size depends on the contents of the
4482 object itself or if the machine requires these objects be passed that way. */
4485 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4486 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4487 || TREE_ADDRESSABLE (passed_type)
4488 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4489 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4490 passed_type, named_arg)
4494 passed_type = nominal_type = build_pointer_type (passed_type);
4496 passed_mode = nominal_mode = Pmode;
4499 promoted_mode = passed_mode;
4501 #ifdef PROMOTE_FUNCTION_ARGS
4502 /* Compute the mode to which the arg is actually extended. */
4503 unsignedp = TREE_UNSIGNED (passed_type);
4504 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4507 /* Let machine desc say which reg (if any) the parm arrives in.
4508 0 means it arrives on the stack. */
4509 #ifdef FUNCTION_INCOMING_ARG
4510 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4511 passed_type, named_arg);
4513 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4514 passed_type, named_arg);
4517 if (entry_parm == 0)
4518 promoted_mode = passed_mode;
4520 #ifdef SETUP_INCOMING_VARARGS
4521 /* If this is the last named parameter, do any required setup for
4522 varargs or stdargs. We need to know about the case of this being an
4523 addressable type, in which case we skip the registers it
4524 would have arrived in.
4526 For stdargs, LAST_NAMED will be set for two parameters, the one that
4527 is actually the last named, and the dummy parameter. We only
4528 want to do this action once.
4530 Also, indicate when RTL generation is to be suppressed. */
4531 if (last_named && !varargs_setup)
4533 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4534 current_function_pretend_args_size,
4540 /* Determine parm's home in the stack,
4541 in case it arrives in the stack or we should pretend it did.
4543 Compute the stack position and rtx where the argument arrives and its size.
4546 There is one complexity here: If this was a parameter that would
4547 have been passed in registers, but wasn't only because it is
4548 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4549 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4550 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4551 0 as it was the previous time. */
4553 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4554 locate_and_pad_parm (promoted_mode, passed_type,
4555 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4558 #ifdef FUNCTION_INCOMING_ARG
4559 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4561 pretend_named) != 0,
4563 FUNCTION_ARG (args_so_far, promoted_mode,
4565 pretend_named) != 0,
4568 fndecl, &stack_args_size, &stack_offset, &arg_size);
4572 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4574 if (offset_rtx == const0_rtx)
4575 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4577 stack_parm = gen_rtx_MEM (promoted_mode,
4578 gen_rtx_PLUS (Pmode,
4579 internal_arg_pointer,
4582 /* If this is a memory ref that contains aggregate components,
4583 mark it as such for cse and loop optimize. Likewise if it is read-only. */
4585 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4586 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4587 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4590 /* If this parameter was passed both in registers and in the stack,
4591 use the copy on the stack. */
4592 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4595 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4596 /* If this parm was passed part in regs and part in memory,
4597 pretend it arrived entirely in memory
4598 by pushing the register-part onto the stack.
4600 In the special case of a DImode or DFmode that is split,
4601 we could put it together in a pseudoreg directly,
4602 but for now that's not worth bothering with. */
4606 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4607 passed_type, named_arg);
4611 current_function_pretend_args_size
4612 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4613 / (PARM_BOUNDARY / BITS_PER_UNIT)
4614 * (PARM_BOUNDARY / BITS_PER_UNIT));
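/* Illustrative sketch: the statement above rounds the bytes occupied by the
   pushed registers up to a multiple of the parameter boundary with the usual
   divide-then-multiply idiom.  The compiled-out snippet below uses assumed
   values (a 4-byte word and a 64-bit PARM_BOUNDARY) chosen only for the
   example.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int units_per_word = 4;           /* assumed UNITS_PER_WORD */
  int parm_boundary_bytes = 8;      /* assumed PARM_BOUNDARY / BITS_PER_UNIT */
  int nregs = 3;                    /* three partial registers pushed back */
  int pretend_size;

  /* 3 * 4 = 12 bytes, rounded up to the next multiple of 8 gives 16.  */
  pretend_size = ((nregs * units_per_word) + parm_boundary_bytes - 1)
                 / parm_boundary_bytes * parm_boundary_bytes;

  printf ("pretend args size = %d\n", pretend_size);   /* prints 16 */
  return 0;
}
#endif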
4618 /* Handle calls that pass values in multiple non-contiguous
4619 locations. The Irix 6 ABI has examples of this. */
4620 if (GET_CODE (entry_parm) == PARALLEL)
4621 emit_group_store (validize_mem (stack_parm), entry_parm,
4622 int_size_in_bytes (TREE_TYPE (parm)),
4623 (TYPE_ALIGN (TREE_TYPE (parm))
4626 move_block_from_reg (REGNO (entry_parm),
4627 validize_mem (stack_parm), nregs,
4628 int_size_in_bytes (TREE_TYPE (parm)));
4630 entry_parm = stack_parm;
4635 /* If we didn't decide this parm came in a register,
4636 by default it came on the stack. */
4637 if (entry_parm == 0)
4638 entry_parm = stack_parm;
4640 /* Record permanently how this parm was passed. */
4642 DECL_INCOMING_RTL (parm) = entry_parm;
4644 /* If there is actually space on the stack for this parm,
4645 count it in stack_args_size; otherwise set stack_parm to 0
4646 to indicate there is no preallocated stack slot for the parm. */
4648 if (entry_parm == stack_parm
4649 || (GET_CODE (entry_parm) == PARALLEL
4650 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4651 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4652 /* On some machines, even if a parm value arrives in a register
4653 there is still an (uninitialized) stack slot allocated for it.
4655 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4656 whether this parameter already has a stack slot allocated,
4657 because an arg block exists only if current_function_args_size
4658 is larger than some threshold, and we haven't calculated that
4659 yet. So, for now, we just assume that stack slots never exist in this case. */
4661 || REG_PARM_STACK_SPACE (fndecl) > 0
4665 stack_args_size.constant += arg_size.constant;
4667 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4670 /* No stack slot was pushed for this parm. */
4673 /* Update info on where next arg arrives in registers. */
4675 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4676 passed_type, named_arg);
4678 /* If this is our second time through, we are done with this parm. */
4682 /* If we can't trust the parm stack slot to be aligned enough
4683 for its ultimate type, don't use that slot after entry.
4684 We'll make another stack slot, if we need one. */
4686 int thisparm_boundary
4687 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4689 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4693 /* If parm was passed in memory, and we need to convert it on entry,
4694 don't store it back in that same slot. */
4696 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4700 /* Now adjust STACK_PARM to the mode and precise location
4701 where this parameter should live during execution,
4702 if we discover that it must live in the stack during execution.
4703 To make debuggers happier on big-endian machines, we store
4704 the value in the last bytes of the space available. */
4706 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4711 if (BYTES_BIG_ENDIAN
4712 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4713 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4714 - GET_MODE_SIZE (nominal_mode));
4716 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4717 if (offset_rtx == const0_rtx)
4718 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4720 stack_parm = gen_rtx_MEM (nominal_mode,
4721 gen_rtx_PLUS (Pmode,
4722 internal_arg_pointer,
4725 /* If this is a memory ref that contains aggregate components,
4726 mark it as such for cse and loop optimize. */
4727 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4732 /* We need this "use" info, because the gcc-register->stack-register
4733 converter in reg-stack.c needs to know which registers are active
4734 at the start of the function call. The actual parameter loading
4735 instructions are not always available then anymore, since they might
4736 have been optimised away. */
4738 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4739 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4742 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4743 in the mode in which it arrives.
4744 STACK_PARM is an RTX for a stack slot where the parameter can live
4745 during the function (in case we want to put it there).
4746 STACK_PARM is 0 if no stack slot was pushed for it.
4748 Now output code if necessary to convert ENTRY_PARM to
4749 the type in which this function declares it,
4750 and store that result in an appropriate place,
4751 which may be a pseudo reg, may be STACK_PARM,
4752 or may be a local stack slot if STACK_PARM is 0.
4754 Set DECL_RTL to that place. */
4756 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4758 /* If a BLKmode arrives in registers, copy it to a stack slot.
4759 Handle calls that pass values in multiple non-contiguous
4760 locations. The Irix 6 ABI has examples of this. */
4761 if (GET_CODE (entry_parm) == REG
4762 || GET_CODE (entry_parm) == PARALLEL)
4765 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4768 /* Note that we will be storing an integral number of words.
4769 So we have to be careful to ensure that we allocate an
4770 integral number of words. We do this below in the
4771 assign_stack_local if space was not allocated in the argument
4772 list. If it was, this will not work if PARM_BOUNDARY is not
4773 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4774 if it becomes a problem. */
4776 if (stack_parm == 0)
4779 = assign_stack_local (GET_MODE (entry_parm),
4782 /* If this is a memory ref that contains aggregate
4783 components, mark it as such for cse and loop optimize. */
4784 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4787 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4790 if (TREE_READONLY (parm))
4791 RTX_UNCHANGING_P (stack_parm) = 1;
4793 /* Handle calls that pass values in multiple non-contiguous
4794 locations. The Irix 6 ABI has examples of this. */
4795 if (GET_CODE (entry_parm) == PARALLEL)
4796 emit_group_store (validize_mem (stack_parm), entry_parm,
4797 int_size_in_bytes (TREE_TYPE (parm)),
4798 (TYPE_ALIGN (TREE_TYPE (parm))
4801 move_block_from_reg (REGNO (entry_parm),
4802 validize_mem (stack_parm),
4803 size_stored / UNITS_PER_WORD,
4804 int_size_in_bytes (TREE_TYPE (parm)));
4806 DECL_RTL (parm) = stack_parm;
4808 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4809 && ! DECL_INLINE (fndecl))
4810 /* layout_decl may set this. */
4811 || TREE_ADDRESSABLE (parm)
4812 || TREE_SIDE_EFFECTS (parm)
4813 /* If -ffloat-store specified, don't put explicit
4814 float variables into registers. */
4815 || (flag_float_store
4816 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4817 /* Always assign pseudo to structure return or item passed
4818 by invisible reference. */
4819 || passed_pointer || parm == function_result_decl)
4821 /* Store the parm in a pseudoregister during the function, but we
4822 may need to do it in a wider mode. */
4824 register rtx parmreg;
4825 int regno, regnoi = 0, regnor = 0;
4827 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4829 promoted_nominal_mode
4830 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4832 parmreg = gen_reg_rtx (promoted_nominal_mode);
4833 mark_user_reg (parmreg);
4835 /* If this was an item that we received a pointer to, set DECL_RTL appropriately. */
4840 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4841 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4844 DECL_RTL (parm) = parmreg;
4846 /* Copy the value into the register. */
4847 if (nominal_mode != passed_mode
4848 || promoted_nominal_mode != promoted_mode)
4851 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4852 mode, by the caller. We now have to convert it to
4853 NOMINAL_MODE, if different. However, PARMREG may be in
4854 a different mode than NOMINAL_MODE if it is being stored promoted.
4857 If ENTRY_PARM is a hard register, it might be in a register
4858 not valid for operating in its mode (e.g., an odd-numbered
4859 register for a DFmode). In that case, moves are the only
4860 thing valid, so we can't do a convert from there. This
4861 occurs when the calling sequence allows such misaligned usages.
4864 In addition, the conversion may involve a call, which could
4865 clobber parameters which haven't been copied to pseudo
4866 registers yet. Therefore, we must first copy the parm to
4867 a pseudo reg here, and save the conversion until after all
4868 parameters have been moved. */
4870 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4872 emit_move_insn (tempreg, validize_mem (entry_parm));
4874 push_to_sequence (conversion_insns);
4875 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4877 /* TREE_USED gets set erroneously during expand_assignment. */
4878 save_tree_used = TREE_USED (parm);
4879 expand_assignment (parm,
4880 make_tree (nominal_type, tempreg), 0, 0);
4881 TREE_USED (parm) = save_tree_used;
4882 conversion_insns = get_insns ();
4887 emit_move_insn (parmreg, validize_mem (entry_parm));
4889 /* If we were passed a pointer but the actual value
4890 can safely live in a register, put it in one. */
4891 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4892 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4893 && ! DECL_INLINE (fndecl))
4894 /* layout_decl may set this. */
4895 || TREE_ADDRESSABLE (parm)
4896 || TREE_SIDE_EFFECTS (parm)
4897 /* If -ffloat-store specified, don't put explicit
4898 float variables into registers. */
4899 || (flag_float_store
4900 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4902 /* We can't use nominal_mode, because it will have been set to
4903 Pmode above. We must use the actual mode of the parm. */
4904 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4905 mark_user_reg (parmreg);
4906 emit_move_insn (parmreg, DECL_RTL (parm));
4907 DECL_RTL (parm) = parmreg;
4908 /* STACK_PARM is the pointer, not the parm, and PARMREG is now the parm. */
4912 #ifdef FUNCTION_ARG_CALLEE_COPIES
4913 /* If we are passed an arg by reference and it is our responsibility
4914 to make a copy, do it now.
4915 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4916 original argument, so we must recreate them in the call to
4917 FUNCTION_ARG_CALLEE_COPIES. */
4918 /* ??? Later add code to handle the case that if the argument isn't
4919 modified, don't do the copy. */
4921 else if (passed_pointer
4922 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4923 TYPE_MODE (DECL_ARG_TYPE (parm)),
4924 DECL_ARG_TYPE (parm),
4926 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4929 tree type = DECL_ARG_TYPE (parm);
4931 /* This sequence may involve a library call perhaps clobbering
4932 registers that haven't been copied to pseudos yet. */
4934 push_to_sequence (conversion_insns);
4936 if (TYPE_SIZE (type) == 0
4937 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4938 /* This is a variable sized object. */
4939 copy = gen_rtx_MEM (BLKmode,
4940 allocate_dynamic_stack_space
4941 (expr_size (parm), NULL_RTX,
4942 TYPE_ALIGN (type)));
4944 copy = assign_stack_temp (TYPE_MODE (type),
4945 int_size_in_bytes (type), 1);
4946 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4947 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4949 store_expr (parm, copy, 0);
4950 emit_move_insn (parmreg, XEXP (copy, 0));
4951 if (current_function_check_memory_usage)
4952 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4953 XEXP (copy, 0), Pmode,
4954 GEN_INT (int_size_in_bytes (type)),
4955 TYPE_MODE (sizetype),
4956 GEN_INT (MEMORY_USE_RW),
4957 TYPE_MODE (integer_type_node));
4958 conversion_insns = get_insns ();
4962 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4964 /* In any case, record the parm's desired stack location
4965 in case we later discover it must live in the stack.
4967 If it is a COMPLEX value, store the stack location for both halves. */
4970 if (GET_CODE (parmreg) == CONCAT)
4971 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4973 regno = REGNO (parmreg);
4975 if (regno >= max_parm_reg)
4978 int old_max_parm_reg = max_parm_reg;
4980 /* It's slow to expand this one register at a time,
4981 but it's also rare and we need max_parm_reg to be
4982 precisely correct. */
4983 max_parm_reg = regno + 1;
4984 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4985 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4986 old_max_parm_reg * sizeof (rtx));
4987 bzero ((char *) (new + old_max_parm_reg),
4988 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4989 parm_reg_stack_loc = new;
4992 if (GET_CODE (parmreg) == CONCAT)
4994 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4996 regnor = REGNO (gen_realpart (submode, parmreg));
4997 regnoi = REGNO (gen_imagpart (submode, parmreg));
4999 if (stack_parm != 0)
5001 parm_reg_stack_loc[regnor]
5002 = gen_realpart (submode, stack_parm);
5003 parm_reg_stack_loc[regnoi]
5004 = gen_imagpart (submode, stack_parm);
5008 parm_reg_stack_loc[regnor] = 0;
5009 parm_reg_stack_loc[regnoi] = 0;
5013 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
5015 /* Mark the register as eliminable if we did no conversion
5016 and it was copied from memory at a fixed offset,
5017 and the arg pointer was not copied to a pseudo-reg.
5018 If the arg pointer is a pseudo reg or the offset formed
5019 an invalid address, such memory-equivalences
5020 as we make here would screw up life analysis for it. */
5021 if (nominal_mode == passed_mode
5024 && GET_CODE (stack_parm) == MEM
5025 && stack_offset.var == 0
5026 && reg_mentioned_p (virtual_incoming_args_rtx,
5027 XEXP (stack_parm, 0)))
5029 rtx linsn = get_last_insn ();
5032 /* Mark complex types separately. */
5033 if (GET_CODE (parmreg) == CONCAT)
5034 /* Scan backwards for the set of the real and imaginary parts. */
5036 for (sinsn = linsn; sinsn != 0;
5037 sinsn = prev_nonnote_insn (sinsn))
5039 set = single_set (sinsn);
5041 && SET_DEST (set) == regno_reg_rtx [regnoi])
5043 = gen_rtx_EXPR_LIST (REG_EQUIV,
5044 parm_reg_stack_loc[regnoi],
5047 && SET_DEST (set) == regno_reg_rtx [regnor])
5049 = gen_rtx_EXPR_LIST (REG_EQUIV,
5050 parm_reg_stack_loc[regnor],
5053 else if ((set = single_set (linsn)) != 0
5054 && SET_DEST (set) == parmreg)
5056 = gen_rtx_EXPR_LIST (REG_EQUIV,
5057 stack_parm, REG_NOTES (linsn));
5060 /* For pointer data type, suggest pointer register. */
5061 if (POINTER_TYPE_P (TREE_TYPE (parm)))
5062 mark_reg_pointer (parmreg,
5063 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
5068 /* Value must be stored in the stack slot STACK_PARM
5069 during function execution. */
5071 if (promoted_mode != nominal_mode)
5073 /* Conversion is required. */
5074 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5076 emit_move_insn (tempreg, validize_mem (entry_parm));
5078 push_to_sequence (conversion_insns);
5079 entry_parm = convert_to_mode (nominal_mode, tempreg,
5080 TREE_UNSIGNED (TREE_TYPE (parm)));
5083 /* ??? This may need a big-endian conversion on sparc64. */
5084 stack_parm = change_address (stack_parm, nominal_mode,
5087 conversion_insns = get_insns ();
5092 if (entry_parm != stack_parm)
5094 if (stack_parm == 0)
5097 = assign_stack_local (GET_MODE (entry_parm),
5098 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
5099 /* If this is a memory ref that contains aggregate components,
5100 mark it as such for cse and loop optimize. */
5101 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
5104 if (promoted_mode != nominal_mode)
5106 push_to_sequence (conversion_insns);
5107 emit_move_insn (validize_mem (stack_parm),
5108 validize_mem (entry_parm));
5109 conversion_insns = get_insns ();
5113 emit_move_insn (validize_mem (stack_parm),
5114 validize_mem (entry_parm));
5116 if (current_function_check_memory_usage)
5118 push_to_sequence (conversion_insns);
5119 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
5120 XEXP (stack_parm, 0), Pmode,
5121 GEN_INT (GET_MODE_SIZE (GET_MODE
5123 TYPE_MODE (sizetype),
5124 GEN_INT (MEMORY_USE_RW),
5125 TYPE_MODE (integer_type_node));
5127 conversion_insns = get_insns ();
5130 DECL_RTL (parm) = stack_parm;
5133 /* If this "parameter" was the place where we are receiving the
5134 function's incoming structure pointer, set up the result. */
5135 if (parm == function_result_decl)
5137 tree result = DECL_RESULT (fndecl);
5138 tree restype = TREE_TYPE (result);
5141 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
5143 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
5144 AGGREGATE_TYPE_P (restype));
5147 if (TREE_THIS_VOLATILE (parm))
5148 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
5149 if (TREE_READONLY (parm))
5150 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
5153 /* Output all parameter conversion instructions (possibly including calls)
5154 now that all parameters have been copied out of hard registers. */
5155 emit_insns (conversion_insns);
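/* Illustrative sketch: the conversion insns are collected on a separate
   sequence while the parameters are scanned and only emitted here, once every
   parameter is safe in a pseudo, because a conversion may call a library
   routine that clobbers incoming argument registers.  The compiled-out toy
   below shows the same defer-then-flush pattern with a plain array; all names
   in it are invented for the example.  */
#if 0
#include <stdio.h>

#define MAX_PENDING 8

static int pending[MAX_PENDING];
static int n_pending;

static void
defer_conversion (int parm)
{
  pending[n_pending++] = parm;      /* queue the work, do not run it yet */
}

int
main (void)
{
  int i;

  /* First pass: every parm is copied out of its hard register; conversions
     that might clobber other incoming registers are only queued.  */
  for (i = 0; i < 3; i++)
    defer_conversion (i);

  /* Second pass: all parms are safe, so the queued work can run.  */
  for (i = 0; i < n_pending; i++)
    printf ("converting parm %d\n", pending[i]);
  return 0;
}
#endif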
5157 last_parm_insn = get_last_insn ();
5159 current_function_args_size = stack_args_size.constant;
5161 /* Adjust function incoming argument size for alignment and minimum size. */
5164 #ifdef REG_PARM_STACK_SPACE
5165 #ifndef MAYBE_REG_PARM_STACK_SPACE
5166 current_function_args_size = MAX (current_function_args_size,
5167 REG_PARM_STACK_SPACE (fndecl));
5171 #ifdef STACK_BOUNDARY
5172 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5174 current_function_args_size
5175 = ((current_function_args_size + STACK_BYTES - 1)
5176 / STACK_BYTES) * STACK_BYTES;
5179 #ifdef ARGS_GROW_DOWNWARD
5180 current_function_arg_offset_rtx
5181 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5182 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
5183 size_int (-stack_args_size.constant)),
5184 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
5186 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5189 /* See how many bytes, if any, of its args a function should try to pop on return. */
5192 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5193 current_function_args_size);
5195 /* For stdarg.h function, save info about
5196 regs and stack space used by the named args. */
5199 current_function_args_info = args_so_far;
5201 /* Set the rtx used for the function return value. Put this in its
5202 own variable so any optimizers that need this information don't have
5203 to include tree.h. Do this here so it gets done when an inlined
5204 function gets output. */
5206 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
5209 /* Indicate whether REGNO is an incoming argument to the current function
5210 that was promoted to a wider mode. If so, return the RTX for the
5211 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5212 that REGNO is promoted from and whether the promotion was signed or
5215 #ifdef PROMOTE_FUNCTION_ARGS
5218 promoted_input_arg (regno, pmode, punsignedp)
5220 enum machine_mode *pmode;
5225 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5226 arg = TREE_CHAIN (arg))
5227 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5228 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5229 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5231 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5232 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5234 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5235 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5236 && mode != DECL_MODE (arg))
5238 *pmode = DECL_MODE (arg);
5239 *punsignedp = unsignedp;
5240 return DECL_INCOMING_RTL (arg);
5249 /* Compute the size and offset from the start of the stacked arguments for a
5250 parm passed in mode PASSED_MODE and with type TYPE.
5252 INITIAL_OFFSET_PTR points to the current offset into the stacked arguments.
5255 The starting offset and size for this parm are returned in *OFFSET_PTR
5256 and *ARG_SIZE_PTR, respectively.
5258 IN_REGS is non-zero if the argument will be passed in registers. It will
5259 never be set if REG_PARM_STACK_SPACE is not defined.
5261 FNDECL is the function in which the argument was defined.
5263 There are two types of rounding that are done. The first, controlled by
5264 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5265 list to be aligned to the specific boundary (in bits). This rounding
5266 affects the initial and starting offsets, but not the argument size.
5268 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5269 optionally rounds the size of the parm to PARM_BOUNDARY. The
5270 initial offset is not affected by this rounding, while the size always
5271 is and the starting offset may be. */
5273 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
5274 initial_offset_ptr is positive because locate_and_pad_parm's
5275 callers pass in the total size of args so far as
5276 initial_offset_ptr. arg_size_ptr is always positive. */
5279 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5280 initial_offset_ptr, offset_ptr, arg_size_ptr)
5281 enum machine_mode passed_mode;
5284 tree fndecl ATTRIBUTE_UNUSED;
5285 struct args_size *initial_offset_ptr;
5286 struct args_size *offset_ptr;
5287 struct args_size *arg_size_ptr;
5290 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5291 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5292 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5294 #ifdef REG_PARM_STACK_SPACE
5295 /* If we have found a stack parm before we reach the end of the
5296 area reserved for registers, skip that area. */
5299 int reg_parm_stack_space = 0;
5301 #ifdef MAYBE_REG_PARM_STACK_SPACE
5302 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5304 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5306 if (reg_parm_stack_space > 0)
5308 if (initial_offset_ptr->var)
5310 initial_offset_ptr->var
5311 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5312 size_int (reg_parm_stack_space));
5313 initial_offset_ptr->constant = 0;
5315 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5316 initial_offset_ptr->constant = reg_parm_stack_space;
5319 #endif /* REG_PARM_STACK_SPACE */
5321 arg_size_ptr->var = 0;
5322 arg_size_ptr->constant = 0;
5324 #ifdef ARGS_GROW_DOWNWARD
5325 if (initial_offset_ptr->var)
5327 offset_ptr->constant = 0;
5328 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
5329 initial_offset_ptr->var);
5333 offset_ptr->constant = - initial_offset_ptr->constant;
5334 offset_ptr->var = 0;
5336 if (where_pad != none
5337 && (TREE_CODE (sizetree) != INTEGER_CST
5338 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5339 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5340 SUB_PARM_SIZE (*offset_ptr, sizetree);
5341 if (where_pad != downward)
5342 pad_to_arg_alignment (offset_ptr, boundary);
5343 if (initial_offset_ptr->var)
5345 arg_size_ptr->var = size_binop (MINUS_EXPR,
5346 size_binop (MINUS_EXPR,
5348 initial_offset_ptr->var),
5353 arg_size_ptr->constant = (- initial_offset_ptr->constant
5354 - offset_ptr->constant);
5356 #else /* !ARGS_GROW_DOWNWARD */
5358 #ifdef REG_PARM_STACK_SPACE
5359 || REG_PARM_STACK_SPACE (fndecl) > 0
5361 /* For the gcc-2_95-branch we want to make sure not to break something
5362 on platforms which pass arguments in registers but don't define
5363 REG_PARM_STACK_SPACE. So we force the original behaviour here. */
5367 pad_to_arg_alignment (initial_offset_ptr, boundary);
5369 *offset_ptr = *initial_offset_ptr;
5371 #ifdef PUSH_ROUNDING
5372 if (passed_mode != BLKmode)
5373 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5376 /* Pad_below needs the pre-rounded size to know how much to pad below
5377 so this must be done before rounding up. */
5378 if (where_pad == downward
5379 /* However, BLKmode args passed in regs have their padding done elsewhere.
5380 The stack slot must be able to hold the entire register. */
5381 && !(in_regs && passed_mode == BLKmode))
5382 pad_below (offset_ptr, passed_mode, sizetree);
5384 if (where_pad != none
5385 && (TREE_CODE (sizetree) != INTEGER_CST
5386 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5387 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5389 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5390 #endif /* ARGS_GROW_DOWNWARD */
5393 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5394 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5397 pad_to_arg_alignment (offset_ptr, boundary)
5398 struct args_size *offset_ptr;
5401 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5403 if (boundary > BITS_PER_UNIT)
5405 if (offset_ptr->var)
5408 #ifdef ARGS_GROW_DOWNWARD
5413 (ARGS_SIZE_TREE (*offset_ptr),
5414 boundary / BITS_PER_UNIT);
5415 offset_ptr->constant = 0; /*?*/
5418 offset_ptr->constant =
5419 #ifdef ARGS_GROW_DOWNWARD
5420 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5422 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5427 #ifndef ARGS_GROW_DOWNWARD
5429 pad_below (offset_ptr, passed_mode, sizetree)
5430 struct args_size *offset_ptr;
5431 enum machine_mode passed_mode;
5434 if (passed_mode != BLKmode)
5436 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5437 offset_ptr->constant
5438 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5439 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5440 - GET_MODE_SIZE (passed_mode));
5444 if (TREE_CODE (sizetree) != INTEGER_CST
5445 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5447 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5448 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5450 ADD_PARM_SIZE (*offset_ptr, s2);
5451 SUB_PARM_SIZE (*offset_ptr, sizetree);
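/* Illustrative sketch: for downward padding the offset is advanced by the pad
   amount, so a small argument ends up in the high-address end of a
   PARM_BOUNDARY-sized slot.  The compiled-out snippet below uses assumed
   values chosen only for the example.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int bits_per_unit = 8;
  int parm_boundary = 32;           /* assumed PARM_BOUNDARY in bits */
  int mode_bits = 16;               /* a 2-byte argument, e.g. HImode */
  int mode_size = mode_bits / bits_per_unit;
  int slot_bytes, pad;

  /* Round the bit size up to PARM_BOUNDARY and convert to bytes: 4.  */
  slot_bytes = (mode_bits + parm_boundary - 1)
               / parm_boundary * parm_boundary / bits_per_unit;

  /* The value is preceded by this many bytes of padding: 2.  */
  pad = slot_bytes - mode_size;

  printf ("slot = %d bytes, padding below = %d bytes\n", slot_bytes, pad);
  return 0;
}
#endif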
5457 #ifdef ARGS_GROW_DOWNWARD
5459 round_down (value, divisor)
5463 return size_binop (MULT_EXPR,
5464 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5465 size_int (divisor));
5469 /* Walk the tree of blocks describing the binding levels within a function
5470 and warn about uninitialized variables.
5471 This is done after calling flow_analysis and before global_alloc
5472 clobbers the pseudo-regs to hard regs. */
5475 uninitialized_vars_warning (block)
5478 register tree decl, sub;
5479 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5481 if (TREE_CODE (decl) == VAR_DECL
5482 /* These warnings are unreliable for aggregates
5483 because assigning the fields one by one can fail to convince
5484 flow.c that the entire aggregate was initialized.
5485 Unions are troublesome because members may be shorter. */
5486 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5487 && DECL_RTL (decl) != 0
5488 && GET_CODE (DECL_RTL (decl)) == REG
5489 /* Global optimizations can make it difficult to determine if a
5490 particular variable has been initialized. However, a VAR_DECL
5491 with a nonzero DECL_INITIAL had an initializer, so do not
5492 claim it is potentially uninitialized.
5494 We do not care about the actual value in DECL_INITIAL, so we do
5495 not worry that it may be a dangling pointer. */
5496 && DECL_INITIAL (decl) == NULL_TREE
5497 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5498 warning_with_decl (decl,
5499 "`%s' might be used uninitialized in this function");
5500 if (TREE_CODE (decl) == VAR_DECL
5501 && DECL_RTL (decl) != 0
5502 && GET_CODE (DECL_RTL (decl)) == REG
5503 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5504 warning_with_decl (decl,
5505 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5507 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5508 uninitialized_vars_warning (sub);
5511 /* Do the appropriate part of uninitialized_vars_warning
5512 but for arguments instead of local variables. */
5515 setjmp_args_warning ()
5518 for (decl = DECL_ARGUMENTS (current_function_decl);
5519 decl; decl = TREE_CHAIN (decl))
5520 if (DECL_RTL (decl) != 0
5521 && GET_CODE (DECL_RTL (decl)) == REG
5522 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5523 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5526 /* If this function calls setjmp, put all vars into the stack
5527 unless they were declared `register'. */
5530 setjmp_protect (block)
5533 register tree decl, sub;
5534 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5535 if ((TREE_CODE (decl) == VAR_DECL
5536 || TREE_CODE (decl) == PARM_DECL)
5537 && DECL_RTL (decl) != 0
5538 && (GET_CODE (DECL_RTL (decl)) == REG
5539 || (GET_CODE (DECL_RTL (decl)) == MEM
5540 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5541 /* If this variable came from an inline function, it must be
5542 that its life doesn't overlap the setjmp. If there was a
5543 setjmp in the function, it would already be in memory. We
5544 must exclude such variables because their DECL_RTL might be
5545 set to strange things such as virtual_stack_vars_rtx. */
5546 && ! DECL_FROM_INLINE (decl)
5548 #ifdef NON_SAVING_SETJMP
5549 /* If longjmp doesn't restore the registers,
5550 don't put anything in them. */
5554 ! DECL_REGISTER (decl)))
5555 put_var_into_stack (decl);
5556 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5557 setjmp_protect (sub);
5560 /* Like the previous function, but for args instead of local variables. */
5563 setjmp_protect_args ()
5566 for (decl = DECL_ARGUMENTS (current_function_decl);
5567 decl; decl = TREE_CHAIN (decl))
5568 if ((TREE_CODE (decl) == VAR_DECL
5569 || TREE_CODE (decl) == PARM_DECL)
5570 && DECL_RTL (decl) != 0
5571 && (GET_CODE (DECL_RTL (decl)) == REG
5572 || (GET_CODE (DECL_RTL (decl)) == MEM
5573 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5575 /* If longjmp doesn't restore the registers,
5576 don't put anything in them. */
5577 #ifdef NON_SAVING_SETJMP
5581 ! DECL_REGISTER (decl)))
5582 put_var_into_stack (decl);
5585 /* Return the context-pointer register corresponding to DECL,
5586 or 0 if it does not need one. */
5589 lookup_static_chain (decl)
5592 tree context = decl_function_context (decl);
5596 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5599 /* We treat inline_function_decl as an alias for the current function
5600 because that is the inline function whose vars, types, etc.
5601 are being merged into the current function.
5602 See expand_inline_function. */
5603 if (context == current_function_decl || context == inline_function_decl)
5604 return virtual_stack_vars_rtx;
5606 for (link = context_display; link; link = TREE_CHAIN (link))
5607 if (TREE_PURPOSE (link) == context)
5608 return RTL_EXPR_RTL (TREE_VALUE (link));
5613 /* Convert a stack slot address ADDR for variable VAR
5614 (from a containing function)
5615 into an address valid in this function (using a static chain). */
5618 fix_lexical_addr (addr, var)
5623 HOST_WIDE_INT displacement;
5624 tree context = decl_function_context (var);
5625 struct function *fp;
5628 /* If this is the present function, we need not do anything. */
5629 if (context == current_function_decl || context == inline_function_decl)
5632 for (fp = outer_function_chain; fp; fp = fp->next)
5633 if (fp->decl == context)
5639 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5640 addr = XEXP (XEXP (addr, 0), 0);
5642 /* Decode given address as base reg plus displacement. */
5643 if (GET_CODE (addr) == REG)
5644 basereg = addr, displacement = 0;
5645 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5646 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5650 /* We accept vars reached via the containing function's
5651 incoming arg pointer and via its stack variables pointer. */
5652 if (basereg == fp->internal_arg_pointer)
5654 /* If reached via arg pointer, get the arg pointer value
5655 out of that function's stack frame.
5657 There are two cases: If a separate ap is needed, allocate a
5658 slot in the outer function for it and dereference it that way.
5659 This is correct even if the real ap is actually a pseudo.
5660 Otherwise, just adjust the offset from the frame pointer to compensate. */
5663 #ifdef NEED_SEPARATE_AP
5666 if (fp->arg_pointer_save_area == 0)
5667 fp->arg_pointer_save_area
5668 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5670 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5671 addr = memory_address (Pmode, addr);
5673 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5675 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5676 base = lookup_static_chain (var);
5680 else if (basereg == virtual_stack_vars_rtx)
5682 /* This is the same code as lookup_static_chain, duplicated here to
5683 avoid an extra call to decl_function_context. */
5686 for (link = context_display; link; link = TREE_CHAIN (link))
5687 if (TREE_PURPOSE (link) == context)
5689 base = RTL_EXPR_RTL (TREE_VALUE (link));
5697 /* Use same offset, relative to appropriate static chain or argument pointer. */
5699 return plus_constant (base, displacement);
5702 /* Return the address of the trampoline for entering nested fn FUNCTION.
5703 If necessary, allocate a trampoline (in the stack frame)
5704 and emit rtl to initialize its contents (at entry to this function). */
5707 trampoline_address (function)
5713 struct function *fp;
5716 /* Find an existing trampoline and return it. */
5717 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5718 if (TREE_PURPOSE (link) == function)
5720 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5722 for (fp = outer_function_chain; fp; fp = fp->next)
5723 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5724 if (TREE_PURPOSE (link) == function)
5726 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5728 return round_trampoline_addr (tramp);
5731 /* None exists; we must make one. */
5733 /* Find the `struct function' for the function containing FUNCTION. */
5735 fn_context = decl_function_context (function);
5736 if (fn_context != current_function_decl
5737 && fn_context != inline_function_decl)
5738 for (fp = outer_function_chain; fp; fp = fp->next)
5739 if (fp->decl == fn_context)
5742 /* Allocate run-time space for this trampoline
5743 (usually in the defining function's stack frame). */
5744 #ifdef ALLOCATE_TRAMPOLINE
5745 tramp = ALLOCATE_TRAMPOLINE (fp);
5747 /* If rounding needed, allocate extra space
5748 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5749 #ifdef TRAMPOLINE_ALIGNMENT
5750 #define TRAMPOLINE_REAL_SIZE \
5751 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5753 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5756 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5758 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5761 /* Record the trampoline for reuse and note it for later initialization
5762 by expand_function_end. */
5765 push_obstacks (fp->function_maybepermanent_obstack,
5766 fp->function_maybepermanent_obstack);
5767 rtlexp = make_node (RTL_EXPR);
5768 RTL_EXPR_RTL (rtlexp) = tramp;
5769 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5774 /* Make the RTL_EXPR node temporary, not momentary, so that the
5775 trampoline_list doesn't become garbage. */
5776 int momentary = suspend_momentary ();
5777 rtlexp = make_node (RTL_EXPR);
5778 resume_momentary (momentary);
5780 RTL_EXPR_RTL (rtlexp) = tramp;
5781 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5784 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5785 return round_trampoline_addr (tramp);
5788 /* Given a trampoline address,
5789 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5792 round_trampoline_addr (tramp)
5795 #ifdef TRAMPOLINE_ALIGNMENT
5796 /* Round address up to desired boundary. */
5797 rtx temp = gen_reg_rtx (Pmode);
5798 temp = expand_binop (Pmode, add_optab, tramp,
5799 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5800 temp, 0, OPTAB_LIB_WIDEN);
5801 tramp = expand_binop (Pmode, and_optab, temp,
5802 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5803 temp, 0, OPTAB_LIB_WIDEN);
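/* Illustrative sketch: the two expand_binop calls above emit RTL for the
   usual add-then-mask way of rounding an address up to a power-of-two
   boundary.  The compiled-out snippet below does the same computation on a
   plain integer, assuming a 16-byte alignment for the example.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned long addr = 0x1003;
  unsigned long align = 16;   /* assumed TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT */
  unsigned long rounded;

  /* Adding align - 1 and masking the low bits (~(align - 1) == -align for a
     power of two) rounds 0x1003 up to 0x1010.  */
  rounded = (addr + align - 1) & ~(align - 1);

  printf ("0x%lx rounds up to 0x%lx\n", addr, rounded);
  return 0;
}
#endif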
5808 /* The functions identify_blocks and reorder_blocks provide a way to
5809 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5810 duplicate portions of the RTL code. Call identify_blocks before
5811 changing the RTL, and call reorder_blocks after. */
5813 /* Put all this function's BLOCK nodes including those that are chained
5814 onto the first block into a vector, and return it.
5815 Also store in each NOTE for the beginning or end of a block
5816 the index of that block in the vector.
5817 The arguments are BLOCK, the chain of top-level blocks of the function,
5818 and INSNS, the insn chain of the function. */
5821 identify_blocks (block, insns)
5829 int next_block_number = 1;
5830 int current_block_number = 1;
5836 n_blocks = all_blocks (block, 0);
5837 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5838 block_stack = (int *) alloca (n_blocks * sizeof (int));
5840 all_blocks (block, block_vector);
5842 for (insn = insns; insn; insn = NEXT_INSN (insn))
5843 if (GET_CODE (insn) == NOTE)
5845 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5847 block_stack[depth++] = current_block_number;
5848 current_block_number = next_block_number;
5849 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5851 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5853 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5854 current_block_number = block_stack[--depth];
5858 if (n_blocks != next_block_number)
5861 return block_vector;
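/* Illustrative sketch: the scan above gives each BLOCK_BEG note a fresh
   number and restores the enclosing block's number at the matching BLOCK_END
   by keeping an explicit stack.  The compiled-out toy below applies the same
   bookkeeping to a string of '(' and ')' standing in for the begin/end
   notes; it is only an example, not part of this file.  */
#if 0
#include <stdio.h>

int
main (void)
{
  const char *notes = "(()(()))";
  int stack[16];
  int depth = 0;
  int next_block = 1;
  int current_block = 1;
  int i;

  for (i = 0; notes[i]; i++)
    if (notes[i] == '(')
      {
        stack[depth++] = current_block;   /* remember the enclosing block */
        current_block = next_block;
        printf ("begin block %d\n", next_block++);
      }
    else
      {
        printf ("end block %d\n", current_block);
        current_block = stack[--depth];   /* back to the enclosing block */
      }
  return 0;
}
#endif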
5864 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5865 and a revised instruction chain, rebuild the tree structure
5866 of BLOCK nodes to correspond to the new order of RTL.
5867 The new block tree is inserted below TOP_BLOCK.
5868 Returns the current top-level block. */
5871 reorder_blocks (block_vector, block, insns)
5876 tree current_block = block;
5879 if (block_vector == 0)
5882 /* Prune the old trees away, so that they don't get in the way. */
5883 BLOCK_SUBBLOCKS (current_block) = 0;
5884 BLOCK_CHAIN (current_block) = 0;
5886 for (insn = insns; insn; insn = NEXT_INSN (insn))
5887 if (GET_CODE (insn) == NOTE)
5889 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5891 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5892 /* If we have seen this block before, copy it. */
5893 if (TREE_ASM_WRITTEN (block))
5894 block = copy_node (block);
5895 BLOCK_SUBBLOCKS (block) = 0;
5896 TREE_ASM_WRITTEN (block) = 1;
5897 BLOCK_SUPERCONTEXT (block) = current_block;
5898 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5899 BLOCK_SUBBLOCKS (current_block) = block;
5900 current_block = block;
5901 NOTE_SOURCE_FILE (insn) = 0;
5903 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5905 BLOCK_SUBBLOCKS (current_block)
5906 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5907 current_block = BLOCK_SUPERCONTEXT (current_block);
5908 NOTE_SOURCE_FILE (insn) = 0;
5912 BLOCK_SUBBLOCKS (current_block)
5913 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5914 return current_block;
5917 /* Reverse the order of elements in the chain T of blocks,
5918 and return the new head of the chain (old last element). */
5924 register tree prev = 0, decl, next;
5925 for (decl = t; decl; decl = next)
5927 next = BLOCK_CHAIN (decl);
5928 BLOCK_CHAIN (decl) = prev;
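/* Illustrative sketch, not part of the original source: the loop above
   is the standard in-place reversal of a singly linked chain.  Shown
   here on a plain C list so the pointer shuffling is easier to follow;
   the returned node is the old last element.  */

struct example_node { struct example_node *next; };

static struct example_node *
reverse_chain_example (t)
     struct example_node *t;
{
  struct example_node *prev = 0, *next;

  for (; t; t = next)
    {
      next = t->next;
      t->next = prev;
      prev = t;
    }
  return prev;
}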
5934 /* Count the subblocks of the list starting with BLOCK, and list them
5935 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all blocks. */
5939 all_blocks (block, vector)
5947 TREE_ASM_WRITTEN (block) = 0;
5949 /* Record this block. */
5951 vector[n_blocks] = block;
5955 /* Record the subblocks, and their subblocks... */
5956 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5957 vector ? vector + n_blocks : 0);
5958 block = BLOCK_CHAIN (block);
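/* Illustrative sketch, not part of the original source: all_blocks does
   a preorder walk over a block and its chained siblings, optionally
   recording each node and always returning the total count.  The same
   walk on a plain C tree type (with the TREE_ASM_WRITTEN clearing
   omitted) looks like this.  */

struct example_block
{
  struct example_block *subblocks;
  struct example_block *chain;
};

static int
walk_blocks_example (b, vec)
     struct example_block *b;
     struct example_block **vec;
{
  int n = 0;

  while (b)
    {
      if (vec)
	vec[n] = b;
      n++;
      n += walk_blocks_example (b->subblocks, vec ? vec + n : 0);
      b = b->chain;
    }
  return n;
}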
5964 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5965 and initialize static variables for generating RTL for the statements of that function. */
5969 init_function_start (subr, filename, line)
5974 init_stmt_for_function ();
5976 cse_not_expected = ! optimize;
5978 /* Caller save not needed yet. */
5979 caller_save_needed = 0;
5981 /* No stack slots have been made yet. */
5982 stack_slot_list = 0;
5984 /* There is no stack slot for handling nonlocal gotos. */
5985 nonlocal_goto_handler_slots = 0;
5986 nonlocal_goto_stack_level = 0;
5988 /* No labels have been declared for nonlocal use. */
5989 nonlocal_labels = 0;
5990 nonlocal_goto_handler_labels = 0;
5992 /* No function calls so far in this function. */
5993 function_call_count = 0;
5995 /* No parm regs have been allocated.
5996 (This is important for output_inline_function.) */
5997 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5999 /* Initialize the RTL mechanism. */
6002 /* Initialize the queue of pending postincrement and postdecrements,
6003 and some other info in expr.c. */
6006 /* We haven't done register allocation yet. */
6009 init_const_rtx_hash_table ();
6011 current_function_name = (*decl_printable_name) (subr, 2);
6013 /* Nonzero if this is a nested function that uses a static chain. */
6015 current_function_needs_context
6016 = (decl_function_context (current_function_decl) != 0
6017 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6019 /* Set if a call to setjmp is seen. */
6020 current_function_calls_setjmp = 0;
6022 /* Set if a call to longjmp is seen. */
6023 current_function_calls_longjmp = 0;
6025 current_function_calls_alloca = 0;
6026 current_function_has_nonlocal_label = 0;
6027 current_function_has_nonlocal_goto = 0;
6028 current_function_contains_functions = 0;
6029 current_function_is_leaf = 0;
6030 current_function_sp_is_unchanging = 0;
6031 current_function_uses_only_leaf_regs = 0;
6032 current_function_has_computed_jump = 0;
6033 current_function_is_thunk = 0;
6035 current_function_returns_pcc_struct = 0;
6036 current_function_returns_struct = 0;
6037 current_function_epilogue_delay_list = 0;
6038 current_function_uses_const_pool = 0;
6039 current_function_uses_pic_offset_table = 0;
6040 current_function_cannot_inline = 0;
6042 /* We have not yet needed to make a label to jump to for tail-recursion. */
6043 tail_recursion_label = 0;
6045 /* We haven't had a need to make a save area for ap yet. */
6047 arg_pointer_save_area = 0;
6049 /* No stack slots allocated yet. */
6052 /* No SAVE_EXPRs in this function yet. */
6055 /* No RTL_EXPRs in this function yet. */
6058 /* Set up to allocate temporaries. */
6061 /* Within function body, compute a type's size as soon as it is laid out. */
6062 immediate_size_expand++;
6064 /* We haven't made any trampolines for this function yet. */
6065 trampoline_list = 0;
6067 init_pending_stack_adjust ();
6068 inhibit_defer_pop = 0;
6070 current_function_outgoing_args_size = 0;
6072 /* Prevent ever trying to delete the first instruction of a function.
6073 Also tell final how to output a linenum before the function prologue.
6074 Note linenums could be missing, e.g. when compiling a Java .class file. */
6076 emit_line_note (filename, line);
6078 /* Make sure first insn is a note even if we don't want linenums.
6079 This makes sure the first insn will never be deleted.
6080 Also, final expects a note to appear there. */
6081 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6083 /* Set flags used by final.c. */
6084 if (aggregate_value_p (DECL_RESULT (subr)))
6086 #ifdef PCC_STATIC_STRUCT_RETURN
6087 current_function_returns_pcc_struct = 1;
6089 current_function_returns_struct = 1;
6092 /* Warn if this value is an aggregate type,
6093 regardless of which calling convention we are using for it. */
6094 if (warn_aggregate_return
6095 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6096 warning ("function returns an aggregate");
6098 current_function_returns_pointer
6099 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6101 /* Indicate that we need to distinguish between the return value of the
6102 present function and the return value of a function being called. */
6103 rtx_equal_function_value_matters = 1;
6105 /* Indicate that we have not instantiated virtual registers yet. */
6106 virtuals_instantiated = 0;
6108 /* Indicate we have no need of a frame pointer yet. */
6109 frame_pointer_needed = 0;
6111 /* By default assume not varargs or stdarg. */
6112 current_function_varargs = 0;
6113 current_function_stdarg = 0;
6116 /* Indicate that the current function uses extra args
6117 not explicitly mentioned in the argument list in any fashion. */
6122 current_function_varargs = 1;
6125 /* Expand a call to __main at the beginning of a possible main function. */
6127 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6128 #undef HAS_INIT_SECTION
6129 #define HAS_INIT_SECTION
6132 #ifndef GEN_CALL__MAIN
6133 #define GEN_CALL__MAIN \
6135 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0, \
6141 expand_main_function ()
6143 #if defined(INVOKE__main) || !defined (HAS_INIT_SECTION)
6145 #endif /* not HAS_INIT_SECTION */
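/* Illustrative sketch, not part of the original source: on targets
   without an initialization section, the library call emitted above
   makes the program behave roughly as if main had an explicit call to
   __main before any user code, so that static constructors and similar
   start-up work get run.  */
#if 0
extern void __main ();

int
main (argc, argv)
     int argc;
     char **argv;
{
  __main ();	/* inserted implicitly by expand_main_function */
  /* ... user code ... */
  return 0;
}
#endif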
6148 extern struct obstack permanent_obstack;
6150 /* Start the RTL for a new function, and set variables used for emitting RTL.
6152 SUBR is the FUNCTION_DECL node.
6153 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6154 the function's parameters, which must be run at any return statement. */
6157 expand_function_start (subr, parms_have_cleanups)
6159 int parms_have_cleanups;
6163 rtx last_ptr = NULL_RTX;
6165 /* Make sure volatile mem refs aren't considered
6166 valid operands of arithmetic insns. */
6167 init_recog_no_volatile ();
6169 /* Set this before generating any memory accesses. */
6170 current_function_check_memory_usage
6171 = (flag_check_memory_usage
6172 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
6174 current_function_instrument_entry_exit
6175 = (flag_instrument_function_entry_exit
6176 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6178 /* If function gets a static chain arg, store it in the stack frame.
6179 Do this first, so it gets the first stack slot offset. */
6180 if (current_function_needs_context)
6182 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6184 /* Delay copying static chain if it is not a register to avoid
6185 conflicts with regs used for parameters. */
6186 if (! SMALL_REGISTER_CLASSES
6187 || GET_CODE (static_chain_incoming_rtx) == REG)
6188 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6191 /* If the parameters of this function need cleaning up, get a label
6192 for the beginning of the code which executes those cleanups. This must
6193 be done before doing anything with return_label. */
6194 if (parms_have_cleanups)
6195 cleanup_label = gen_label_rtx ();
6199 /* Make the label for return statements to jump to, if this machine
6200 does not have a one-instruction return and uses an epilogue,
6201 or if it returns a structure, or if it has parm cleanups. */
6203 if (cleanup_label == 0 && HAVE_return
6204 && ! current_function_instrument_entry_exit
6205 && ! current_function_returns_pcc_struct
6206 && ! (current_function_returns_struct && ! optimize))
6209 return_label = gen_label_rtx ();
6211 return_label = gen_label_rtx ();
6214 /* Initialize rtx used to return the value. */
6215 /* Do this before assign_parms so that we copy the struct value address
6216 before any library calls that assign parms might generate. */
6218 /* Decide whether to return the value in memory or in a register. */
6219 if (aggregate_value_p (DECL_RESULT (subr)))
6221 /* Returning something that won't go in a register. */
6222 register rtx value_address = 0;
6224 #ifdef PCC_STATIC_STRUCT_RETURN
6225 if (current_function_returns_pcc_struct)
6227 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6228 value_address = assemble_static_space (size);
6233 /* Expect to be passed the address of a place to store the value.
6234 If it is passed as an argument, assign_parms will take care of it. */
6236 if (struct_value_incoming_rtx)
6238 value_address = gen_reg_rtx (Pmode);
6239 emit_move_insn (value_address, struct_value_incoming_rtx);
6244 DECL_RTL (DECL_RESULT (subr))
6245 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6246 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
6247 AGGREGATE_TYPE_P (TREE_TYPE
6252 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6253 /* If return mode is void, this decl rtl should not be used. */
6254 DECL_RTL (DECL_RESULT (subr)) = 0;
6255 else if (parms_have_cleanups || current_function_instrument_entry_exit)
6257 /* If function will end with cleanup code for parms,
6258 compute the return values into a pseudo reg,
6259 which we will copy into the true return register
6260 after the cleanups are done. */
6262 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
6264 #ifdef PROMOTE_FUNCTION_RETURN
6265 tree type = TREE_TYPE (DECL_RESULT (subr));
6266 int unsignedp = TREE_UNSIGNED (type);
6268 mode = promote_mode (type, mode, &unsignedp, 1);
6271 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
6274 /* Scalar, returned in a register. */
6276 #ifdef FUNCTION_OUTGOING_VALUE
6277 DECL_RTL (DECL_RESULT (subr))
6278 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6280 DECL_RTL (DECL_RESULT (subr))
6281 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6284 /* Mark this reg as the function's return value. */
6285 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6287 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6288 /* Needed because we may need to move this to memory
6289 in case it's a named return value whose address is taken. */
6290 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6294 /* Initialize rtx for parameters and local variables.
6295 In some cases this requires emitting insns. */
6297 assign_parms (subr, 0);
6299 /* Copy the static chain now if it wasn't a register. The delay is to
6300 avoid conflicts with the parameter passing registers. */
6302 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6303 if (GET_CODE (static_chain_incoming_rtx) != REG)
6304 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6306 /* The following was moved from init_function_start.
6307 The move is supposed to make sdb output more accurate. */
6308 /* Indicate the beginning of the function body,
6309 as opposed to parm setup. */
6310 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6312 /* If doing stupid allocation, mark parms as born here. */
6314 if (GET_CODE (get_last_insn ()) != NOTE)
6315 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6316 parm_birth_insn = get_last_insn ();
6320 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6321 use_variable (regno_reg_rtx[i]);
6323 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6324 use_variable (current_function_internal_arg_pointer);
6327 context_display = 0;
6328 if (current_function_needs_context)
6330 /* Fetch static chain values for containing functions. */
6331 tem = decl_function_context (current_function_decl);
6332 /* If not doing stupid register allocation, copy the static chain
6333 pointer into a pseudo. If we have small register classes, copy
6334 the value from memory if static_chain_incoming_rtx is a REG. If
6335 we do stupid register allocation, we use the stack address generated above. */
6337 if (tem && ! obey_regdecls)
6339 /* If the static chain originally came in a register, put it back
6340 there, then move it out in the next insn. The reason for
6341 this peculiar code is to satisfy function integration. */
6342 if (SMALL_REGISTER_CLASSES
6343 && GET_CODE (static_chain_incoming_rtx) == REG)
6344 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6345 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6350 tree rtlexp = make_node (RTL_EXPR);
6352 RTL_EXPR_RTL (rtlexp) = last_ptr;
6353 context_display = tree_cons (tem, rtlexp, context_display);
6354 tem = decl_function_context (tem);
6357 /* Chain thru stack frames, assuming pointer to next lexical frame
6358 is found at the place we always store it. */
6359 #ifdef FRAME_GROWS_DOWNWARD
6360 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6362 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
6363 memory_address (Pmode, last_ptr)));
6365 /* If we are not optimizing, ensure that we know that this
6366 piece of context is live over the entire function. */
6368 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6373 if (current_function_instrument_entry_exit)
6375 rtx fun = DECL_RTL (current_function_decl);
6376 if (GET_CODE (fun) == MEM)
6377 fun = XEXP (fun, 0);
6380 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6382 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6384 hard_frame_pointer_rtx),
6388 /* After the display initializations is where the tail-recursion label
6389 should go, if we end up needing one. Ensure we have a NOTE here
6390 since some things (like trampolines) get placed before this. */
6391 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6393 /* Evaluate now the sizes of any types declared among the arguments. */
6394 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6396 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6397 EXPAND_MEMORY_USE_BAD);
6398 /* Flush the queue in case this parameter declaration has side-effects. */
6403 /* Make sure there is a line number after the function entry setup code. */
6404 force_next_line_note ();
6407 /* Generate RTL for the end of the current function.
6408 FILENAME and LINE are the current position in the source file.
6410 It is up to language-specific callers to do cleanups for parameters--
6411 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6414 expand_function_end (filename, line, end_bindings)
6422 #ifdef TRAMPOLINE_TEMPLATE
6423 static rtx initial_trampoline;
6426 #ifdef NON_SAVING_SETJMP
6427 /* Don't put any variables in registers if we call setjmp
6428 on a machine that fails to restore the registers. */
6429 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6431 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6432 setjmp_protect (DECL_INITIAL (current_function_decl));
6434 setjmp_protect_args ();
6438 /* Save the argument pointer if a save area was made for it. */
6439 if (arg_pointer_save_area)
6441 /* arg_pointer_save_area may not be a valid memory address, so we
6442 have to check it and fix it if necessary. */
6445 emit_move_insn (validize_mem (arg_pointer_save_area),
6446 virtual_incoming_args_rtx);
6447 seq = gen_sequence ();
6449 emit_insn_before (seq, tail_recursion_reentry);
6452 /* Initialize any trampolines required by this function. */
6453 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6455 tree function = TREE_PURPOSE (link);
6456 rtx context = lookup_static_chain (function);
6457 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6458 #ifdef TRAMPOLINE_TEMPLATE
6463 #ifdef TRAMPOLINE_TEMPLATE
6464 /* First make sure this compilation has a template for
6465 initializing trampolines. */
6466 if (initial_trampoline == 0)
6468 end_temporary_allocation ();
6470 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6471 resume_temporary_allocation ();
6475 /* Generate insns to initialize the trampoline. */
6477 tramp = round_trampoline_addr (XEXP (tramp, 0));
6478 #ifdef TRAMPOLINE_TEMPLATE
6479 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6480 emit_block_move (blktramp, initial_trampoline,
6481 GEN_INT (TRAMPOLINE_SIZE),
6482 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
6484 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6488 /* Put those insns at entry to the containing function (this one). */
6489 emit_insns_before (seq, tail_recursion_reentry);
6492 /* If we are doing stack checking and this function makes calls,
6493 do a stack probe at the start of the function to ensure we have enough
6494 space for another stack frame. */
6495 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6499 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6500 if (GET_CODE (insn) == CALL_INSN)
6503 probe_stack_range (STACK_CHECK_PROTECT,
6504 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6507 emit_insns_before (seq, tail_recursion_reentry);
6512 /* Warn about unused parms if extra warnings were specified. */
6513 if (warn_unused && extra_warnings)
6517 for (decl = DECL_ARGUMENTS (current_function_decl);
6518 decl; decl = TREE_CHAIN (decl))
6519 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6520 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6521 warning_with_decl (decl, "unused parameter `%s'");
6524 /* Delete handlers for nonlocal gotos if nothing uses them. */
6525 if (nonlocal_goto_handler_slots != 0
6526 && ! current_function_has_nonlocal_label)
6529 /* End any sequences that failed to be closed due to syntax errors. */
6530 while (in_sequence_p ())
6533 /* Outside function body, can't compute type's actual size
6534 until next function's body starts. */
6535 immediate_size_expand--;
6537 /* If doing stupid register allocation,
6538 mark register parms as dying here. */
6543 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6544 use_variable (regno_reg_rtx[i]);
6546 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
6548 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
6550 use_variable (XEXP (tem, 0));
6551 use_variable_after (XEXP (tem, 0), parm_birth_insn);
6554 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6555 use_variable (current_function_internal_arg_pointer);
6558 clear_pending_stack_adjust ();
6559 do_pending_stack_adjust ();
6561 /* Mark the end of the function body.
6562 If control reaches this insn, the function can drop through
6563 without returning a value. */
6564 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6566 /* Must mark the last line number note in the function, so that the test
6567 coverage code can avoid counting the last line twice. This just tells
6568 the code to ignore the immediately following line note, since there
6569 already exists a copy of this note somewhere above. This line number
6570 note is still needed for debugging though, so we can't delete it. */
6571 if (flag_test_coverage)
6572 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6574 /* Output a line number for the end of the function.
6575 SDB depends on this. */
6576 emit_line_note_force (filename, line);
6578 /* Output the label for the actual return from the function,
6579 if one is expected. This happens either because a function epilogue
6580 is used instead of a return instruction, or because a return was done
6581 with a goto in order to run local cleanups, or because of pcc-style
6582 structure returning. */
6585 emit_label (return_label);
6587 /* C++ uses this. */
6589 expand_end_bindings (0, 0, 0);
6591 /* Now handle any leftover exception regions that may have been
6592 created for the parameters. */
6594 rtx last = get_last_insn ();
6597 expand_leftover_cleanups ();
6599 /* If the above emitted any code, make sure we jump around it. */
6600 if (last != get_last_insn ())
6602 label = gen_label_rtx ();
6603 last = emit_jump_insn_after (gen_jump (label), last);
6604 last = emit_barrier_after (last);
6609 if (current_function_instrument_entry_exit)
6611 rtx fun = DECL_RTL (current_function_decl);
6612 if (GET_CODE (fun) == MEM)
6613 fun = XEXP (fun, 0);
6616 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6618 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6620 hard_frame_pointer_rtx),
6624 /* If we had calls to alloca, and this machine needs
6625 an accurate stack pointer to exit the function,
6626 insert some code to save and restore the stack pointer. */
6627 #ifdef EXIT_IGNORE_STACK
6628 if (! EXIT_IGNORE_STACK)
6630 if (current_function_calls_alloca)
6634 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6635 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6638 /* If scalar return value was computed in a pseudo-reg,
6639 copy that to the hard return register. */
6640 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6641 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6642 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6643 >= FIRST_PSEUDO_REGISTER))
6645 rtx real_decl_result;
6647 #ifdef FUNCTION_OUTGOING_VALUE
6649 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6650 current_function_decl);
6653 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6654 current_function_decl);
6656 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6657 /* If this is a BLKmode structure being returned in registers, then use
6658 the mode computed in expand_return. */
6659 if (GET_MODE (real_decl_result) == BLKmode)
6660 PUT_MODE (real_decl_result,
6661 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6662 emit_move_insn (real_decl_result,
6663 DECL_RTL (DECL_RESULT (current_function_decl)));
6664 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
6666 /* The delay slot scheduler assumes that current_function_return_rtx
6667 holds the hard register containing the return value, not a temporary
6669 current_function_return_rtx = real_decl_result;
6672 /* If returning a structure, arrange to return the address of the value
6673 in a place where debuggers expect to find it.
6675 If returning a structure PCC style,
6676 the caller also depends on this value.
6677 And current_function_returns_pcc_struct is not necessarily set. */
6678 if (current_function_returns_struct
6679 || current_function_returns_pcc_struct)
6681 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6682 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6683 #ifdef FUNCTION_OUTGOING_VALUE
6685 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6686 current_function_decl);
6689 = FUNCTION_VALUE (build_pointer_type (type),
6690 current_function_decl);
6693 /* Mark this as a function return value so integrate will delete the
6694 assignment and USE below when inlining this function. */
6695 REG_FUNCTION_VALUE_P (outgoing) = 1;
6697 emit_move_insn (outgoing, value_address);
6698 use_variable (outgoing);
6701 /* If this is an implementation of __throw, do what's necessary to
6702 communicate between __builtin_eh_return and the epilogue. */
6703 expand_eh_return ();
6705 /* Output a return insn if we are using one.
6706 Otherwise, let the rtl chain end here, to drop through
6707 into the epilogue. */
6712 emit_jump_insn (gen_return ());
6717 /* Fix up any gotos that jumped out to the outermost
6718 binding level of the function.
6719 Must follow emitting RETURN_LABEL. */
6721 /* If you have any cleanups to do at this point,
6722 and they need to create temporary variables,
6723 then you will lose. */
6724 expand_fixups (get_insns ());
6727 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
6729 static int *prologue;
6730 static int *epilogue;
6732 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6733 or a single insn). */
6735 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6737 record_insns (insns)
6742 if (GET_CODE (insns) == SEQUENCE)
6744 int len = XVECLEN (insns, 0);
6745 vec = (int *) oballoc ((len + 1) * sizeof (int));
6748 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6752 vec = (int *) oballoc (2 * sizeof (int));
6753 vec[0] = INSN_UID (insns);
6759 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6762 contains (insn, vec)
6768 if (GET_CODE (insn) == INSN
6769 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6772 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6773 for (j = 0; vec[j]; j++)
6774 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6780 for (j = 0; vec[j]; j++)
6781 if (INSN_UID (insn) == vec[j])
6786 #endif /* HAVE_prologue || HAVE_epilogue */
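/* Illustrative sketch, not part of the original source: record_insns
   builds a vector of INSN_UIDs terminated by a zero entry, and
   'contains' then counts how many of an insn's UIDs occur in such a
   vector.  The membership test, reduced to plain integers, is just:  */

static int
uid_vector_contains_example (vec, uid)
     const int *vec;
     int uid;
{
  int j;

  for (j = 0; vec[j]; j++)	/* vector ends with a zero UID */
    if (vec[j] == uid)
      return 1;
  return 0;
}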
6788 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6789 this into place with notes indicating where the prologue ends and where
6790 the epilogue begins. Update the basic block information when possible. */
6793 thread_prologue_and_epilogue_insns (f)
6794 rtx f ATTRIBUTE_UNUSED;
6797 #ifdef HAVE_prologue
6798 rtx prologue_end = NULL_RTX;
6802 #ifdef HAVE_prologue
6808 seq = gen_prologue();
6811 /* Retain a map of the prologue insns. */
6812 if (GET_CODE (seq) != SEQUENCE)
6814 prologue = record_insns (seq);
6816 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6817 seq = gen_sequence ();
6820 /* If optimization is off, and perhaps in an empty function,
6821 the entry block will have no successors. */
6822 if (ENTRY_BLOCK_PTR->succ)
6824 /* Can't deal with multiple successors of the entry block. */
6825 if (ENTRY_BLOCK_PTR->succ->succ_next)
6828 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
6832 emit_insn_after (seq, f);
6837 #ifdef HAVE_epilogue
6842 rtx tail = get_last_insn ();
6844 /* ??? This is ghastly. If function returns were not done via uses,
6845 but via mark_regs_live_at_end, we could use insert_insn_on_edge
6846 and all of this ugliness would go away. */
6851 /* If the exit block has no non-fake predecessors, we don't
6852 need an epilogue. Furthermore, only pay attention to the
6853 fallthru predecessors; if (conditional) return insns were
6854 generated, by definition we do not need to emit epilogue insns. */
6857 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6858 if ((e->flags & EDGE_FAKE) == 0
6859 && (e->flags & EDGE_FALLTHRU) != 0)
6864 /* We can't handle multiple epilogues -- if one is needed,
6865 we won't be able to place it multiple times.
6867 ??? Fix epilogue expanders to not assume they are the
6868 last thing done compiling the function. Either that
6869 or copy_rtx each insn.
6871 ??? Blah, it's not a simple expression to assert that
6872 we've exactly one fallthru exit edge. */
6877 /* ??? If the last insn of the basic block is a jump, then we
6878 are creating a new basic block. Wimp out and leave these
6879 insns outside any block. */
6880 if (GET_CODE (tail) == JUMP_INSN)
6886 rtx prev, seq, first_use;
6888 /* Move the USE insns at the end of a function onto a list. */
6890 if (GET_CODE (prev) == BARRIER
6891 || GET_CODE (prev) == NOTE)
6892 prev = prev_nonnote_insn (prev);
6896 && GET_CODE (prev) == INSN
6897 && GET_CODE (PATTERN (prev)) == USE)
6899 /* If the end of the block is the use, grab hold of something
6900 else so that we emit barriers etc in the right place. */
6904 tail = PREV_INSN (tail);
6905 while (GET_CODE (tail) == INSN
6906 && GET_CODE (PATTERN (tail)) == USE);
6912 prev = prev_nonnote_insn (prev);
6917 NEXT_INSN (use) = first_use;
6918 PREV_INSN (first_use) = use;
6921 NEXT_INSN (use) = NULL_RTX;
6925 && GET_CODE (prev) == INSN
6926 && GET_CODE (PATTERN (prev)) == USE);
6929 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6930 epilogue insns, the USE insns at the end of a function,
6931 the jump insn that returns, and then a BARRIER. */
6933 if (GET_CODE (tail) != BARRIER)
6935 prev = next_nonnote_insn (tail);
6936 if (!prev || GET_CODE (prev) != BARRIER)
6937 emit_barrier_after (tail);
6940 seq = gen_epilogue ();
6942 tail = emit_jump_insn_after (seq, tail);
6944 /* Insert the USE insns immediately before the return insn, which
6945 must be the last instruction emitted in the sequence. */
6947 emit_insns_before (first_use, tail);
6948 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6950 /* Update the tail of the basic block. */
6954 /* Retain a map of the epilogue insns. */
6955 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6962 commit_edge_insertions ();
6964 #ifdef HAVE_prologue
6969 /* GDB handles `break f' by setting a breakpoint on the first
6970 line note *after* the prologue. Which means (1) that if
6971 there are line number notes before where we inserted the
6972 prologue we should move them, and (2) if there is no such
6973 note, then we should generate one at the prologue. */
6975 for (insn = prologue_end; insn ; insn = prev)
6977 prev = PREV_INSN (insn);
6978 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
6980 /* Note that we cannot reorder the first insn in the
6981 chain, since rest_of_compilation relies on that
6982 remaining constant. Do the next best thing. */
6985 emit_line_note_after (NOTE_SOURCE_FILE (insn),
6986 NOTE_LINE_NUMBER (insn),
6988 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
6991 reorder_insns (insn, insn, prologue_end);
6995 insn = NEXT_INSN (prologue_end);
6996 if (! insn || GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) <= 0)
6998 for (insn = next_active_insn (f); insn ; insn = PREV_INSN (insn))
7000 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7002 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7003 NOTE_LINE_NUMBER (insn),
7013 /* Reposition the prologue-end and epilogue-begin notes after instruction
7014 scheduling and delayed branch scheduling. */
7017 reposition_prologue_and_epilogue_notes (f)
7018 rtx f ATTRIBUTE_UNUSED;
7020 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7021 /* Reposition the prologue and epilogue notes. */
7028 register rtx insn, note = 0;
7030 /* Scan from the beginning until we reach the last prologue insn.
7031 We apparently can't depend on basic_block_{head,end} after reorg has run. */
7033 for (len = 0; prologue[len]; len++)
7035 for (insn = f; len && insn; insn = NEXT_INSN (insn))
7037 if (GET_CODE (insn) == NOTE)
7039 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7042 else if ((len -= contains (insn, prologue)) == 0)
7045 /* Find the prologue-end note if we haven't already, and
7046 move it to just after the last prologue insn. */
7049 for (note = insn; (note = NEXT_INSN (note));)
7050 if (GET_CODE (note) == NOTE
7051 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7055 next = NEXT_INSN (note);
7057 /* Whether or not we can depend on BLOCK_HEAD,
7058 attempt to keep it up-to-date. */
7059 if (BLOCK_HEAD (0) == note)
7060 BLOCK_HEAD (0) = next;
7063 add_insn_after (note, insn);
7070 register rtx insn, note = 0;
7072 /* Scan from the end until we reach the first epilogue insn.
7073 We apparently can't depend on basic_block_{head,end} after reorg has run. */
7075 for (len = 0; epilogue[len]; len++)
7077 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
7079 if (GET_CODE (insn) == NOTE)
7081 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7084 else if ((len -= contains (insn, epilogue)) == 0)
7086 /* Find the epilogue-begin note if we haven't already, and
7087 move it to just before the first epilogue insn. */
7090 for (note = insn; (note = PREV_INSN (note));)
7091 if (GET_CODE (note) == NOTE
7092 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7096 /* Whether or not we can depend on BLOCK_HEAD,
7097 attempt to keep it up-to-date. */
7099 && BLOCK_HEAD (n_basic_blocks-1) == insn)
7100 BLOCK_HEAD (n_basic_blocks-1) = note;
7103 add_insn_before (note, insn);
7108 #endif /* HAVE_prologue or HAVE_epilogue */