1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "coretypes.h"
40 #include "langhooks.h"
45 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
46 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
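/* Illustrative example (assumed values): with PREFERRED_STACK_BOUNDARY of
128 bits and BITS_PER_UNIT of 8, STACK_BYTES evaluates to 16, so stack
argument sizes are rounded to 16-byte multiples. */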
48 /* Data structure and subroutines used within expand_call. */
52 /* Tree node for this argument. */
54 /* Mode for value; TYPE_MODE unless promoted. */
55 enum machine_mode mode;
56 /* Current RTL value for argument, or 0 if it isn't precomputed. */
58 /* Initially-computed RTL value for argument; only for const functions. */
60 /* Register to pass this argument in, 0 if passed on stack, or a
61 PARALLEL if the arg is to be copied into multiple non-contiguous
64 /* Register to pass this argument in when generating tail call sequence.
65 This is not the same register as for normal calls on machines with
68 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
69 form for emit_group_move. */
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
74 /* Number of bytes to put in registers. 0 means put the whole arg in registers.
75 Also 0 if not passed in registers. */
77 /* Nonzero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
82 /* Some fields packaged up for locate_and_pad_parm. */
83 struct locate_and_pad_arg_data locate;
84 /* Location on the stack at which parameter should be stored. The store
85 has already been done if STACK == VALUE. */
87 /* Location on the stack of the start of this argument slot. This can
88 differ from STACK if this arg pads downward. This location is known
89 to be aligned to FUNCTION_ARG_BOUNDARY. */
91 /* Place that this stack area has been saved, if needed. */
93 /* If an argument's alignment does not permit direct copying into registers,
94 copy in smaller-sized pieces into pseudos. These are stored in a
95 block pointed to by this field. The next field says how many
96 word-sized pseudos we made. */
101 /* A vector of one char per byte of stack space. A byte is nonzero if
102 the corresponding stack location has been used.
103 This vector is used to prevent a function call within an argument from
104 clobbering any stack already set up. */
105 static char *stack_usage_map;
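/* Illustrative example: after a 4-byte argument has been stored at offset 8
in the argument block, stack_usage_map[8] through stack_usage_map[11] are
nonzero, so a nested call expanded while computing another argument will
not reuse those bytes. */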
107 /* Size of STACK_USAGE_MAP. */
108 static int highest_outgoing_arg_in_use;
110 /* A bitmap of virtual-incoming stack space. A bit is set if the corresponding
111 stack location's tail call argument has already been stored into the stack.
112 This bitmap is used to prevent sibling call optimization if the function tries
113 to use the parent's incoming argument slots when they have already been
114 overwritten with tail call arguments. */
115 static sbitmap stored_args_map;
117 /* stack_arg_under_construction is nonzero when an argument may be
118 initialized with a constructor call (including a C function that
119 returns a BLKmode struct) and expand_call must take special action
120 to make sure the object being constructed does not overlap the
121 argument list for the constructor call. */
122 int stack_arg_under_construction;
124 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
125 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
127 static void precompute_register_parameters (int, struct arg_data *, int *);
128 static int store_one_arg (struct arg_data *, rtx, int, int, int);
129 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
130 static int finalize_must_preallocate (int, int, struct arg_data *,
132 static void precompute_arguments (int, int, struct arg_data *);
133 static int compute_argument_block_size (int, struct args_size *, int);
134 static void initialize_argument_information (int, struct arg_data *,
135 struct args_size *, int, tree,
136 tree, CUMULATIVE_ARGS *, int,
137 rtx *, int *, int *, int *,
139 static void compute_argument_addresses (struct arg_data *, rtx, int);
140 static rtx rtx_for_function_call (tree, tree);
141 static void load_register_parameters (struct arg_data *, int, rtx *, int,
143 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
144 enum machine_mode, int, va_list);
145 static int special_function_p (tree, int);
146 static int check_sibcall_argument_overlap_1 (rtx);
147 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
149 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
151 static tree split_complex_values (tree);
152 static tree split_complex_types (tree);
154 #ifdef REG_PARM_STACK_SPACE
155 static rtx save_fixed_argument_area (int, rtx, int *, int *);
156 static void restore_fixed_argument_area (rtx, rtx, int, int);
159 /* Force FUNEXP into a form suitable for the address of a CALL,
160 and return that as an rtx. Also load the static chain register
161 if FNDECL is a nested function.
163 CALL_FUSAGE points to a variable holding the prospective
164 CALL_INSN_FUNCTION_USAGE information. */
167 prepare_call_address (rtx funexp, rtx static_chain_value,
168 rtx *call_fusage, int reg_parm_seen, int sibcallp)
170 /* Make a valid memory address and copy constants through pseudo-regs,
171 but not for a constant address if -fno-function-cse. */
172 if (GET_CODE (funexp) != SYMBOL_REF)
173 /* If we are using registers for parameters, force the
174 function address into a register now. */
175 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
176 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
177 : memory_address (FUNCTION_MODE, funexp));
180 #ifndef NO_FUNCTION_CSE
181 if (optimize && ! flag_no_function_cse)
182 funexp = force_reg (Pmode, funexp);
186 if (static_chain_value != 0)
188 static_chain_value = convert_memory_address (Pmode, static_chain_value);
189 emit_move_insn (static_chain_rtx, static_chain_value);
191 if (REG_P (static_chain_rtx))
192 use_reg (call_fusage, static_chain_rtx);
198 /* Generate instructions to call function FUNEXP,
199 and optionally pop the results.
200 The CALL_INSN is the first insn generated.
202 FNDECL is the declaration node of the function. This is given to the
203 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
205 FUNTYPE is the data type of the function. This is given to the macro
206 RETURN_POPS_ARGS to determine whether this function pops its own args.
207 We used to allow an identifier for library functions, but that doesn't
208 work when the return type is an aggregate type and the calling convention
209 says that the pointer to this aggregate is to be popped by the callee.
211 STACK_SIZE is the number of bytes of arguments on the stack,
212 ROUNDED_STACK_SIZE is that number rounded up to
213 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
214 both to put into the call insn and to generate explicit popping
217 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
218 It is zero if this call doesn't want a structure value.
220 NEXT_ARG_REG is the rtx that results from executing
221 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
222 just after all the args have had their registers assigned.
223 This could be whatever you like, but normally it is the first
224 arg-register beyond those used for args in this call,
225 or 0 if all the arg-registers are used in this call.
226 It is passed on to `gen_call' so you can put this info in the call insn.
228 VALREG is a hard register in which a value is returned,
229 or 0 if the call does not return a value.
231 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
232 the args to this call were processed.
233 We restore `inhibit_defer_pop' to that value.
235 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
236 denote registers used by the called function. */
239 emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
240 tree funtype ATTRIBUTE_UNUSED,
241 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
242 HOST_WIDE_INT rounded_stack_size,
243 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
244 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
245 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
246 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
248 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
250 int already_popped = 0;
251 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
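/* Illustrative note: on most targets RETURN_POPS_ARGS is simply 0; a nonzero
value (e.g. 4 on an ABI where the callee pops a 4-byte hidden aggregate-return
pointer) means the callee itself removes that many bytes of arguments when
it returns. */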
252 #if defined (HAVE_call) && defined (HAVE_call_value)
253 rtx struct_value_size_rtx;
254 struct_value_size_rtx = GEN_INT (struct_value_size);
257 #ifdef CALL_POPS_ARGS
258 n_popped += CALL_POPS_ARGS (* args_so_far);
261 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
262 and we don't want to load it into a register as an optimization,
263 because prepare_call_address already did it if it should be done. */
264 if (GET_CODE (funexp) != SYMBOL_REF)
265 funexp = memory_address (FUNCTION_MODE, funexp);
267 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
268 if ((ecf_flags & ECF_SIBCALL)
269 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
270 && (n_popped > 0 || stack_size == 0))
272 rtx n_pop = GEN_INT (n_popped);
275 /* If this subroutine pops its own args, record that in the call insn
276 if possible, for the sake of frame pointer elimination. */
279 pat = GEN_SIBCALL_VALUE_POP (valreg,
280 gen_rtx_MEM (FUNCTION_MODE, funexp),
281 rounded_stack_size_rtx, next_arg_reg,
284 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
285 rounded_stack_size_rtx, next_arg_reg, n_pop);
287 emit_call_insn (pat);
293 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
294 /* If the target has "call" or "call_value" insns, then prefer them
295 if no arguments are actually popped. If the target does not have
296 "call" or "call_value" insns, then we must use the popping versions
297 even if the call has no arguments to pop. */
298 #if defined (HAVE_call) && defined (HAVE_call_value)
299 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
300 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
302 if (HAVE_call_pop && HAVE_call_value_pop)
305 rtx n_pop = GEN_INT (n_popped);
308 /* If this subroutine pops its own args, record that in the call insn
309 if possible, for the sake of frame pointer elimination. */
312 pat = GEN_CALL_VALUE_POP (valreg,
313 gen_rtx_MEM (FUNCTION_MODE, funexp),
314 rounded_stack_size_rtx, next_arg_reg, n_pop);
316 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
317 rounded_stack_size_rtx, next_arg_reg, n_pop);
319 emit_call_insn (pat);
325 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
326 if ((ecf_flags & ECF_SIBCALL)
327 && HAVE_sibcall && HAVE_sibcall_value)
330 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
331 gen_rtx_MEM (FUNCTION_MODE, funexp),
332 rounded_stack_size_rtx,
333 next_arg_reg, NULL_RTX));
335 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
336 rounded_stack_size_rtx, next_arg_reg,
337 struct_value_size_rtx));
342 #if defined (HAVE_call) && defined (HAVE_call_value)
343 if (HAVE_call && HAVE_call_value)
346 emit_call_insn (GEN_CALL_VALUE (valreg,
347 gen_rtx_MEM (FUNCTION_MODE, funexp),
348 rounded_stack_size_rtx, next_arg_reg,
351 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
352 rounded_stack_size_rtx, next_arg_reg,
353 struct_value_size_rtx));
359 /* Find the call we just emitted. */
360 call_insn = last_call_insn ();
362 /* Mark memory as used for "pure" function call. */
363 if (ecf_flags & ECF_PURE)
367 gen_rtx_USE (VOIDmode,
368 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
371 /* Put the register usage information there. */
372 add_function_usage_to (call_insn, call_fusage);
374 /* If this is a const call, then set the insn's unchanging bit. */
375 if (ecf_flags & (ECF_CONST | ECF_PURE))
376 CONST_OR_PURE_CALL_P (call_insn) = 1;
378 /* If this call can't throw, attach a REG_EH_REGION reg note to that
380 if (ecf_flags & ECF_NOTHROW)
381 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
382 REG_NOTES (call_insn));
385 int rn = lookup_stmt_eh_region (fntree);
387 /* If rn < 0, then either (1) tree-ssa is not used or (2) the call doesn't
388 throw, which we already took care of. */
390 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
391 REG_NOTES (call_insn));
392 note_current_region_may_contain_throw ();
395 if (ecf_flags & ECF_NORETURN)
396 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
397 REG_NOTES (call_insn));
398 if (ecf_flags & ECF_ALWAYS_RETURN)
399 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
400 REG_NOTES (call_insn));
402 if (ecf_flags & ECF_RETURNS_TWICE)
404 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
405 REG_NOTES (call_insn));
406 current_function_calls_setjmp = 1;
409 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
411 /* Restore this now, so that we do defer pops for this call's args
412 if the context of the call as a whole permits. */
413 inhibit_defer_pop = old_inhibit_defer_pop;
418 CALL_INSN_FUNCTION_USAGE (call_insn)
419 = gen_rtx_EXPR_LIST (VOIDmode,
420 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
421 CALL_INSN_FUNCTION_USAGE (call_insn));
422 rounded_stack_size -= n_popped;
423 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
424 stack_pointer_delta -= n_popped;
427 if (!ACCUMULATE_OUTGOING_ARGS)
429 /* If returning from the subroutine does not automatically pop the args,
430 we need an instruction to pop them sooner or later.
431 Perhaps do it now; perhaps just record how much space to pop later.
433 If returning from the subroutine does pop the args, indicate that the
434 stack pointer will be changed. */
436 if (rounded_stack_size != 0)
438 if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN))
439 /* Just pretend we did the pop. */
440 stack_pointer_delta -= rounded_stack_size;
441 else if (flag_defer_pop && inhibit_defer_pop == 0
442 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
443 pending_stack_adjust += rounded_stack_size;
445 adjust_stack (rounded_stack_size_rtx);
448 /* When we accumulate outgoing args, we must avoid any stack manipulations.
449 Restore the stack pointer to its original value now. Usually
450 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
451 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
452 popping variants of functions exist as well.
454 ??? We may optimize similarly to defer_pop above, but it is
455 probably not worthwhile.
457 ??? It will be worthwhile to enable combine_stack_adjustments even for
460 anti_adjust_stack (GEN_INT (n_popped));
463 /* Determine if the function identified by NAME and FNDECL is one with
464 special properties we wish to know about.
466 For example, if the function might return more than one time (setjmp), then
467 set RETURNS_TWICE to a nonzero value.
469 Similarly set NORETURN if the function is in the longjmp family.
471 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
472 space from the stack such as alloca. */
475 special_function_p (tree fndecl, int flags)
477 if (fndecl && DECL_NAME (fndecl)
478 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
479 /* Exclude functions not at the file scope, or not `extern',
480 since they are not the magic functions we would otherwise
482 FIXME: this should be handled with attributes, not with this
483 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
484 because you can declare fork() inside a function if you
486 && (DECL_CONTEXT (fndecl) == NULL_TREE
487 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
488 && TREE_PUBLIC (fndecl))
490 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
491 const char *tname = name;
493 /* We assume that alloca will always be called by name. It
494 makes no sense to pass it as a pointer-to-function to
495 anything that does not understand its behavior. */
496 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
498 && ! strcmp (name, "alloca"))
499 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
501 && ! strcmp (name, "__builtin_alloca"))))
502 flags |= ECF_MAY_BE_ALLOCA;
504 /* Disregard prefix _, __ or __x. */
507 if (name[1] == '_' && name[2] == 'x')
509 else if (name[1] == '_')
518 && (! strcmp (tname, "setjmp")
519 || ! strcmp (tname, "setjmp_syscall")))
521 && ! strcmp (tname, "sigsetjmp"))
523 && ! strcmp (tname, "savectx")))
524 flags |= ECF_RETURNS_TWICE;
527 && ! strcmp (tname, "siglongjmp"))
528 flags |= ECF_NORETURN;
530 else if ((tname[0] == 'q' && tname[1] == 's'
531 && ! strcmp (tname, "qsetjmp"))
532 || (tname[0] == 'v' && tname[1] == 'f'
533 && ! strcmp (tname, "vfork")))
534 flags |= ECF_RETURNS_TWICE;
536 else if (tname[0] == 'l' && tname[1] == 'o'
537 && ! strcmp (tname, "longjmp"))
538 flags |= ECF_NORETURN;
544 /* Return nonzero when FNDECL represents a call to setjmp. */
547 setjmp_call_p (tree fndecl)
549 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
552 /* Return true when EXP contains an alloca call. */
554 alloca_call_p (tree exp)
556 if (TREE_CODE (exp) == CALL_EXPR
557 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
558 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
560 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
561 0) & ECF_MAY_BE_ALLOCA))
566 /* Detect flags (function attributes) from the function decl or type node. */
569 flags_from_decl_or_type (tree exp)
576 struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
577 type = TREE_TYPE (exp);
581 if (i->pure_function)
582 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
583 if (i->const_function)
584 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
587 /* The function exp may have the `malloc' attribute. */
588 if (DECL_IS_MALLOC (exp))
591 /* The function exp may have the `pure' attribute. */
592 if (DECL_IS_PURE (exp))
593 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
595 if (TREE_NOTHROW (exp))
596 flags |= ECF_NOTHROW;
598 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
599 flags |= ECF_LIBCALL_BLOCK | ECF_CONST;
601 flags = special_function_p (exp, flags);
603 else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
606 if (TREE_THIS_VOLATILE (exp))
607 flags |= ECF_NORETURN;
609 /* Mark if the function returns with the stack pointer depressed. We
610 cannot consider it pure or constant in that case. */
611 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
613 flags |= ECF_SP_DEPRESSED;
614 flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
620 /* Detect flags from a CALL_EXPR. */
623 call_expr_flags (tree t)
626 tree decl = get_callee_fndecl (t);
629 flags = flags_from_decl_or_type (decl);
632 t = TREE_TYPE (TREE_OPERAND (t, 0));
633 if (t && TREE_CODE (t) == POINTER_TYPE)
634 flags = flags_from_decl_or_type (TREE_TYPE (t));
642 /* Precompute all register parameters as described by ARGS, storing values
643 into fields within the ARGS array.
645 NUM_ACTUALS indicates the total number of elements in the ARGS array.
647 Set REG_PARM_SEEN if we encounter a register parameter. */
650 precompute_register_parameters (int num_actuals, struct arg_data *args,
657 for (i = 0; i < num_actuals; i++)
658 if (args[i].reg != 0 && ! args[i].pass_on_stack)
662 if (args[i].value == 0)
665 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
667 preserve_temp_slots (args[i].value);
671 /* If the value is a non-legitimate constant, force it into a
672 pseudo now. TLS symbols sometimes need a call to resolve. */
673 if (CONSTANT_P (args[i].value)
674 && !LEGITIMATE_CONSTANT_P (args[i].value))
675 args[i].value = force_reg (args[i].mode, args[i].value);
677 /* If we are to promote the function arg to a wider mode,
680 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
682 = convert_modes (args[i].mode,
683 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
684 args[i].value, args[i].unsignedp);
686 /* If we're going to have to load the value by parts, pull the
687 parts into pseudos. The part extraction process can involve
688 non-trivial computation. */
689 if (GET_CODE (args[i].reg) == PARALLEL)
691 tree type = TREE_TYPE (args[i].tree_value);
692 args[i].parallel_value
693 = emit_group_load_into_temps (args[i].reg, args[i].value,
694 type, int_size_in_bytes (type));
697 /* If the value is expensive, and we are inside an appropriately
698 short loop, put the value into a pseudo and then put the pseudo
701 For small register classes, also do this if this call uses
702 register parameters. This is to avoid reload conflicts while
703 loading the parameter registers. */
705 else if ((! (REG_P (args[i].value)
706 || (GET_CODE (args[i].value) == SUBREG
707 && REG_P (SUBREG_REG (args[i].value)))))
708 && args[i].mode != BLKmode
709 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
710 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
712 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
716 #ifdef REG_PARM_STACK_SPACE
718 /* The argument list is the property of the called routine and it
719 may clobber it. If the fixed area has been used for previous
720 parameters, we must save and restore it. */
723 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
728 /* Compute the boundary of the area that needs to be saved, if any. */
729 high = reg_parm_stack_space;
730 #ifdef ARGS_GROW_DOWNWARD
733 if (high > highest_outgoing_arg_in_use)
734 high = highest_outgoing_arg_in_use;
736 for (low = 0; low < high; low++)
737 if (stack_usage_map[low] != 0)
740 enum machine_mode save_mode;
745 while (stack_usage_map[--high] == 0)
749 *high_to_save = high;
751 num_to_save = high - low + 1;
752 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
754 /* If we don't have the required alignment, must do this
756 if ((low & (MIN (GET_MODE_SIZE (save_mode),
757 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
760 #ifdef ARGS_GROW_DOWNWARD
765 stack_area = gen_rtx_MEM (save_mode,
766 memory_address (save_mode,
767 plus_constant (argblock,
770 set_mem_align (stack_area, PARM_BOUNDARY);
771 if (save_mode == BLKmode)
773 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
774 emit_block_move (validize_mem (save_area), stack_area,
775 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
779 save_area = gen_reg_rtx (save_mode);
780 emit_move_insn (save_area, stack_area);
790 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
792 enum machine_mode save_mode = GET_MODE (save_area);
796 #ifdef ARGS_GROW_DOWNWARD
797 delta = -high_to_save;
801 stack_area = gen_rtx_MEM (save_mode,
802 memory_address (save_mode,
803 plus_constant (argblock, delta)));
804 set_mem_align (stack_area, PARM_BOUNDARY);
806 if (save_mode != BLKmode)
807 emit_move_insn (stack_area, save_area);
809 emit_block_move (stack_area, validize_mem (save_area),
810 GEN_INT (high_to_save - low_to_save + 1),
813 #endif /* REG_PARM_STACK_SPACE */
815 /* If any elements in ARGS refer to parameters that are to be passed in
816 registers, but not in memory, and whose alignment does not permit a
817 direct copy into registers, copy the values into a group of pseudos
818 which we will later copy into the appropriate hard registers.
820 Pseudos for each unaligned argument will be stored into the array
821 args[argnum].aligned_regs. The caller is responsible for deallocating
822 the aligned_regs array if it is nonzero. */
825 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
829 for (i = 0; i < num_actuals; i++)
830 if (args[i].reg != 0 && ! args[i].pass_on_stack
831 && args[i].mode == BLKmode
832 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
833 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
835 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
836 int endian_correction = 0;
840 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
841 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
845 args[i].n_aligned_regs
846 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
849 args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);
851 /* Structures smaller than a word are normally aligned to the
852 least significant byte. On a BYTES_BIG_ENDIAN machine,
853 this means we must skip the empty high order bytes when
854 calculating the bit offset. */
855 if (bytes < UNITS_PER_WORD
856 #ifdef BLOCK_REG_PADDING
857 && (BLOCK_REG_PADDING (args[i].mode,
858 TREE_TYPE (args[i].tree_value), 1)
864 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
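/* Illustrative example: with a 3-byte argument and BITS_PER_WORD of 32,
endian_correction is 32 - 24 = 8, i.e. the bit offset passed to
store_bit_field below skips the 8 bits of padding. */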
866 for (j = 0; j < args[i].n_aligned_regs; j++)
868 rtx reg = gen_reg_rtx (word_mode);
869 rtx word = operand_subword_force (args[i].value, j, BLKmode);
870 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
872 args[i].aligned_regs[j] = reg;
873 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
874 word_mode, word_mode);
876 /* There is no need to restrict this code to loading items
877 in TYPE_ALIGN sized hunks. The bitfield instructions can
878 load up entire word sized registers efficiently.
880 ??? This may not be needed anymore.
881 We used to emit a clobber here but that doesn't let later
882 passes optimize the instructions we emit. By storing 0 into
883 the register, later passes know that the first AND to zero out the
884 bitfield being set in the register is unnecessary. The store
885 of 0 will be deleted as will at least the first AND. */
887 emit_move_insn (reg, const0_rtx);
889 bytes -= bitsize / BITS_PER_UNIT;
890 store_bit_field (reg, bitsize, endian_correction, word_mode,
896 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
899 NUM_ACTUALS is the total number of parameters.
901 N_NAMED_ARGS is the total number of named arguments.
903 FNDECL is the tree code for the target of this call (if known)
905 ARGS_SO_FAR holds state needed by the target to know where to place
908 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
909 for arguments which are passed in registers.
911 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
912 and may be modified by this routine.
914 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
915 flags which may be modified by this routine.
917 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
918 that requires allocation of stack space.
920 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
921 the thunked-to function. */
924 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
925 struct arg_data *args,
926 struct args_size *args_size,
927 int n_named_args ATTRIBUTE_UNUSED,
928 tree actparms, tree fndecl,
929 CUMULATIVE_ARGS *args_so_far,
930 int reg_parm_stack_space,
931 rtx *old_stack_level, int *old_pending_adj,
932 int *must_preallocate, int *ecf_flags,
933 bool *may_tailcall, bool call_from_thunk_p)
935 /* 1 if scanning parms front to back, -1 if scanning back to front. */
938 /* Count arg position in order args appear. */
944 args_size->constant = 0;
947 /* In this loop, we consider args in the order they are written.
948 We fill up ARGS from the front or from the back if necessary
949 so that in any case the first arg to be pushed ends up at the front. */
951 if (PUSH_ARGS_REVERSED)
953 i = num_actuals - 1, inc = -1;
954 /* In this case, must reverse order of args
955 so that we compute and push the last arg first. */
962 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
963 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
965 tree type = TREE_TYPE (TREE_VALUE (p));
967 enum machine_mode mode;
969 args[i].tree_value = TREE_VALUE (p);
971 /* Replace erroneous argument with constant zero. */
972 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
973 args[i].tree_value = integer_zero_node, type = integer_type_node;
975 /* If TYPE is a transparent union, pass things the way we would
976 pass the first field of the union. We have already verified that
977 the modes are the same. */
978 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
979 type = TREE_TYPE (TYPE_FIELDS (type));
981 /* Decide where to pass this arg.
983 args[i].reg is nonzero if all or part is passed in registers.
985 args[i].partial is nonzero if part but not all is passed in registers,
986 and the exact value says how many bytes are passed in registers.
988 args[i].pass_on_stack is nonzero if the argument must at least be
989 computed on the stack. It may then be loaded back into registers
990 if args[i].reg is nonzero.
992 These decisions are driven by the FUNCTION_... macros and must agree
993 with those made by function.c. */
995 /* See if this argument should be passed by invisible reference. */
996 if (pass_by_reference (args_so_far, TYPE_MODE (type),
997 type, argpos < n_named_args))
1003 = reference_callee_copied (args_so_far, TYPE_MODE (type),
1004 type, argpos < n_named_args);
1006 /* If we're compiling a thunk, pass through invisible references
1007 instead of making a copy. */
1008 if (call_from_thunk_p
1010 && !TREE_ADDRESSABLE (type)
1011 && (base = get_base_address (args[i].tree_value))
1012 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1014 /* We can't use sibcalls if a callee-copied argument is
1015 stored in the current function's frame. */
1016 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1017 *may_tailcall = false;
1019 args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
1020 type = TREE_TYPE (args[i].tree_value);
1022 *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
1026 /* We make a copy of the object and pass the address to the
1027 function being called. */
1030 if (!COMPLETE_TYPE_P (type)
1031 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1032 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1033 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1034 STACK_CHECK_MAX_VAR_SIZE))))
1036 /* This is a variable-sized object. Make space on the stack
1038 rtx size_rtx = expr_size (TREE_VALUE (p));
1040 if (*old_stack_level == 0)
1042 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1043 *old_pending_adj = pending_stack_adjust;
1044 pending_stack_adjust = 0;
1047 copy = gen_rtx_MEM (BLKmode,
1048 allocate_dynamic_stack_space
1049 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1050 set_mem_attributes (copy, type, 1);
1053 copy = assign_temp (type, 0, 1, 0);
1055 store_expr (args[i].tree_value, copy, 0);
1058 *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
1060 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1063 = build_fold_addr_expr (make_tree (type, copy));
1064 type = TREE_TYPE (args[i].tree_value);
1065 *may_tailcall = false;
1069 mode = TYPE_MODE (type);
1070 unsignedp = TYPE_UNSIGNED (type);
1072 if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
1073 mode = promote_mode (type, mode, &unsignedp, 1);
1075 args[i].unsignedp = unsignedp;
1076 args[i].mode = mode;
1078 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1079 argpos < n_named_args);
1080 #ifdef FUNCTION_INCOMING_ARG
1081 /* If this is a sibling call and the machine has register windows, the
1082 register window has to be unwound before calling the routine, so
1083 arguments have to go into the incoming registers. */
1084 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1085 argpos < n_named_args);
1087 args[i].tail_call_reg = args[i].reg;
1092 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1093 argpos < n_named_args);
1095 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1097 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1098 it means that we are to pass this arg in the register(s) designated
1099 by the PARALLEL, but also to pass it in the stack. */
1100 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1101 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1102 args[i].pass_on_stack = 1;
1104 /* If this is an addressable type, we must preallocate the stack
1105 since we must evaluate the object into its final location.
1107 If this is to be passed in both registers and the stack, it is simpler
1109 if (TREE_ADDRESSABLE (type)
1110 || (args[i].pass_on_stack && args[i].reg != 0))
1111 *must_preallocate = 1;
1113 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1114 we cannot consider this function call constant. */
1115 if (TREE_ADDRESSABLE (type))
1116 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1118 /* Compute the stack-size of this argument. */
1119 if (args[i].reg == 0 || args[i].partial != 0
1120 || reg_parm_stack_space > 0
1121 || args[i].pass_on_stack)
1122 locate_and_pad_parm (mode, type,
1123 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1128 args[i].pass_on_stack ? 0 : args[i].partial,
1129 fndecl, args_size, &args[i].locate);
1130 #ifdef BLOCK_REG_PADDING
1132 /* The argument is passed entirely in registers. See at which
1133 end it should be padded. */
1134 args[i].locate.where_pad =
1135 BLOCK_REG_PADDING (mode, type,
1136 int_size_in_bytes (type) <= UNITS_PER_WORD);
1139 /* Update ARGS_SIZE, the total stack space for args so far. */
1141 args_size->constant += args[i].locate.size.constant;
1142 if (args[i].locate.size.var)
1143 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1145 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1146 have been used, etc. */
1148 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1149 argpos < n_named_args);
1153 /* Update ARGS_SIZE to contain the total size for the argument block.
1154 Return the original constant component of the argument block's size.
1156 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1157 for arguments passed in registers. */
1160 compute_argument_block_size (int reg_parm_stack_space,
1161 struct args_size *args_size,
1162 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1164 int unadjusted_args_size = args_size->constant;
1166 /* For accumulate outgoing args mode we don't need to align, since the frame
1167 will already be aligned. Align to STACK_BOUNDARY in order to prevent
1168 backends from generating misaligned frame sizes. */
1169 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1170 preferred_stack_boundary = STACK_BOUNDARY;
1172 /* Compute the actual size of the argument block required. The variable
1173 and constant sizes must be combined, the size may have to be rounded,
1174 and there may be a minimum required size. */
1178 args_size->var = ARGS_SIZE_TREE (*args_size);
1179 args_size->constant = 0;
1181 preferred_stack_boundary /= BITS_PER_UNIT;
1182 if (preferred_stack_boundary > 1)
1184 /* We don't handle this case yet. To handle it correctly we have
1185 to add the delta, round and subtract the delta.
1186 Currently no machine description requires this support. */
1187 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1188 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1191 if (reg_parm_stack_space > 0)
1194 = size_binop (MAX_EXPR, args_size->var,
1195 ssize_int (reg_parm_stack_space));
1197 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1198 /* The area corresponding to register parameters is not to count in
1199 the size of the block we need. So make the adjustment. */
1201 = size_binop (MINUS_EXPR, args_size->var,
1202 ssize_int (reg_parm_stack_space));
1208 preferred_stack_boundary /= BITS_PER_UNIT;
1209 if (preferred_stack_boundary < 1)
1210 preferred_stack_boundary = 1;
1211 args_size->constant = (((args_size->constant
1212 + stack_pointer_delta
1213 + preferred_stack_boundary - 1)
1214 / preferred_stack_boundary
1215 * preferred_stack_boundary)
1216 - stack_pointer_delta);
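/* Illustrative example: with args_size->constant == 20, stack_pointer_delta == 4
and a 16-byte preferred boundary, 20 + 4 rounds up to 32, so the constant
becomes 28 and the stack pointer ends up 16-byte aligned once the arguments
have been pushed. */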
1218 args_size->constant = MAX (args_size->constant,
1219 reg_parm_stack_space);
1221 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1222 args_size->constant -= reg_parm_stack_space;
1225 return unadjusted_args_size;
1228 /* Precompute parameters as needed for a function call.
1230 FLAGS is mask of ECF_* constants.
1232 NUM_ACTUALS is the number of arguments.
1234 ARGS is an array containing information for each argument; this
1235 routine fills in the INITIAL_VALUE and VALUE fields for each
1236 precomputed argument. */
1239 precompute_arguments (int flags, int num_actuals, struct arg_data *args)
1243 /* If this is a libcall, then precompute all arguments so that we do not
1244 get extraneous instructions emitted as part of the libcall sequence. */
1245 if ((flags & ECF_LIBCALL_BLOCK) == 0)
1248 for (i = 0; i < num_actuals; i++)
1250 enum machine_mode mode;
1252 /* If this is an addressable type, we cannot pre-evaluate it. */
1253 gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));
1255 args[i].initial_value = args[i].value
1256 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1258 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1259 if (mode != args[i].mode)
1262 = convert_modes (args[i].mode, mode,
1263 args[i].value, args[i].unsignedp);
1264 #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
1265 /* CSE will replace this only if it contains args[i].value
1266 pseudo, so convert it down to the declared mode using
1268 if (REG_P (args[i].value)
1269 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1271 args[i].initial_value
1272 = gen_lowpart_SUBREG (mode, args[i].value);
1273 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1274 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1282 /* Given the current state of MUST_PREALLOCATE and information about
1283 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1284 compute and return the final value for MUST_PREALLOCATE. */
1287 finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
1289 /* See if we have or want to preallocate stack space.
1291 If we would have to push a partially-in-regs parm
1292 before other stack parms, preallocate stack space instead.
1294 If the size of some parm is not a multiple of the required stack
1295 alignment, we must preallocate.
1297 If the total size of arguments that would otherwise create a copy in
1298 a temporary (such as a CALL) is more than half the total argument list
1299 size, preallocation is faster.
1301 Another reason to preallocate is if we have a machine (like the m88k)
1302 where stack alignment is required to be maintained between every
1303 pair of insns, not just when the call is made. However, we assume here
1304 that such machines either do not have push insns (and hence preallocation
1305 would occur anyway) or the problem is taken care of with
1308 if (! must_preallocate)
1310 int partial_seen = 0;
1311 int copy_to_evaluate_size = 0;
1314 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1316 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1318 else if (partial_seen && args[i].reg == 0)
1319 must_preallocate = 1;
1321 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1322 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1323 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1324 || TREE_CODE (args[i].tree_value) == COND_EXPR
1325 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1326 copy_to_evaluate_size
1327 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1330 if (copy_to_evaluate_size * 2 >= args_size->constant
1331 && args_size->constant > 0)
1332 must_preallocate = 1;
1334 return must_preallocate;
1337 /* If we preallocated stack space, compute the address of each argument
1338 and store it into the ARGS array.
1340 We need not ensure it is a valid memory address here; it will be
1341 validized when it is used.
1343 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1346 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1350 rtx arg_reg = argblock;
1351 int i, arg_offset = 0;
1353 if (GET_CODE (argblock) == PLUS)
1354 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1356 for (i = 0; i < num_actuals; i++)
1358 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1359 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1361 unsigned int align, boundary;
1363 /* Skip this parm if it will not be passed on the stack. */
1364 if (! args[i].pass_on_stack && args[i].reg != 0)
1367 if (GET_CODE (offset) == CONST_INT)
1368 addr = plus_constant (arg_reg, INTVAL (offset));
1370 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1372 addr = plus_constant (addr, arg_offset);
1373 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1374 set_mem_attributes (args[i].stack,
1375 TREE_TYPE (args[i].tree_value), 1);
1376 align = BITS_PER_UNIT;
1377 boundary = args[i].locate.boundary;
1378 if (args[i].locate.where_pad != downward)
1380 else if (GET_CODE (offset) == CONST_INT)
1382 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1383 align = align & -align;
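/* ALIGN & -ALIGN isolates the lowest set bit, i.e. the largest power of two
(in bits) that divides both the argument's bit offset and its slot boundary.
Illustrative example: a 12-byte offset from a 64-bit-aligned block gives
(96 | 64) & -(96 | 64) == 32, so the slot is known to be 32-bit aligned. */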
1385 set_mem_align (args[i].stack, align);
1387 if (GET_CODE (slot_offset) == CONST_INT)
1388 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1390 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1392 addr = plus_constant (addr, arg_offset);
1393 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1394 set_mem_attributes (args[i].stack_slot,
1395 TREE_TYPE (args[i].tree_value), 1);
1396 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1398 /* Function incoming arguments may overlap with sibling call
1399 outgoing arguments and we cannot allow reordering of reads
1400 from function arguments with stores to outgoing arguments
1401 of sibling calls. */
1402 set_mem_alias_set (args[i].stack, 0);
1403 set_mem_alias_set (args[i].stack_slot, 0);
1408 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1409 in a call instruction.
1411 FNDECL is the tree node for the target function. For an indirect call
1412 FNDECL will be NULL_TREE.
1414 ADDR is the operand 0 of CALL_EXPR for this call. */
1417 rtx_for_function_call (tree fndecl, tree addr)
1421 /* Get the function to call, in the form of RTL. */
1424 /* If this is the first use of the function, see if we need to
1425 make an external definition for it. */
1426 if (! TREE_USED (fndecl))
1428 assemble_external (fndecl);
1429 TREE_USED (fndecl) = 1;
1432 /* Get a SYMBOL_REF rtx for the function address. */
1433 funexp = XEXP (DECL_RTL (fndecl), 0);
1436 /* Generate an rtx (probably a pseudo-register) for the address. */
1439 funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
1440 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1445 /* Do the register loads required for any wholly-register parms or any
1446 parms which are passed both on the stack and in a register. Their
1447 expressions were already evaluated.
1449 Mark all register-parms as living through the call, putting these USE
1450 insns in the CALL_INSN_FUNCTION_USAGE field.
1452 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1453 checking, setting *SIBCALL_FAILURE if appropriate. */
1456 load_register_parameters (struct arg_data *args, int num_actuals,
1457 rtx *call_fusage, int flags, int is_sibcall,
1458 int *sibcall_failure)
1462 for (i = 0; i < num_actuals; i++)
1464 rtx reg = ((flags & ECF_SIBCALL)
1465 ? args[i].tail_call_reg : args[i].reg);
1468 int partial = args[i].partial;
1471 rtx before_arg = get_last_insn ();
1472 /* Set to non-negative if we must move a word at a time, even if just
1473 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1474 we just use a normal move insn. This value can be zero if the
1475 argument is a zero size structure with no fields. */
1477 if (GET_CODE (reg) == PARALLEL)
1481 gcc_assert (partial % UNITS_PER_WORD == 0);
1482 nregs = partial / UNITS_PER_WORD;
1484 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1486 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1487 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1490 size = GET_MODE_SIZE (args[i].mode);
1492 /* Handle calls that pass values in multiple non-contiguous
1493 locations. The Irix 6 ABI has examples of this. */
1495 if (GET_CODE (reg) == PARALLEL)
1496 emit_group_move (reg, args[i].parallel_value);
1498 /* If simple case, just do move. If normal partial, store_one_arg
1499 has already loaded the register for us. In all other cases,
1500 load the register(s) from memory. */
1502 else if (nregs == -1)
1504 emit_move_insn (reg, args[i].value);
1505 #ifdef BLOCK_REG_PADDING
1506 /* Handle the case where we have a value that needs shifting
1507 up to the msb, e.g. a QImode value and we're padding
1508 upward on a BYTES_BIG_ENDIAN machine. */
1509 if (size < UNITS_PER_WORD
1510 && (args[i].locate.where_pad
1511 == (BYTES_BIG_ENDIAN ? upward : downward)))
1514 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1516 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1517 report the whole reg as used. Strictly speaking, the
1518 call only uses SIZE bytes at the msb end, but it doesn't
1519 seem worth generating rtl to say that. */
1520 reg = gen_rtx_REG (word_mode, REGNO (reg));
1521 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1522 build_int_cst (NULL_TREE, shift),
1525 emit_move_insn (reg, x);
1530 /* If we have pre-computed the values to put in the registers in
1531 the case of non-aligned structures, copy them in now. */
1533 else if (args[i].n_aligned_regs != 0)
1534 for (j = 0; j < args[i].n_aligned_regs; j++)
1535 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1536 args[i].aligned_regs[j]);
1538 else if (partial == 0 || args[i].pass_on_stack)
1540 rtx mem = validize_mem (args[i].value);
1542 /* Handle a BLKmode that needs shifting. */
1543 if (nregs == 1 && size < UNITS_PER_WORD
1544 #ifdef BLOCK_REG_PADDING
1545 && args[i].locate.where_pad == downward
1551 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1552 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1553 rtx x = gen_reg_rtx (word_mode);
1554 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1555 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1558 emit_move_insn (x, tem);
1559 x = expand_shift (dir, word_mode, x,
1560 build_int_cst (NULL_TREE, shift),
1563 emit_move_insn (ri, x);
1566 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1569 /* When a parameter is a block, and perhaps in other cases, it is
1570 possible that it did a load from an argument slot that was
1571 already clobbered. */
1573 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1574 *sibcall_failure = 1;
1576 /* Handle calls that pass values in multiple non-contiguous
1577 locations. The Irix 6 ABI has examples of this. */
1578 if (GET_CODE (reg) == PARALLEL)
1579 use_group_regs (call_fusage, reg);
1580 else if (nregs == -1)
1581 use_reg (call_fusage, reg);
1583 use_regs (call_fusage, REGNO (reg), nregs);
1588 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1589 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1590 bytes, then we would need to push some additional bytes to pad the
1591 arguments. So, we compute an adjustment to the stack pointer for an
1592 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1593 bytes. Then, when the arguments are pushed the stack will be perfectly
1594 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1595 be popped after the call. Returns the adjustment. */
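/* Illustrative example: with pending_stack_adjust == 20, stack_pointer_delta == 0,
unadjusted_args_size == 12 and a 16-byte boundary, we pop only 12 of the
pending 20 bytes, so that pushing the 12 bytes of arguments leaves the stack
16-byte aligned; ARGS_SIZE->CONSTANT then becomes 20 - 12 + 12 == 20 bytes
to pop after the call. */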
1598 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1599 struct args_size *args_size,
1600 unsigned int preferred_unit_stack_boundary)
1602 /* The number of bytes to pop so that the stack will be
1603 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1604 HOST_WIDE_INT adjustment;
1605 /* The alignment of the stack after the arguments are pushed, if we
1606 just pushed the arguments without adjusting the stack here. */
1607 unsigned HOST_WIDE_INT unadjusted_alignment;
1609 unadjusted_alignment
1610 = ((stack_pointer_delta + unadjusted_args_size)
1611 % preferred_unit_stack_boundary);
1613 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1614 as possible -- leaving just enough left to cancel out the
1615 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1616 PENDING_STACK_ADJUST is non-negative, and congruent to
1617 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1619 /* Begin by trying to pop all the bytes. */
1620 unadjusted_alignment
1621 = (unadjusted_alignment
1622 - (pending_stack_adjust % preferred_unit_stack_boundary));
1623 adjustment = pending_stack_adjust;
1624 /* Push enough additional bytes that the stack will be aligned
1625 after the arguments are pushed. */
1626 if (preferred_unit_stack_boundary > 1)
1628 if (unadjusted_alignment > 0)
1629 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1631 adjustment += unadjusted_alignment;
1634 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1635 bytes after the call. The right number is the entire
1636 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1637 by the arguments in the first place. */
1639 = pending_stack_adjust - adjustment + unadjusted_args_size;
1644 /* Scan expression X to check whether it dereferences any argument slots
1645 we have already clobbered with tail call arguments (as noted in the stored_args_map
1647 Return nonzero if X dereferences such argument slots,
1651 check_sibcall_argument_overlap_1 (rtx x)
1661 code = GET_CODE (x);
1665 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1667 else if (GET_CODE (XEXP (x, 0)) == PLUS
1668 && XEXP (XEXP (x, 0), 0) ==
1669 current_function_internal_arg_pointer
1670 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1671 i = INTVAL (XEXP (XEXP (x, 0), 1));
1675 #ifdef ARGS_GROW_DOWNWARD
1676 i = -i - GET_MODE_SIZE (GET_MODE (x));
1679 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1680 if (i + k < stored_args_map->n_bits
1681 && TEST_BIT (stored_args_map, i + k))
1687 /* Scan all subexpressions. */
1688 fmt = GET_RTX_FORMAT (code);
1689 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1693 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1696 else if (*fmt == 'E')
1698 for (j = 0; j < XVECLEN (x, i); j++)
1699 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1706 /* Scan the sequence after INSN to check whether it dereferences any argument slots
1707 we have already clobbered with tail call arguments (as noted in the stored_args_map
1708 bitmap). If MARK_STORED_ARGS_MAP, add the stack slots for ARG to the
1709 stored_args_map bitmap afterwards (when ARG is a register, MARK_STORED_ARGS_MAP
1710 should be 0). Return nonzero if the sequence after INSN dereferences such argument
1711 slots, zero otherwise. */
1714 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1718 if (insn == NULL_RTX)
1719 insn = get_insns ();
1721 insn = NEXT_INSN (insn);
1723 for (; insn; insn = NEXT_INSN (insn))
1725 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1728 if (mark_stored_args_map)
1730 #ifdef ARGS_GROW_DOWNWARD
1731 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1733 low = arg->locate.slot_offset.constant;
1736 for (high = low + arg->locate.size.constant; low < high; low++)
1737 SET_BIT (stored_args_map, low);
1739 return insn != NULL_RTX;
1742 /* Given that a function returns a value of mode MODE at the most
1743 significant end of hard register VALUE, shift VALUE left or right
1744 as specified by LEFT_P. Return true if some action was needed. */
1747 shift_return_value (enum machine_mode mode, bool left_p, rtx value)
1749 HOST_WIDE_INT shift;
1751 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
1752 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
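/* Illustrative example: an SImode value held at the most significant end of a
64-bit hard register needs a shift of 64 - 32 == 32 bits to move it to (or
from) the low-order end. */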
1756 /* Use ashr rather than lshr for right shifts. This is for the benefit
1757 of the MIPS port, which requires SImode values to be sign-extended
1758 when stored in 64-bit registers. */
1759 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1760 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
1765 /* Remove all REG_EQUIV notes found in the insn chain. */
1768 purge_reg_equiv_notes (void)
1772 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1776 rtx note = find_reg_note (insn, REG_EQUIV, 0);
1779 /* Remove the note and keep looking at the notes for
1781 remove_note (insn, note);
1789 /* Generate all the code for a function call
1790 and return an rtx for its value.
1791 Store the value in TARGET (specified as an rtx) if convenient.
1792 If the value is stored in TARGET then TARGET is returned.
1793 If IGNORE is nonzero, then we ignore the value of the function call. */
1796 expand_call (tree exp, rtx target, int ignore)
1798 /* Nonzero if we are currently expanding a call. */
1799 static int currently_expanding_call = 0;
1801 /* List of actual parameters. */
1802 tree actparms = TREE_OPERAND (exp, 1);
1803 /* RTX for the function to be called. */
1805 /* Sequence of insns to perform a normal "call". */
1806 rtx normal_call_insns = NULL_RTX;
1807 /* Sequence of insns to perform a tail "call". */
1808 rtx tail_call_insns = NULL_RTX;
1809 /* Data type of the function. */
1811 tree type_arg_types;
1812 /* Declaration of the function being called,
1813 or 0 if the function is computed (not known by name). */
1815 /* The type of the function being called. */
1817 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1820 /* Register in which non-BLKmode value will be returned,
1821 or 0 if no value or if value is BLKmode. */
1823 /* Address where we should return a BLKmode value;
1824 0 if value not BLKmode. */
1825 rtx structure_value_addr = 0;
1826 /* Nonzero if that address is being passed by treating it as
1827 an extra, implicit first parameter. Otherwise,
1828 it is passed by being copied directly into struct_value_rtx. */
1829 int structure_value_addr_parm = 0;
1830 /* Size of aggregate value wanted, or zero if none wanted
1831 or if we are using the non-reentrant PCC calling convention
1832 or expecting the value in registers. */
1833 HOST_WIDE_INT struct_value_size = 0;
1834 /* Nonzero if called function returns an aggregate in memory PCC style,
1835 by returning the address of where to find it. */
1836 int pcc_struct_value = 0;
1837 rtx struct_value = 0;
1839 /* Number of actual parameters in this call, including struct value addr. */
1841 /* Number of named args. Args after this are anonymous ones
1842 and they must all go on the stack. */
1845 /* Vector of information about each argument.
1846 Arguments are numbered in the order they will be pushed,
1847 not the order they are written. */
1848 struct arg_data *args;
1850 /* Total size in bytes of all the stack-parms scanned so far. */
1851 struct args_size args_size;
1852 struct args_size adjusted_args_size;
1853 /* Size of arguments before any adjustments (such as rounding). */
1854 int unadjusted_args_size;
1855 /* Data on reg parms scanned so far. */
1856 CUMULATIVE_ARGS args_so_far;
1857 /* Nonzero if a reg parm has been scanned. */
1859 /* Nonzero if this is an indirect function call. */
1861 /* Nonzero if we must avoid push-insns in the args for this call.
1862 If stack space is allocated for register parameters, but not by the
1863 caller, then it is preallocated in the fixed part of the stack frame.
1864 So the entire argument block must then be preallocated (i.e., we
1865 ignore PUSH_ROUNDING in that case). */
1867 int must_preallocate = !PUSH_ARGS;
1869 /* Size of the stack reserved for parameter registers. */
1870 int reg_parm_stack_space = 0;
1872 /* Address of space preallocated for stack parms
1873 (on machines that lack push insns), or 0 if space not preallocated. */
1876 /* Mask of ECF_ flags. */
1878 #ifdef REG_PARM_STACK_SPACE
1879 /* Define the boundary of the register parm stack space that needs to be
1881 int low_to_save, high_to_save;
1882 rtx save_area = 0; /* Place that it is saved */
1885 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1886 char *initial_stack_usage_map = stack_usage_map;
1888 int old_stack_allocated;
1890 /* State variables to track stack modifications. */
1891 rtx old_stack_level = 0;
1892 int old_stack_arg_under_construction = 0;
1893 int old_pending_adj = 0;
1894 int old_inhibit_defer_pop = inhibit_defer_pop;
1896 /* Some stack pointer alterations we make are performed via
1897 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1898 which we then also need to save/restore along the way. */
1899 int old_stack_pointer_delta = 0;
1902 tree p = TREE_OPERAND (exp, 0);
1903 tree addr = TREE_OPERAND (exp, 0);
1905 /* The alignment of the stack, in bits. */
1906 unsigned HOST_WIDE_INT preferred_stack_boundary;
1907 /* The alignment of the stack, in bytes. */
1908 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
1909 /* The static chain value to use for this call. */
1910 rtx static_chain_value;
1912 /* See if this is a "nothrow" function call. */
1912 if (TREE_NOTHROW (exp))
1913 flags |= ECF_NOTHROW;
1915 /* See if we can find a DECL-node for the actual function, and get the
1916 function attributes (flags) from the function decl or type node. */
1917 fndecl = get_callee_fndecl (exp);
1920 fntype = TREE_TYPE (fndecl);
1921 flags |= flags_from_decl_or_type (fndecl);
1925 fntype = TREE_TYPE (TREE_TYPE (p));
1926 flags |= flags_from_decl_or_type (fntype);
1929 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
1931 /* Warn if this value is an aggregate type,
1932 regardless of which calling convention we are using for it. */
1933 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1934 warning ("function call has aggregate value");
1936 /* If the result of a pure or const function call is ignored (or void),
1937 and none of its arguments are volatile, we can avoid expanding the
1938 call and just evaluate the arguments for side-effects. */
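/* As a rough illustration (the declaration below is hypothetical, not from
this file): given
extern int isquare (int) __attribute__ ((const));
a statement such as `isquare (i++);' whose result is unused reaches this
point; only the side effect of `i++' is expanded and no call insn is
emitted at all. */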
1939 if ((flags & (ECF_CONST | ECF_PURE))
1940 && (ignore || target == const0_rtx
1941 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
1943 bool volatilep = false;
1946 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
1947 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
1955 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
1956 expand_expr (TREE_VALUE (arg), const0_rtx,
1957 VOIDmode, EXPAND_NORMAL);
1962 #ifdef REG_PARM_STACK_SPACE
1963 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1966 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1967 if (reg_parm_stack_space > 0 && PUSH_ARGS)
1968 must_preallocate = 1;
1971 /* Set up a place to return a structure. */
1973 /* Cater to broken compilers. */
1974 if (aggregate_value_p (exp, fndecl))
1976 /* This call returns a big structure. */
1977 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1979 #ifdef PCC_STATIC_STRUCT_RETURN
1981 pcc_struct_value = 1;
1983 #else /* not PCC_STATIC_STRUCT_RETURN */
1985 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1987 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
1989 /* The structure value address arg is already in actparms.
1990 Pull it out. It might be nice to just leave it there, but
1991 we need to set structure_value_addr. */
1992 tree return_arg = TREE_VALUE (actparms);
1993 actparms = TREE_CHAIN (actparms);
1994 structure_value_addr = expand_expr (return_arg, NULL_RTX,
1995 VOIDmode, EXPAND_NORMAL);
1998 else if (target && MEM_P (target))
1999 structure_value_addr = XEXP (target, 0);
2003 /* For variable-sized objects, we must be called with a target
2004 specified. If we were to allocate space on the stack here,
2005 we would have no way of knowing when to free it. */
2006 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2008 mark_temp_addr_taken (d);
2009 structure_value_addr = XEXP (d, 0);
2013 #endif /* not PCC_STATIC_STRUCT_RETURN */
2016 /* Figure out the amount to which the stack should be aligned. */
2017 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2020 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2021 if (i && i->preferred_incoming_stack_boundary)
2022 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2025 /* Operand 0 is a pointer-to-function; get the type of the function. */
2026 funtype = TREE_TYPE (addr);
2027 gcc_assert (POINTER_TYPE_P (funtype));
2028 funtype = TREE_TYPE (funtype);
2030 /* Munge the tree to split complex arguments into their imaginary
2032 if (targetm.calls.split_complex_arg)
2034 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2035 actparms = split_complex_values (actparms);
2038 type_arg_types = TYPE_ARG_TYPES (funtype);
2040 if (flags & ECF_MAY_BE_ALLOCA)
2041 current_function_calls_alloca = 1;
2043 /* If struct_value_rtx is 0, it means pass the address
2044 as if it were an extra parameter. */
2045 if (structure_value_addr && struct_value == 0)
2047 /* If structure_value_addr is a REG other than
2048 virtual_outgoing_args_rtx, we can always use it. If it
2049 is not a REG, we must always copy it into a register.
2050 If it is virtual_outgoing_args_rtx, we must copy it to another
2051 register in some cases. */
2052 rtx temp = (!REG_P (structure_value_addr)
2053 || (ACCUMULATE_OUTGOING_ARGS
2054 && stack_arg_under_construction
2055 && structure_value_addr == virtual_outgoing_args_rtx)
2056 ? copy_addr_to_reg (convert_memory_address
2057 (Pmode, structure_value_addr))
2058 : structure_value_addr);
2061 = tree_cons (error_mark_node,
2062 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2065 structure_value_addr_parm = 1;
2068 /* Count the arguments and set NUM_ACTUALS. */
2069 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2072 /* Compute number of named args.
2073 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2075 if (type_arg_types != 0)
2077 = (list_length (type_arg_types)
2078 /* Count the struct value address, if it is passed as a parm. */
2079 + structure_value_addr_parm);
2081 /* If we know nothing, treat all args as named. */
2082 n_named_args = num_actuals;
2084 /* Start updating where the next arg would go.
2086 On some machines (such as the PA) indirect calls have a different
2087 calling convention than normal calls. The fourth argument in
2088 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2090 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2092 /* Now possibly adjust the number of named args.
2093 Normally, don't include the last named arg if anonymous args follow.
2094 We do include the last named arg if
2095 targetm.calls.strict_argument_naming() returns nonzero.
2096 (If no anonymous args follow, the result of list_length is actually
2097 one too large. This is harmless.)
2099 If targetm.calls.pretend_outgoing_varargs_named() returns
2100 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2101 this machine will be able to place unnamed args that were passed
2102 in registers into the stack. So treat all args as named. This
2103 allows the insns emitted for a specific argument list to be
2104 independent of the function declaration.
2106 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2107 we do not have any reliable way to pass unnamed args in
2108 registers, so we must force them into memory. */
2110 if (type_arg_types != 0
2111 && targetm.calls.strict_argument_naming (&args_so_far))
2113 else if (type_arg_types != 0
2114 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2115 /* Don't include the last named arg. */
2118 /* Treat all args as named. */
2119 n_named_args = num_actuals;
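/* A concrete reading of the above (purely illustrative): for a prototype
`int f (int, const char *, ...)' called with four actual arguments,
type_arg_types has two entries and does not end in void_list_node, so the
raw count above is 2; depending on the two target hooks just queried,
n_named_args ends up as 2, 1, or 4 (num_actuals). */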
2121 /* Make a vector to hold all the information about each arg. */
2122 args = alloca (num_actuals * sizeof (struct arg_data));
2123 memset (args, 0, num_actuals * sizeof (struct arg_data));
2125 /* Build up entries in the ARGS array, compute the size of the
2126 arguments into ARGS_SIZE, etc. */
2127 initialize_argument_information (num_actuals, args, &args_size,
2128 n_named_args, actparms, fndecl,
2129 &args_so_far, reg_parm_stack_space,
2130 &old_stack_level, &old_pending_adj,
2131 &must_preallocate, &flags,
2132 &try_tail_call, CALL_FROM_THUNK_P (exp));
2136 /* If this function requires a variable-sized argument list, don't
2137 try to make a cse'able block for this call. We may be able to
2138 do this eventually, but it is too complicated to keep track of
2139 what insns go in the cse'able block and which don't. */
2141 flags &= ~ECF_LIBCALL_BLOCK;
2142 must_preallocate = 1;
2145 /* Now make final decision about preallocating stack space. */
2146 must_preallocate = finalize_must_preallocate (must_preallocate,
2150 /* If the structure value address will reference the stack pointer, we
2151 must stabilize it. We don't need to do this if we know that we are
2152 not going to adjust the stack pointer in processing this call. */
2154 if (structure_value_addr
2155 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2156 || reg_mentioned_p (virtual_outgoing_args_rtx,
2157 structure_value_addr))
2159 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2160 structure_value_addr = copy_to_reg (structure_value_addr);
2162 /* Tail calls can make things harder to debug, and we've traditionally
2163 pushed these optimizations into -O2. Don't try if we're already
2164 expanding a call, as that means we're an argument. Don't try if
2165 there's cleanups, as we know there's code to follow the call. */
2167 if (currently_expanding_call++ != 0
2168 || !flag_optimize_sibling_calls
2170 || lookup_stmt_eh_region (exp) >= 0)
2173 /* Remaining reasons for tail call optimization to fail. */
2175 #ifdef HAVE_sibcall_epilogue
2176 !HAVE_sibcall_epilogue
2181 /* Doing sibling call optimization needs some work, since
2182 structure_value_addr can be allocated on the stack.
2183 It does not seem worth the effort since few optimizable
2184 sibling calls will return a structure. */
2185 || structure_value_addr != NULL_RTX
2186 /* Check whether the target is able to optimize the call
2188 || !targetm.function_ok_for_sibcall (fndecl, exp)
2189 /* Functions that do not return exactly once may not be sibcall
2191 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2192 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2193 /* If the called function is nested in the current one, it might access
2194 some of the caller's arguments, but could clobber them beforehand if
2195 the argument areas are shared. */
2196 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2197 /* If this function requires more stack slots than the current
2198 function, we cannot change it into a sibling call. */
2199 || args_size.constant > current_function_args_size
2200 /* If the callee pops its own arguments, then it must pop exactly
2201 the same number of arguments as the current function. */
2202 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2203 != RETURN_POPS_ARGS (current_function_decl,
2204 TREE_TYPE (current_function_decl),
2205 current_function_args_size))
2206 || !lang_hooks.decls.ok_for_sibcall (fndecl))
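/* For instance, on a target that passes these arguments on the stack, a
caller whose incoming argument area is 8 bytes cannot turn a call that
needs 16 bytes of outgoing arguments into a sibling call, since the
reused incoming area would be too small (the args_size check above). */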
2209 /* Ensure current function's preferred stack boundary is at least
2210 what we need. We don't have to increase alignment for recursive
2212 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2213 && fndecl != current_function_decl)
2214 cfun->preferred_stack_boundary = preferred_stack_boundary;
2215 if (fndecl == current_function_decl)
2216 cfun->recursive_call_emit = true;
2218 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
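/* For example, with a 128-bit preferred boundary this is 16 bytes; the
code below keeps the stack pointer a multiple of that at each call. */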
2220 /* We want to make two insn chains; one for a sibling call, the other
2221 for a normal call. We will select one of the two chains after
2222 initial RTL generation is complete. */
2223 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2225 int sibcall_failure = 0;
2226 /* We want to emit any pending stack adjustments before the tail
2227 recursion "call". That way we know any adjustment after the tail
2228 recursion call can be ignored if we indeed use the tail
2230 int save_pending_stack_adjust = 0;
2231 int save_stack_pointer_delta = 0;
2233 rtx before_call, next_arg_reg;
2237 /* State variables we need to save and restore between
2239 save_pending_stack_adjust = pending_stack_adjust;
2240 save_stack_pointer_delta = stack_pointer_delta;
2243 flags &= ~ECF_SIBCALL;
2245 flags |= ECF_SIBCALL;
2247 /* Other state variables that we must reinitialize each time
2248 through the loop (that are not initialized by the loop itself). */
2252 /* Start a new sequence for the normal call case.
2254 From this point on, if the sibling call fails, we want to set
2255 sibcall_failure instead of continuing the loop. */
2258 /* Don't let pending stack adjusts add up to too much.
2259 Also, do all pending adjustments now if there is any chance
2260 this might be a call to alloca or if we are expanding a sibling
2261 call sequence or if we are calling a function that is to return
2262 with stack pointer depressed.
2263 Also do the adjustments before a throwing call, otherwise
2264 exception handling can fail; PR 19225. */
2265 if (pending_stack_adjust >= 32
2266 || (pending_stack_adjust > 0
2267 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2268 || (pending_stack_adjust > 0
2269 && flag_exceptions && !(flags & ECF_NOTHROW))
2271 do_pending_stack_adjust ();
2273 /* When calling a const function, we must pop the stack args right away,
2274 so that the pop is deleted or moved with the call. */
2275 if (pass && (flags & ECF_LIBCALL_BLOCK))
2278 /* Precompute any arguments as needed. */
2280 precompute_arguments (flags, num_actuals, args);
2282 /* Now we are about to start emitting insns that can be deleted
2283 if a libcall is deleted. */
2284 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2287 adjusted_args_size = args_size;
2288 /* Compute the actual size of the argument block required. The variable
2289 and constant sizes must be combined, the size may have to be rounded,
2290 and there may be a minimum required size. When generating a sibcall
2291 pattern, do not round up, since we'll be re-using whatever space our
2293 unadjusted_args_size
2294 = compute_argument_block_size (reg_parm_stack_space,
2295 &adjusted_args_size,
2297 : preferred_stack_boundary));
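/* E.g. (assuming a 16-byte preferred boundary and no pending
stack_pointer_delta): 20 bytes of outgoing arguments round up to an
adjusted size of 32 for a normal call, while the sibcall pass skips the
rounding so the caller's incoming argument area can be reused as-is. */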
2299 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2301 /* The argument block when performing a sibling call is the
2302 incoming argument block. */
2305 argblock = virtual_incoming_args_rtx;
2307 #ifdef STACK_GROWS_DOWNWARD
2308 = plus_constant (argblock, current_function_pretend_args_size);
2310 = plus_constant (argblock, -current_function_pretend_args_size);
2312 stored_args_map = sbitmap_alloc (args_size.constant);
2313 sbitmap_zero (stored_args_map);
2316 /* If we have no actual push instructions, or shouldn't use them,
2317 make space for all args right now. */
2318 else if (adjusted_args_size.var != 0)
2320 if (old_stack_level == 0)
2322 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2323 old_stack_pointer_delta = stack_pointer_delta;
2324 old_pending_adj = pending_stack_adjust;
2325 pending_stack_adjust = 0;
2326 /* stack_arg_under_construction says whether a stack arg is
2327 being constructed at the old stack level. Pushing the stack
2328 gets a clean outgoing argument block. */
2329 old_stack_arg_under_construction = stack_arg_under_construction;
2330 stack_arg_under_construction = 0;
2332 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2336 /* Note that we must go through the motions of allocating an argument
2337 block even if the size is zero because we may be storing args
2338 in the area reserved for register arguments, which may be part of
2341 int needed = adjusted_args_size.constant;
2343 /* Store the maximum argument space used. It will be pushed by
2344 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2347 if (needed > current_function_outgoing_args_size)
2348 current_function_outgoing_args_size = needed;
2350 if (must_preallocate)
2352 if (ACCUMULATE_OUTGOING_ARGS)
2354 /* Since the stack pointer will never be pushed, it is
2355 possible for the evaluation of a parm to clobber
2356 something we have already written to the stack.
2357 Since most function calls on RISC machines do not use
2358 the stack, this is uncommon, but must work correctly.
2360 Therefore, we save any area of the stack that was already
2361 written and that we are using. Here we set up to do this
2362 by making a new stack usage map from the old one. The
2363 actual save will be done by store_one_arg.
2365 Another approach might be to try to reorder the argument
2366 evaluations to avoid this conflicting stack usage. */
2368 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2369 /* Since we will be writing into the entire argument area,
2370 the map must be allocated for its entire size, not just
2371 the part that is the responsibility of the caller. */
2372 needed += reg_parm_stack_space;
2375 #ifdef ARGS_GROW_DOWNWARD
2376 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2379 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2382 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2384 if (initial_highest_arg_in_use)
2385 memcpy (stack_usage_map, initial_stack_usage_map,
2386 initial_highest_arg_in_use);
2388 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2389 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2390 (highest_outgoing_arg_in_use
2391 - initial_highest_arg_in_use));
2394 /* The address of the outgoing argument list must not be
2395 copied to a register here, because argblock would be left
2396 pointing to the wrong place after the call to
2397 allocate_dynamic_stack_space below. */
2399 argblock = virtual_outgoing_args_rtx;
2403 if (inhibit_defer_pop == 0)
2405 /* Try to reuse some or all of the pending_stack_adjust
2406 to get this space. */
2408 = (combine_pending_stack_adjustment_and_call
2409 (unadjusted_args_size,
2410 &adjusted_args_size,
2411 preferred_unit_stack_boundary));
2413 /* combine_pending_stack_adjustment_and_call computes
2414 an adjustment before the arguments are allocated.
2415 Account for them and see whether or not the stack
2416 needs to go up or down. */
2417 needed = unadjusted_args_size - needed;
2421 /* We're releasing stack space. */
2422 /* ??? We can avoid any adjustment at all if we're
2423 already aligned. FIXME. */
2424 pending_stack_adjust = -needed;
2425 do_pending_stack_adjust ();
2429 /* We need to allocate space. We'll do that in
2430 push_block below. */
2431 pending_stack_adjust = 0;
2434 /* Special case this because overhead of `push_block' in
2435 this case is non-trivial. */
2437 argblock = virtual_outgoing_args_rtx;
2440 argblock = push_block (GEN_INT (needed), 0, 0);
2441 #ifdef ARGS_GROW_DOWNWARD
2442 argblock = plus_constant (argblock, needed);
2446 /* We only really need to call `copy_to_reg' in the case
2447 where push insns are going to be used to pass ARGBLOCK
2448 to a function call in ARGS. In that case, the stack
2449 pointer changes value from the allocation point to the
2450 call point, and hence the value of
2451 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2452 as well always do it. */
2453 argblock = copy_to_reg (argblock);
2458 if (ACCUMULATE_OUTGOING_ARGS)
2460 /* The save/restore code in store_one_arg handles all
2461 cases except one: a constructor call (including a C
2462 function returning a BLKmode struct) to initialize
2464 if (stack_arg_under_construction)
2466 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2467 rtx push_size = GEN_INT (reg_parm_stack_space
2468 + adjusted_args_size.constant);
2470 rtx push_size = GEN_INT (adjusted_args_size.constant);
2472 if (old_stack_level == 0)
2474 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2476 old_stack_pointer_delta = stack_pointer_delta;
2477 old_pending_adj = pending_stack_adjust;
2478 pending_stack_adjust = 0;
2479 /* stack_arg_under_construction says whether a stack
2480 arg is being constructed at the old stack level.
2481 Pushing the stack gets a clean outgoing argument
2483 old_stack_arg_under_construction
2484 = stack_arg_under_construction;
2485 stack_arg_under_construction = 0;
2486 /* Make a new map for the new argument list. */
2487 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2488 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2489 highest_outgoing_arg_in_use = 0;
2491 allocate_dynamic_stack_space (push_size, NULL_RTX,
2495 /* If argument evaluation might modify the stack pointer,
2496 copy the address of the argument list to a register. */
2497 for (i = 0; i < num_actuals; i++)
2498 if (args[i].pass_on_stack)
2500 argblock = copy_addr_to_reg (argblock);
2505 compute_argument_addresses (args, argblock, num_actuals);
2507 /* If we push args individually in reverse order, perform stack alignment
2508 before the first push (the last arg). */
2509 if (PUSH_ARGS_REVERSED && argblock == 0
2510 && adjusted_args_size.constant != unadjusted_args_size)
2512 /* When the stack adjustment is pending, we get better code
2513 by combining the adjustments. */
2514 if (pending_stack_adjust
2515 && ! (flags & ECF_LIBCALL_BLOCK)
2516 && ! inhibit_defer_pop)
2518 pending_stack_adjust
2519 = (combine_pending_stack_adjustment_and_call
2520 (unadjusted_args_size,
2521 &adjusted_args_size,
2522 preferred_unit_stack_boundary));
2523 do_pending_stack_adjust ();
2525 else if (argblock == 0)
2526 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2527 - unadjusted_args_size));
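/* E.g. an adjusted size of 32 bytes versus an unadjusted size of 20 means
12 bytes of padding are allocated here before the first push (which is
the last argument), so the eventual call site is aligned. */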
2529 /* Now that the stack is properly aligned, pops can't safely
2530 be deferred during the evaluation of the arguments. */
2533 funexp = rtx_for_function_call (fndecl, addr);
2535 /* Figure out the register where the value, if any, will come back. */
2537 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2538 && ! structure_value_addr)
2540 if (pcc_struct_value)
2541 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2542 fndecl, (pass == 0));
2544 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2547 /* Precompute all register parameters. It isn't safe to compute anything
2548 once we have started filling any specific hard regs. */
2549 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2551 if (TREE_OPERAND (exp, 2))
2552 static_chain_value = expand_expr (TREE_OPERAND (exp, 2),
2553 NULL_RTX, VOIDmode, 0);
2555 static_chain_value = 0;
2557 #ifdef REG_PARM_STACK_SPACE
2558 /* Save the fixed argument area if it's part of the caller's frame and
2559 is clobbered by argument setup for this call. */
2560 if (ACCUMULATE_OUTGOING_ARGS && pass)
2561 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2562 &low_to_save, &high_to_save);
2565 /* Now store (and compute if necessary) all non-register parms.
2566 These come before register parms, since they can require block-moves,
2567 which could clobber the registers used for register parms.
2568 Parms which have partial registers are not stored here,
2569 but we do preallocate space here if they want that. */
2571 for (i = 0; i < num_actuals; i++)
2572 if (args[i].reg == 0 || args[i].pass_on_stack)
2574 rtx before_arg = get_last_insn ();
2576 if (store_one_arg (&args[i], argblock, flags,
2577 adjusted_args_size.var != 0,
2578 reg_parm_stack_space)
2580 && check_sibcall_argument_overlap (before_arg,
2582 sibcall_failure = 1;
2584 if (flags & ECF_CONST
2586 && args[i].value == args[i].stack)
2587 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2588 gen_rtx_USE (VOIDmode,
2593 /* If we have a parm that is passed in registers but not in memory
2594 and whose alignment does not permit a direct copy into registers,
2595 make a group of pseudos that correspond to each register that we
2597 if (STRICT_ALIGNMENT)
2598 store_unaligned_arguments_into_pseudos (args, num_actuals);
2600 /* Now store any partially-in-registers parm.
2601 This is the last place a block-move can happen. */
2603 for (i = 0; i < num_actuals; i++)
2604 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2606 rtx before_arg = get_last_insn ();
2608 if (store_one_arg (&args[i], argblock, flags,
2609 adjusted_args_size.var != 0,
2610 reg_parm_stack_space)
2612 && check_sibcall_argument_overlap (before_arg,
2614 sibcall_failure = 1;
2617 /* If we pushed args in forward order, perform stack alignment
2618 after pushing the last arg. */
2619 if (!PUSH_ARGS_REVERSED && argblock == 0)
2620 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2621 - unadjusted_args_size));
2623 /* If register arguments require space on the stack and stack space
2624 was not preallocated, allocate stack space here for arguments
2625 passed in registers. */
2626 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2627 if (!ACCUMULATE_OUTGOING_ARGS
2628 && must_preallocate == 0 && reg_parm_stack_space > 0)
2629 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2632 /* Pass the function the address in which to return a
2634 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2636 structure_value_addr
2637 = convert_memory_address (Pmode, structure_value_addr);
2638 emit_move_insn (struct_value,
2640 force_operand (structure_value_addr,
2643 if (REG_P (struct_value))
2644 use_reg (&call_fusage, struct_value);
2647 funexp = prepare_call_address (funexp, static_chain_value,
2648 &call_fusage, reg_parm_seen, pass == 0);
2650 load_register_parameters (args, num_actuals, &call_fusage, flags,
2651 pass == 0, &sibcall_failure);
2653 /* Save a pointer to the last insn before the call, so that we can
2654 later safely search backwards to find the CALL_INSN. */
2655 before_call = get_last_insn ();
2657 /* Set up next argument register. For sibling calls on machines
2658 with register windows this should be the incoming register. */
2659 #ifdef FUNCTION_INCOMING_ARG
2661 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2665 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2668 /* All arguments and registers used for the call must be set up by
2671 /* Stack must be properly aligned now. */
2673 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2675 /* Generate the actual call instruction. */
2676 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2677 adjusted_args_size.constant, struct_value_size,
2678 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2679 flags, & args_so_far);
2681 /* If a non-BLKmode value is returned at the most significant end
2682 of a register, shift the register right by the appropriate amount
2683 and update VALREG accordingly. BLKmode values are handled by the
2684 group load/store machinery below. */
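/* E.g. on a target whose ABI places a 16-bit return value in the most
significant end of a 32-bit return register, the value is shifted right
by 16 bits here and VALREG is re-created in the narrower mode. */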
2685 if (!structure_value_addr
2686 && !pcc_struct_value
2687 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2688 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2690 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2691 sibcall_failure = 1;
2692 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2695 /* If call is cse'able, make appropriate pair of reg-notes around it.
2696 Test valreg so we don't crash; may safely ignore `const'
2697 if return type is void. Disable for PARALLEL return values, because
2698 we have no way to move such values into a pseudo register. */
2699 if (pass && (flags & ECF_LIBCALL_BLOCK))
2703 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2705 insns = get_insns ();
2707 /* Expansion of block moves possibly introduced a loop that may
2708 not appear inside libcall block. */
2709 for (insn = insns; insn; insn = NEXT_INSN (insn))
2721 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2723 /* Mark the return value as a pointer if needed. */
2724 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2725 mark_reg_pointer (temp,
2726 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2729 if (flag_unsafe_math_optimizations
2731 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2732 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2733 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2734 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2735 note = gen_rtx_fmt_e (SQRT,
2737 args[0].initial_value);
2740 /* Construct an "equal form" for the value which
2741 mentions all the arguments in order as well as
2742 the function name. */
2743 for (i = 0; i < num_actuals; i++)
2744 note = gen_rtx_EXPR_LIST (VOIDmode,
2745 args[i].initial_value, note);
2746 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2748 if (flags & ECF_PURE)
2749 note = gen_rtx_EXPR_LIST (VOIDmode,
2750 gen_rtx_USE (VOIDmode,
2751 gen_rtx_MEM (BLKmode,
2752 gen_rtx_SCRATCH (VOIDmode))),
2755 emit_libcall_block (insns, temp, valreg, note);
2760 else if (pass && (flags & ECF_MALLOC))
2762 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2765 /* The return value from a malloc-like function is a pointer. */
2766 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2767 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2769 emit_move_insn (temp, valreg);
2771 /* The return value from a malloc-like function cannot alias
2773 last = get_last_insn ();
2775 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2777 /* Write out the sequence. */
2778 insns = get_insns ();
2784 /* For calls to `setjmp', etc., inform flow.c it should complain
2785 if nonvolatile values are live. For functions that cannot return,
2786 inform flow that control does not fall through. */
2788 if ((flags & ECF_NORETURN) || pass == 0)
2790 /* The barrier must be emitted
2791 immediately after the CALL_INSN. Some ports emit more
2792 than just a CALL_INSN above, so we must search for it here. */
2794 rtx last = get_last_insn ();
2795 while (!CALL_P (last))
2797 last = PREV_INSN (last);
2798 /* There was no CALL_INSN? */
2799 gcc_assert (last != before_call);
2802 emit_barrier_after (last);
2804 /* Stack adjustments after a noreturn call are dead code.
2805 However when NO_DEFER_POP is in effect, we must preserve
2806 stack_pointer_delta. */
2807 if (inhibit_defer_pop == 0)
2809 stack_pointer_delta = old_stack_allocated;
2810 pending_stack_adjust = 0;
2814 /* If the value type is not void, return an rtx for the value. */
2816 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2818 target = const0_rtx;
2819 else if (structure_value_addr)
2821 if (target == 0 || !MEM_P (target))
2824 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2825 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2826 structure_value_addr));
2827 set_mem_attributes (target, exp, 1);
2830 else if (pcc_struct_value)
2832 /* This is the special C++ case where we need to
2833 know what the true target was. We take care to
2834 never use this value more than once in one expression. */
2835 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2836 copy_to_reg (valreg));
2837 set_mem_attributes (target, exp, 1);
2839 /* Handle calls that return values in multiple non-contiguous locations.
2840 The Irix 6 ABI has examples of this. */
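/* E.g. a small structure returned partly in floating-point and partly in
integer registers arrives here as a PARALLEL and is spilled to a stack
temporary, which also rules out a sibling call. */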
2841 else if (GET_CODE (valreg) == PARALLEL)
2845 /* This will only be assigned once, so it can be readonly. */
2846 tree nt = build_qualified_type (TREE_TYPE (exp),
2847 (TYPE_QUALS (TREE_TYPE (exp))
2848 | TYPE_QUAL_CONST));
2850 target = assign_temp (nt, 0, 1, 1);
2853 if (! rtx_equal_p (target, valreg))
2854 emit_group_store (target, valreg, TREE_TYPE (exp),
2855 int_size_in_bytes (TREE_TYPE (exp)));
2857 /* We cannot support sibling calls for this case. */
2858 sibcall_failure = 1;
2861 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2862 && GET_MODE (target) == GET_MODE (valreg))
2864 /* TARGET and VALREG cannot be equal at this point because the
2865 latter would not have REG_FUNCTION_VALUE_P true, while the
2866 former would if it were referring to the same register.
2868 If they refer to the same register, this move will be a no-op,
2869 except when function inlining is being done. */
2870 emit_move_insn (target, valreg);
2872 /* If we are setting a MEM, this code must be executed. Since it is
2873 emitted after the call insn, sibcall optimization cannot be
2874 performed in that case. */
2876 sibcall_failure = 1;
2878 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2880 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2883 /* We cannot support sibling calls for this case. */
2883 sibcall_failure = 1;
2886 target = copy_to_reg (valreg);
2888 if (targetm.calls.promote_function_return(funtype))
2890 /* If we promoted this return value, make the proper SUBREG.
2891 TARGET might be const0_rtx here, so be careful. */
2893 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2894 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2896 tree type = TREE_TYPE (exp);
2897 int unsignedp = TYPE_UNSIGNED (type);
2899 enum machine_mode pmode;
2901 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
2902 /* If we don't promote as expected, something is wrong. */
2903 gcc_assert (GET_MODE (target) == pmode);
2905 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
2906 && (GET_MODE_SIZE (GET_MODE (target))
2907 > GET_MODE_SIZE (TYPE_MODE (type))))
2909 offset = GET_MODE_SIZE (GET_MODE (target))
2910 - GET_MODE_SIZE (TYPE_MODE (type));
2911 if (! BYTES_BIG_ENDIAN)
2912 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
2913 else if (! WORDS_BIG_ENDIAN)
2914 offset %= UNITS_PER_WORD;
2916 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
2917 SUBREG_PROMOTED_VAR_P (target) = 1;
2918 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
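/* Worked example (illustrative): a QImode value promoted to an SImode
TARGET on a target with big-endian bytes and words gives offset
4 - 1 = 3; neither adjustment above changes it, so the SUBREG picks out
the least significant byte of the register. */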
2922 /* If size of args is variable or this was a constructor call for a stack
2923 argument, restore saved stack-pointer value. */
2925 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
2927 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2928 stack_pointer_delta = old_stack_pointer_delta;
2929 pending_stack_adjust = old_pending_adj;
2930 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2931 stack_arg_under_construction = old_stack_arg_under_construction;
2932 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2933 stack_usage_map = initial_stack_usage_map;
2934 sibcall_failure = 1;
2936 else if (ACCUMULATE_OUTGOING_ARGS && pass)
2938 #ifdef REG_PARM_STACK_SPACE
2940 restore_fixed_argument_area (save_area, argblock,
2941 high_to_save, low_to_save);
2944 /* If we saved any argument areas, restore them. */
2945 for (i = 0; i < num_actuals; i++)
2946 if (args[i].save_area)
2948 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2950 = gen_rtx_MEM (save_mode,
2951 memory_address (save_mode,
2952 XEXP (args[i].stack_slot, 0)));
2954 if (save_mode != BLKmode)
2955 emit_move_insn (stack_area, args[i].save_area);
2957 emit_block_move (stack_area, args[i].save_area,
2958 GEN_INT (args[i].locate.size.constant),
2959 BLOCK_OP_CALL_PARM);
2962 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2963 stack_usage_map = initial_stack_usage_map;
2966 /* If this was alloca, record the new stack level for nonlocal gotos.
2967 Check for the handler slots since we might not have a save area
2968 for non-local gotos. */
2970 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
2971 update_nonlocal_goto_save_area ();
2973 /* Free up storage we no longer need. */
2974 for (i = 0; i < num_actuals; ++i)
2975 if (args[i].aligned_regs)
2976 free (args[i].aligned_regs);
2978 insns = get_insns ();
2983 tail_call_insns = insns;
2985 /* Restore the pending stack adjustment now that we have
2986 finished generating the sibling call sequence. */
2988 pending_stack_adjust = save_pending_stack_adjust;
2989 stack_pointer_delta = save_stack_pointer_delta;
2991 /* Prepare arg structure for next iteration. */
2992 for (i = 0; i < num_actuals; i++)
2995 args[i].aligned_regs = 0;
2999 sbitmap_free (stored_args_map);
3003 normal_call_insns = insns;
3005 /* Verify that we've deallocated all the stack we used. */
3006 gcc_assert ((flags & ECF_NORETURN)
3007 || (old_stack_allocated
3008 == stack_pointer_delta - pending_stack_adjust));
3011 /* If something prevents making this a sibling call,
3012 zero out the sequence. */
3013 if (sibcall_failure)
3014 tail_call_insns = NULL_RTX;
3019 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3020 arguments too, as argument area is now clobbered by the call. */
3021 if (tail_call_insns)
3023 emit_insn (tail_call_insns);
3024 cfun->tail_call_emit = true;
3027 emit_insn (normal_call_insns);
3029 currently_expanding_call--;
3031 /* If this function returns with the stack pointer depressed, ensure
3032 this block saves and restores the stack pointer, show it was
3033 changed, and adjust for any outgoing arg space. */
3034 if (flags & ECF_SP_DEPRESSED)
3036 clear_pending_stack_adjust ();
3037 emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
3038 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3044 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3045 this function's incoming arguments.
3047 At the start of RTL generation we know the only REG_EQUIV notes
3048 in the rtl chain are those for incoming arguments, so we can safely
3049 flush any REG_EQUIV note.
3051 This is (slight) overkill. We could keep track of the highest
3052 argument we clobber and be more selective in removing notes, but it
3053 does not seem to be worth the effort. */
3055 fixup_tail_calls (void)
3057 purge_reg_equiv_notes ();
3060 /* Traverse an argument list in VALUES and expand all complex
3061 arguments into their components. */
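/* For instance (illustrative): an argument list (a, z, b), where z has
COMPLEX_TYPE and the target asks for splitting, is rewritten as
(a, REALPART_EXPR <z>, IMAGPART_EXPR <z>, b). */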
3063 split_complex_values (tree values)
3067 /* Before allocating memory, check for the common case of no complex. */
3068 for (p = values; p; p = TREE_CHAIN (p))
3070 tree type = TREE_TYPE (TREE_VALUE (p));
3071 if (type && TREE_CODE (type) == COMPLEX_TYPE
3072 && targetm.calls.split_complex_arg (type))
3078 values = copy_list (values);
3080 for (p = values; p; p = TREE_CHAIN (p))
3082 tree complex_value = TREE_VALUE (p);
3085 complex_type = TREE_TYPE (complex_value);
3089 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3090 && targetm.calls.split_complex_arg (complex_type))
3093 tree real, imag, next;
3095 subtype = TREE_TYPE (complex_type);
3096 complex_value = save_expr (complex_value);
3097 real = build1 (REALPART_EXPR, subtype, complex_value);
3098 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3100 TREE_VALUE (p) = real;
3101 next = TREE_CHAIN (p);
3102 imag = build_tree_list (NULL_TREE, imag);
3103 TREE_CHAIN (p) = imag;
3104 TREE_CHAIN (imag) = next;
3106 /* Skip the newly created node. */
3114 /* Traverse a list of TYPES and expand all complex types into their
3117 split_complex_types (tree types)
3121 /* Before allocating memory, check for the common case of no complex. */
3122 for (p = types; p; p = TREE_CHAIN (p))
3124 tree type = TREE_VALUE (p);
3125 if (TREE_CODE (type) == COMPLEX_TYPE
3126 && targetm.calls.split_complex_arg (type))
3132 types = copy_list (types);
3134 for (p = types; p; p = TREE_CHAIN (p))
3136 tree complex_type = TREE_VALUE (p);
3138 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3139 && targetm.calls.split_complex_arg (complex_type))
3143 /* Rewrite complex type with component type. */
3144 TREE_VALUE (p) = TREE_TYPE (complex_type);
3145 next = TREE_CHAIN (p);
3147 /* Add another component type for the imaginary part. */
3148 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3149 TREE_CHAIN (p) = imag;
3150 TREE_CHAIN (imag) = next;
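/* E.g. a TYPE_ARG_TYPES chain (int, complex double, char *) becomes
(int, double, double, char *) at this point, assuming the target asks
to split complex double. */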
3152 /* Skip the newly created node. */
3160 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3161 The RETVAL parameter specifies whether the return value needs to be saved; the
3162 other parameters are documented in the emit_library_call function below. */
3165 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3166 enum libcall_type fn_type,
3167 enum machine_mode outmode, int nargs, va_list p)
3169 /* Total size in bytes of all the stack-parms scanned so far. */
3170 struct args_size args_size;
3171 /* Size of arguments before any adjustments (such as rounding). */
3172 struct args_size original_args_size;
3178 CUMULATIVE_ARGS args_so_far;
3182 enum machine_mode mode;
3185 struct locate_and_pad_arg_data locate;
3189 int old_inhibit_defer_pop = inhibit_defer_pop;
3190 rtx call_fusage = 0;
3193 int pcc_struct_value = 0;
3194 int struct_value_size = 0;
3196 int reg_parm_stack_space = 0;
3199 tree tfom; /* type_for_mode (outmode, 0) */
3201 #ifdef REG_PARM_STACK_SPACE
3202 /* Define the boundary of the register parm stack space that needs to be
3204 int low_to_save, high_to_save;
3205 rtx save_area = 0; /* Place that it is saved. */
3208 /* Record the stack usage bookkeeping at entry, so it can be restored on exit. */
3209 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3210 char *initial_stack_usage_map = stack_usage_map;
3212 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3214 #ifdef REG_PARM_STACK_SPACE
3215 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3218 /* By default, library functions cannot throw. */
3219 flags = ECF_NOTHROW;
3231 case LCT_CONST_MAKE_BLOCK:
3232 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3234 case LCT_PURE_MAKE_BLOCK:
3235 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3238 flags |= ECF_NORETURN;
3241 flags = ECF_NORETURN;
3243 case LCT_ALWAYS_RETURN:
3244 flags = ECF_ALWAYS_RETURN;
3246 case LCT_RETURNS_TWICE:
3247 flags = ECF_RETURNS_TWICE;
3252 /* Ensure current function's preferred stack boundary is at least
3254 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3255 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3257 /* If this kind of value comes back in memory,
3258 decide where in memory it should come back. */
3259 if (outmode != VOIDmode)
3261 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3262 if (aggregate_value_p (tfom, 0))
3264 #ifdef PCC_STATIC_STRUCT_RETURN
3266 = hard_function_value (build_pointer_type (tfom), 0, 0);
3267 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3268 pcc_struct_value = 1;
3270 value = gen_reg_rtx (outmode);
3271 #else /* not PCC_STATIC_STRUCT_RETURN */
3272 struct_value_size = GET_MODE_SIZE (outmode);
3273 if (value != 0 && MEM_P (value))
3276 mem_value = assign_temp (tfom, 0, 1, 1);
3278 /* This call returns a big structure. */
3279 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3283 tfom = void_type_node;
3285 /* ??? Unfinished: must pass the memory address as an argument. */
3287 /* Copy all the libcall-arguments out of the varargs data
3288 and into a vector ARGVEC.
3290 Compute how to pass each argument. We only support a very small subset
3291 of the full argument passing conventions to limit complexity here since
3292 library functions shouldn't have many args. */
3294 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3295 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3297 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3298 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3300 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3303 args_size.constant = 0;
3308 /* Now we are about to start emitting insns that can be deleted
3309 if a libcall is deleted. */
3310 if (flags & ECF_LIBCALL_BLOCK)
3315 /* If there's a structure value address to be passed,
3316 either pass it in the special place, or pass it as an extra argument. */
3317 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3319 rtx addr = XEXP (mem_value, 0);
3323 /* Make sure it is a reasonable operand for a move or push insn. */
3324 if (!REG_P (addr) && !MEM_P (addr)
3325 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3326 addr = force_operand (addr, NULL_RTX);
3328 argvec[count].value = addr;
3329 argvec[count].mode = Pmode;
3330 argvec[count].partial = 0;
3332 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3333 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3334 NULL_TREE, 1) == 0);
3336 locate_and_pad_parm (Pmode, NULL_TREE,
3337 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3340 argvec[count].reg != 0,
3342 0, NULL_TREE, &args_size, &argvec[count].locate);
3344 if (argvec[count].reg == 0 || argvec[count].partial != 0
3345 || reg_parm_stack_space > 0)
3346 args_size.constant += argvec[count].locate.size.constant;
3348 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3353 for (; count < nargs; count++)
3355 rtx val = va_arg (p, rtx);
3356 enum machine_mode mode = va_arg (p, enum machine_mode);
3358 /* We cannot convert the arg value to the mode the library wants here;
3359 must do it earlier where we know the signedness of the arg. */
3360 gcc_assert (mode != BLKmode
3361 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3363 /* Make sure it is a reasonable operand for a move or push insn. */
3364 if (!REG_P (val) && !MEM_P (val)
3365 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3366 val = force_operand (val, NULL_RTX);
3368 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3372 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3374 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3375 functions, so we have to pretend this isn't such a function. */
3376 if (flags & ECF_LIBCALL_BLOCK)
3378 rtx insns = get_insns ();
3382 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3384 /* If this was a CONST function, it is now PURE since
3385 it now reads memory. */
3386 if (flags & ECF_CONST)
3388 flags &= ~ECF_CONST;
3392 if (MEM_P (val) && !must_copy)
3396 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3398 emit_move_insn (slot, val);
3401 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3402 gen_rtx_USE (VOIDmode, slot),
3405 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3406 gen_rtx_CLOBBER (VOIDmode,
3411 val = force_operand (XEXP (slot, 0), NULL_RTX);
3414 argvec[count].value = val;
3415 argvec[count].mode = mode;
3417 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3419 argvec[count].partial
3420 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3422 locate_and_pad_parm (mode, NULL_TREE,
3423 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3426 argvec[count].reg != 0,
3428 argvec[count].partial,
3429 NULL_TREE, &args_size, &argvec[count].locate);
3431 gcc_assert (!argvec[count].locate.size.var);
3433 if (argvec[count].reg == 0 || argvec[count].partial != 0
3434 || reg_parm_stack_space > 0)
3435 args_size.constant += argvec[count].locate.size.constant;
3437 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3440 /* If this machine requires an external definition for library
3441 functions, write one out. */
3442 assemble_external_libcall (fun);
3444 original_args_size = args_size;
3445 args_size.constant = (((args_size.constant
3446 + stack_pointer_delta
3450 - stack_pointer_delta);
3452 args_size.constant = MAX (args_size.constant,
3453 reg_parm_stack_space);
3455 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3456 args_size.constant -= reg_parm_stack_space;
3459 if (args_size.constant > current_function_outgoing_args_size)
3460 current_function_outgoing_args_size = args_size.constant;
3462 if (ACCUMULATE_OUTGOING_ARGS)
3464 /* Since the stack pointer will never be pushed, it is possible for
3465 the evaluation of a parm to clobber something we have already
3466 written to the stack. Since most function calls on RISC machines
3467 do not use the stack, this is uncommon, but must work correctly.
3469 Therefore, we save any area of the stack that was already written
3470 and that we are using. Here we set up to do this by making a new
3471 stack usage map from the old one.
3473 Another approach might be to try to reorder the argument
3474 evaluations to avoid this conflicting stack usage. */
3476 needed = args_size.constant;
3478 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3479 /* Since we will be writing into the entire argument area, the
3480 map must be allocated for its entire size, not just the part that
3481 is the responsibility of the caller. */
3482 needed += reg_parm_stack_space;
3485 #ifdef ARGS_GROW_DOWNWARD
3486 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3489 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3492 stack_usage_map = alloca (highest_outgoing_arg_in_use);
3494 if (initial_highest_arg_in_use)
3495 memcpy (stack_usage_map, initial_stack_usage_map,
3496 initial_highest_arg_in_use);
3498 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3499 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3500 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3503 /* We must be careful to use virtual regs before they're instantiated,
3504 and real regs afterwards. Loop optimization, for example, can create
3505 new libcalls after we've instantiated the virtual regs, and if we
3506 use virtuals anyway, they won't match the rtl patterns. */
3508 if (virtuals_instantiated)
3509 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3511 argblock = virtual_outgoing_args_rtx;
3516 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3519 /* If we push args individually in reverse order, perform stack alignment
3520 before the first push (the last arg). */
3521 if (argblock == 0 && PUSH_ARGS_REVERSED)
3522 anti_adjust_stack (GEN_INT (args_size.constant
3523 - original_args_size.constant));
3525 if (PUSH_ARGS_REVERSED)
3536 #ifdef REG_PARM_STACK_SPACE
3537 if (ACCUMULATE_OUTGOING_ARGS)
3539 /* The argument list is the property of the called routine and it
3540 may clobber it. If the fixed area has been used for previous
3541 parameters, we must save and restore it. */
3542 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3543 &low_to_save, &high_to_save);
3547 /* Push the args that need to be pushed. */
3549 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3550 are to be pushed. */
3551 for (count = 0; count < nargs; count++, argnum += inc)
3553 enum machine_mode mode = argvec[argnum].mode;
3554 rtx val = argvec[argnum].value;
3555 rtx reg = argvec[argnum].reg;
3556 int partial = argvec[argnum].partial;
3557 int lower_bound = 0, upper_bound = 0, i;
3559 if (! (reg != 0 && partial == 0))
3561 if (ACCUMULATE_OUTGOING_ARGS)
3563 /* If this is being stored into a pre-allocated, fixed-size,
3564 stack area, save any previous data at that location. */
3566 #ifdef ARGS_GROW_DOWNWARD
3567 /* stack_slot is negative, but we want to index stack_usage_map
3568 with positive values. */
3569 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3570 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3572 lower_bound = argvec[argnum].locate.offset.constant;
3573 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3577 /* Don't worry about things in the fixed argument area;
3578 it has already been saved. */
3579 if (i < reg_parm_stack_space)
3580 i = reg_parm_stack_space;
3581 while (i < upper_bound && stack_usage_map[i] == 0)
3584 if (i < upper_bound)
3586 /* We need to make a save area. */
3588 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3589 enum machine_mode save_mode
3590 = mode_for_size (size, MODE_INT, 1);
3592 = plus_constant (argblock,
3593 argvec[argnum].locate.offset.constant);
3595 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3597 if (save_mode == BLKmode)
3599 argvec[argnum].save_area
3600 = assign_stack_temp (BLKmode,
3601 argvec[argnum].locate.size.constant,
3604 emit_block_move (validize_mem (argvec[argnum].save_area),
3606 GEN_INT (argvec[argnum].locate.size.constant),
3607 BLOCK_OP_CALL_PARM);
3611 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3613 emit_move_insn (argvec[argnum].save_area, stack_area);
3618 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3619 partial, reg, 0, argblock,
3620 GEN_INT (argvec[argnum].locate.offset.constant),
3621 reg_parm_stack_space,
3622 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3624 /* Now mark the segment we just used. */
3625 if (ACCUMULATE_OUTGOING_ARGS)
3626 for (i = lower_bound; i < upper_bound; i++)
3627 stack_usage_map[i] = 1;
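/* For instance, an argument occupying bytes 16..23 of the argument block
marks stack_usage_map[16] through [23], so a later argument that is
stored into the same bytes triggers the save-area logic above. */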
3633 /* If we pushed args in forward order, perform stack alignment
3634 after pushing the last arg. */
3635 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3636 anti_adjust_stack (GEN_INT (args_size.constant
3637 - original_args_size.constant));
3639 if (PUSH_ARGS_REVERSED)
3644 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3646 /* Now load any reg parms into their regs. */
3648 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3649 are to be pushed. */
3650 for (count = 0; count < nargs; count++, argnum += inc)
3652 enum machine_mode mode = argvec[argnum].mode;
3653 rtx val = argvec[argnum].value;
3654 rtx reg = argvec[argnum].reg;
3655 int partial = argvec[argnum].partial;
3657 /* Handle calls that pass values in multiple non-contiguous
3658 locations. The PA64 has examples of this for library calls. */
3659 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3660 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3661 else if (reg != 0 && partial == 0)
3662 emit_move_insn (reg, val);
3667 /* Any regs containing parms remain in use through the call. */
3668 for (count = 0; count < nargs; count++)
3670 rtx reg = argvec[count].reg;
3671 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3672 use_group_regs (&call_fusage, reg);
3674 use_reg (&call_fusage, reg);
3677 /* Pass the function the address in which to return a structure value. */
3678 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3680 emit_move_insn (struct_value,
3682 force_operand (XEXP (mem_value, 0),
3684 if (REG_P (struct_value))
3685 use_reg (&call_fusage, struct_value);
3688 /* Don't allow popping to be deferred, since then
3689 cse'ing of library calls could delete a call and leave the pop. */
3691 valreg = (mem_value == 0 && outmode != VOIDmode
3692 ? hard_libcall_value (outmode) : NULL_RTX);
3694 /* Stack must be properly aligned now. */
3695 gcc_assert (!(stack_pointer_delta
3696 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
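/* With a 128-bit PREFERRED_STACK_BOUNDARY the mask here is 15, i.e. we
require stack_pointer_delta to be a multiple of 16 bytes. */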
3698 before_call = get_last_insn ();
3700 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3701 will set inhibit_defer_pop to that value. */
3702 /* The return type is needed to decide how many bytes the function pops.
3703 Signedness plays no role in that, so for simplicity, we pretend it's
3704 always signed. We also assume that the list of arguments passed has
3705 no impact, so we pretend it is unknown. */
3707 emit_call_1 (fun, NULL,
3708 get_identifier (XSTR (orgfun, 0)),
3709 build_function_type (tfom, NULL_TREE),
3710 original_args_size.constant, args_size.constant,
3712 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3714 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3716 /* For calls to `setjmp', etc., inform flow.c it should complain
3717 if nonvolatile values are live. For functions that cannot return,
3718 inform flow that control does not fall through. */
3720 if (flags & ECF_NORETURN)
3722 /* The barrier note must be emitted
3723 immediately after the CALL_INSN. Some ports emit more than
3724 just a CALL_INSN above, so we must search for it here. */
3726 rtx last = get_last_insn ();
3727 while (!CALL_P (last))
3729 last = PREV_INSN (last);
3730 /* There was no CALL_INSN? */
3731 gcc_assert (last != before_call);
3734 emit_barrier_after (last);
3737 /* Now restore inhibit_defer_pop to its actual original value. */
3740 /* If call is cse'able, make appropriate pair of reg-notes around it.
3741 Test valreg so we don't crash; may safely ignore `const'
3742 if return type is void. Disable for PARALLEL return values, because
3743 we have no way to move such values into a pseudo register. */
3744 if (flags & ECF_LIBCALL_BLOCK)
3750 insns = get_insns ();
3760 if (GET_CODE (valreg) == PARALLEL)
3762 temp = gen_reg_rtx (outmode);
3763 emit_group_store (temp, valreg, NULL_TREE,
3764 GET_MODE_SIZE (outmode));
3768 temp = gen_reg_rtx (GET_MODE (valreg));
3770 /* Construct an "equal form" for the value which mentions all the
3771 arguments in order as well as the function name. */
3772 for (i = 0; i < nargs; i++)
3773 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3774 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
3776 insns = get_insns ();
3779 if (flags & ECF_PURE)
3780 note = gen_rtx_EXPR_LIST (VOIDmode,
3781 gen_rtx_USE (VOIDmode,
3782 gen_rtx_MEM (BLKmode,
3783 gen_rtx_SCRATCH (VOIDmode))),
3786 emit_libcall_block (insns, temp, valreg, note);
3793 /* Copy the value to the right place. */
3794 if (outmode != VOIDmode && retval)
3800 if (value != mem_value)
3801 emit_move_insn (value, mem_value);
3803 else if (GET_CODE (valreg) == PARALLEL)
3806 value = gen_reg_rtx (outmode);
3807 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
3809 else if (value != 0)
3810 emit_move_insn (value, valreg);
  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
        restore_fixed_argument_area (save_area, argblock,
                                     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
        if (argvec[count].save_area)
          {
            enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
            rtx adr = plus_constant (argblock,
                                     argvec[count].locate.offset.constant);
            rtx stack_area = gen_rtx_MEM (save_mode,
                                          memory_address (save_mode, adr));

            if (save_mode == BLKmode)
              emit_block_move (stack_area,
                               validize_mem (argvec[count].save_area),
                               GEN_INT (argvec[count].locate.size.constant),
                               BLOCK_OP_CALL_PARM);
            else
              emit_move_insn (stack_area, argvec[count].save_area);
          }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  return value;
}
3850 /* Output a library call to function FUN (a SYMBOL_REF rtx)
3851 (emitting the queue unless NO_QUEUE is nonzero),
3852 for a value of mode OUTMODE,
3853 with NARGS different arguments, passed as alternating rtx values
3854 and machine_modes to convert them to.
3856 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
3857 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
3858 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
   LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
   REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
3861 or other LCT_ value for other types of library calls. */
void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
                   enum machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
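/* Illustrative use (a sketch, not taken from this file): callers supply each
   argument value followed by the mode to convert it to, e.g. a memset-style
   libcall where DEST and LEN stand for rtx values owned by the caller:

     emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
                        dest, Pmode,
                        const0_rtx, TYPE_MODE (integer_type_node),
                        len, TYPE_MODE (sizetype));
*/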
3874 /* Like emit_library_call except that an extra argument, VALUE,
3875 comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)
3879 This function returns an rtx for where the value is to be found.
3880 If VALUE is nonzero, VALUE is returned. */
rtx
emit_library_call_value (rtx orgfun, rtx value,
                         enum libcall_type fn_type,
                         enum machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
                                      nargs, p);
  va_end (p);

  return result;
}
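/* Illustrative use (a sketch, not taken from this file): LIBFUNC, TARGET,
   OP0 and OP1 stand for rtx values prepared by the caller.  Because VALUE is
   nonzero here, the returned rtx is TARGET itself:

     rtx res = emit_library_call_value (libfunc, target, LCT_CONST, SImode,
                                        2, op0, SImode, op1, SImode);
*/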
3898 /* Store a single argument for a function call
3899 into the register or memory area where it must be passed.
3900 *ARG describes the argument value and where to pass it.
3902 ARGBLOCK is the address of the stack-block for all the arguments,
3903 or 0 on a machine where arguments are pushed individually.
3905 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3906 so must be careful about how the stack is used.
3908 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3909 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3910 that we need not worry about saving and restoring the stack.
3912 FNDECL is the declaration of the function we are calling.
   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */

static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
               int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;
  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
3937 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3938 save any previous data at that location. */
      if (argblock && ! variable_size && arg->stack)
        {
#ifdef ARGS_GROW_DOWNWARD
          /* stack_slot is negative, but we want to index stack_usage_map
             with positive values.  */
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
          else
            upper_bound = 0;

          lower_bound = upper_bound - arg->locate.size.constant;
#else
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
          else
            lower_bound = 0;

          upper_bound = lower_bound + arg->locate.size.constant;
#endif

          i = lower_bound;
          /* Don't worry about things in the fixed argument area;
             it has already been saved.  */
          if (i < reg_parm_stack_space)
            i = reg_parm_stack_space;
          while (i < upper_bound && stack_usage_map[i] == 0)
            i++;

          if (i < upper_bound)
            {
3969 /* We need to make a save area. */
3970 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
3971 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
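              /* Note (illustrative, not from the original source): since the
                 LIMIT argument is nonzero, mode_for_size hands back an
                 integer mode only when one of exactly SIZE bits exists --
                 e.g. a 4-byte slot usually yields SImode -- and BLKmode
                 otherwise, which is what selects the emit_block_move path
                 below.  */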
3972 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
3973 rtx stack_area = gen_rtx_MEM (save_mode, adr);
              if (save_mode == BLKmode)
                {
                  tree ot = TREE_TYPE (arg->tree_value);
                  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
                                                       | TYPE_QUAL_CONST));

                  arg->save_area = assign_temp (nt, 0, 1, 1);
                  preserve_temp_slots (arg->save_area);
                  emit_block_move (validize_mem (arg->save_area), stack_area,
                                   expr_size (arg->tree_value),
                                   BLOCK_OP_CALL_PARM);
                }
              else
                {
                  arg->save_area = gen_reg_rtx (save_mode);
                  emit_move_insn (arg->save_area, stack_area);
                }
            }
        }
    }
3996 /* If this isn't going to be placed on both the stack and in registers,
3997 set up the register and number of words. */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
        reg = arg->tail_call_reg;
      else
        reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;
4016 /* If this is being passed partially in a register, we can't evaluate
4017 it directly into its stack slot. Otherwise, we can. */
4018 if (arg->value == 0)
4020 /* stack_arg_under_construction is nonzero if a function argument is
4021 being evaluated directly into the outgoing argument list and
4022 expand_call must take special action to preserve the argument list
4023 if it is called recursively.
4025 For scalar function arguments stack_usage_map is sufficient to
4026 determine which stack slots must be saved and restored. Scalar
4027 arguments in general have pass_on_stack == 0.
4029 If this argument is initialized by a function which takes the
4030 address of the argument (a C++ constructor or a C function
4031 returning a BLKmode structure), then stack_usage_map is
4032 insufficient and expand_call must push the stack around the
4033 function call. Such arguments have pass_on_stack == 1.
4035 Note that it is always safe to set stack_arg_under_construction,
4036 but this generates suboptimal code if set when not needed. */
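     For instance (illustrative only):

       struct big { char c[64]; };
       extern struct big make_big (void);
       ... use_big (make_big ()) ...

     make_big returns a BLKmode structure, so its result may be constructed
     directly in the outgoing argument block of the call to use_big; the
     counter below tells expand_call that it must protect the partially-built
     argument area around such a nested call.  */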
4038 if (arg->pass_on_stack)
4039 stack_arg_under_construction++;
      arg->value = expand_expr (pval,
                                (partial
                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                                ? NULL_RTX : arg->stack,
                                VOIDmode, EXPAND_STACK_PARM);
      /* If we are promoting the object (or if, for any other reason, the
         mode doesn't agree), convert the mode now.  */
4050 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4051 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4052 arg->value, arg->unsignedp);
4054 if (arg->pass_on_stack)
4055 stack_arg_under_construction--;
4058 /* Don't allow anything left on stack from computation
4059 of argument to alloca. */
4060 if (flags & ECF_MAY_BE_ALLOCA)
4061 do_pending_stack_adjust ();
  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;

4070 /* Argument is a scalar, not entirely passed in registers.
4071 (If part is passed in registers, arg->partial says how much
4072 and emit_push_insn will take care of putting it there.)
4074 Push it, and if its size is less than the
4075 amount of space allocated to it,
4076 also bump stack pointer by the additional space.
4077 Note that in C the default argument promotions
4078 will prevent such mismatches. */
4080 size = GET_MODE_SIZE (arg->mode);
4081 /* Compute how much space the push instruction will push.
4082 On many machines, pushing a byte will advance the stack
4083 pointer by a halfword. */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

4089 /* Compute how much space the argument should get:
4090 round up to a multiple of the alignment for arguments. */
4091 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4092 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4093 / (PARM_BOUNDARY / BITS_PER_UNIT))
4094 * (PARM_BOUNDARY / BITS_PER_UNIT));
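      /* Worked example (illustrative, not from the original source): with a
         32-bit PARM_BOUNDARY the unit is 4 bytes, so a 1-byte scalar that is
         padded gets used = ((1 + 4 - 1) / 4) * 4 = 4, and the 3 bytes of
         padding (used - size) are what the emit_push_insn call below receives
         as its EXTRA argument.  */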
4096 /* This isn't already where we want it on the stack, so put it there.
4097 This can either be done with push or copy insns. */
4098 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4099 PARM_BOUNDARY, partial, reg, used - size, argblock,
4100 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4101 ARGS_SIZE_RTX (arg->locate.alignment_pad));
      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;
4116 /* Pushing a nonscalar.
4117 If part is passed in registers, PARTIAL says how much
4118 and emit_push_insn will take care of putting it there. */
4120 /* Round its size up to a multiple
4121 of the allocation unit for arguments. */
      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because emit_push_insn
             for BLKmode is careful to avoid it.  */
          excess = (arg->locate.size.constant
                    - int_size_in_bytes (TREE_TYPE (pval))
                    + partial * UNITS_PER_WORD);
          size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype), 0);
        }
4139 parm_align = arg->locate.boundary;
4141 /* When an argument is padded down, the block is aligned to
4142 PARM_BOUNDARY, but the actual argument isn't. */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else if (excess)
            {
              unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, excess_align);
            }
        }
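      /* Example (illustrative, not from the original source): if EXCESS is 6,
         excess & -excess isolates its lowest set bit, 2, so excess_align is
         2 * BITS_PER_UNIT = 16 and the padded-down data can only be assumed
         16-bit aligned within its PARM_BOUNDARY-aligned slot.  */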
      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  */
          rtx x = arg->value;
          int i = 0;

          if (XEXP (x, 0) == current_function_internal_arg_pointer
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) ==
                     current_function_internal_arg_pointer
                  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
            {
              if (XEXP (x, 0) != current_function_internal_arg_pointer)
                i = INTVAL (XEXP (XEXP (x, 0), 1));
4170 /* expand_call should ensure this. */
4171 gcc_assert (!arg->locate.offset.var
4172 && GET_CODE (size_rtx) == CONST_INT);
              if (arg->locate.offset.constant > i)
                {
                  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
              else if (arg->locate.offset.constant < i)
                {
                  if (i < arg->locate.offset.constant + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
            }
        }
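      /* Worked example (illustrative, not from the original source): with
         i = 8, INTVAL (size_rtx) = 16 and an outgoing offset of 12, the
         incoming bytes [8, 24) and the outgoing slot [12, 28) overlap, so
         sibcall_failure is set; equal offsets are left alone because source
         and destination then coincide.  */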
4187 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4188 parm_align, partial, reg, excess, argblock,
4189 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4190 ARGS_SIZE_RTX (arg->locate.alignment_pad));
      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }
  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
        = emit_group_load_into_temps (arg->reg, arg->value, type,
                                      int_size_in_bytes (type));
    }
4212 /* Mark all slots this store used. */
4213 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4214 && argblock && ! variable_size && arg->stack)
4215 for (i = lower_bound; i < upper_bound; i++)
4216 stack_usage_map[i] = 1;
  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;
  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}
/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
                             tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}
/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type is such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}
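/* Worked example (illustrative, not from the original source): a 5-byte
   BLKmode struct on a target with a 32-bit PARM_BOUNDARY is not a multiple
   of the 4-byte unit; if the target also pads it upward on a big-endian
   machine (or downward on little-endian), copying it into a register would
   leave the bytes in the wrong part of the register, so the hook answers
   true and the argument stays on the stack.  */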