1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
29 #include "insn-flags.h"
/* If the target gives no preferred stack boundary, fall back to the
   required STACK_BOUNDARY.  */
33 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
34 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first.
40 They should if the stack and args grow in opposite directions, but
41 only if we have push insns. */
45 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
46 #define PUSH_ARGS_REVERSED /* If it's last to first */
51 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
52 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
54 /* Data structure and subroutines used within expand_call. */
58 /* Tree node for this argument. */
60 /* Mode for value; TYPE_MODE unless promoted. */
61 enum machine_mode mode;
62 /* Current RTL value for argument, or 0 if it isn't precomputed. */
64 /* Initially-computed RTL value for argument; only for const functions. */
66 /* Register to pass this argument in, 0 if passed on stack, or a
67 PARALLEL if the arg is to be copied into multiple non-contiguous
70 /* If REG was promoted from the actual mode of the argument expression,
71 indicates whether the promotion is sign- or zero-extended. */
73 /* Number of registers to use. 0 means put the whole arg in registers.
74 Also 0 if not passed in registers. */
76 /* Non-zero if argument must be passed on stack.
77 Note that some arguments may be passed on the stack
78 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
79 pass_on_stack identifies arguments that *cannot* go in registers. */
81 /* Offset of this argument from beginning of stack-args. */
82 struct args_size offset;
83 /* Similar, but offset to the start of the stack slot. Different from
84 OFFSET if this arg pads downward. */
85 struct args_size slot_offset;
86 /* Size of this argument on the stack, rounded up for any padding it gets,
87 parts of the argument passed in registers do not count.
88 If REG_PARM_STACK_SPACE is defined, then register parms
89 are counted here as well. */
90 struct args_size size;
91 /* Location on the stack at which parameter should be stored. The store
92 has already been done if STACK == VALUE. */
94 /* Location on the stack of the start of this argument slot. This can
95 differ from STACK if this arg pads downward. This location is known
96 to be aligned to FUNCTION_ARG_BOUNDARY. */
98 #ifdef ACCUMULATE_OUTGOING_ARGS
99 /* Place that this stack area has been saved, if needed. */
102 /* If an argument's alignment does not permit direct copying into registers,
103 copy in smaller-sized pieces into pseudos. These are stored in a
104 block pointed to by this field. The next field says how many
105 word-sized pseudos we made. */
110 #ifdef ACCUMULATE_OUTGOING_ARGS
111 /* A vector of one char per byte of stack space. A byte is non-zero if
112 the corresponding stack location has been used.
113 This vector is used to prevent a function call within an argument from
114 clobbering any stack already set up. */
115 static char *stack_usage_map;
117 /* Size of STACK_USAGE_MAP. */
118 static int highest_outgoing_arg_in_use;
120 /* stack_arg_under_construction is nonzero when an argument may be
121 initialized with a constructor call (including a C function that
122 returns a BLKmode struct) and expand_call must take special action
123 to make sure the object being constructed does not overlap the
124 argument list for the constructor call. */
125 int stack_arg_under_construction;
/* Forward declarations for the static helper functions in this file.  */
128 static int calls_function PROTO ((tree, int));
129 static int calls_function_1 PROTO ((tree, int));
130 static void emit_call_1 PROTO ((rtx, tree, tree, HOST_WIDE_INT,
131 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
132 rtx, int, rtx, int));
133 static void special_function_p PROTO ((char *, tree, int *, int *,
135 static void precompute_register_parameters PROTO ((int, struct arg_data *,
137 static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
139 static void store_unaligned_arguments_into_pseudos PROTO ((struct arg_data *,
141 static int finalize_must_preallocate PROTO ((int, int,
143 struct args_size *));
144 static void precompute_arguments PROTO ((int, int, int,
146 struct args_size *));
147 static int compute_argument_block_size PROTO ((int,
148 struct args_size *));
149 static void initialize_argument_information PROTO ((int,
156 static void compute_argument_addresses PROTO ((struct arg_data *,
158 static rtx rtx_for_function_call PROTO ((tree, tree));
159 static void load_register_parameters PROTO ((struct arg_data *,
/* The fixed (register-parm) argument area save/restore pair is only
   needed when outgoing args accumulate into a reserved area.  */
162 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
163 static rtx save_fixed_argument_area PROTO ((int, rtx, int *, int *));
164 static void restore_fixed_argument_area PROTO ((rtx, rtx, int, int));
167 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
170 If WHICH is 0, return 1 if EXP contains a call to any function.
171 Actually, we only need return 1 if evaluating EXP would require pushing
172 arguments on the stack, but that is too difficult to compute, so we just
173 assume any function call might require the stack. */
/* List of SAVE_EXPRs already visited during the current walk; used by
   calls_function_1 to avoid scanning the same SAVE_EXPR twice.  */
175 static tree calls_function_save_exprs;
178 calls_function (exp, which)
/* Clear the visited list both before and after the walk so no stale
   entries survive between top-level invocations.  */
183 calls_function_save_exprs = 0;
184 val = calls_function_1 (exp, which);
185 calls_function_save_exprs = 0;
/* Recursive worker for calls_function; walks the tree EXP looking for
   function calls (or, when WHICH is 1, for alloca-like calls).  */
190 calls_function_1 (exp, which)
195 enum tree_code code = TREE_CODE (exp);
196 int type = TREE_CODE_CLASS (code);
197 int length = tree_code_length[(int) code];
199 /* If this code is language-specific, we don't know what it will do. */
200 if ((int) code >= NUM_TREE_CODES)
203 /* Only expressions and references can contain calls. */
204 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
213 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
214 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
217 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
/* The callee matters here if it is the alloca built-in itself, or is
   an already-compiled function known to call alloca.  */
219 if ((DECL_BUILT_IN (fndecl)
220 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
221 || (DECL_SAVED_INSNS (fndecl)
222 && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
223 & FUNCTION_FLAGS_CALLS_ALLOCA)))
227 /* Third operand is RTL. */
/* A SAVE_EXPR that already has RTL, or that we have already visited
   on this walk, need not be scanned again.  */
232 if (SAVE_EXPR_RTL (exp) != 0)
234 if (value_member (exp, calls_function_save_exprs))
236 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
237 calls_function_save_exprs);
238 return (TREE_OPERAND (exp, 0) != 0
239 && calls_function_1 (TREE_OPERAND (exp, 0), which));
/* For a BLOCK, scan the initializers of its local variables, then
   recurse into each sub-block.  */
245 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
246 if (DECL_INITIAL (local) != 0
247 && calls_function_1 (DECL_INITIAL (local), which))
251 register tree subblock;
253 for (subblock = BLOCK_SUBBLOCKS (exp);
255 subblock = TREE_CHAIN (subblock))
256 if (calls_function_1 (subblock, which))
261 case METHOD_CALL_EXPR:
265 case WITH_CLEANUP_EXPR:
/* Finally, scan every operand of EXP.  */
276 for (i = 0; i < length; i++)
277 if (TREE_OPERAND (exp, i) != 0
278 && calls_function_1 (TREE_OPERAND (exp, i), which))
284 /* Force FUNEXP into a form suitable for the address of a CALL,
285 and return that as an rtx. Also load the static chain register
286 if FNDECL is a nested function.
288 CALL_FUSAGE points to a variable holding the prospective
289 CALL_INSN_FUNCTION_USAGE information. */
292 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
298 rtx static_chain_value = 0;
300 funexp = protect_from_queue (funexp, 0);
303 /* Get possible static chain value for nested function in C. */
304 static_chain_value = lookup_static_chain (fndecl);
306 /* Make a valid memory address and copy constants thru pseudo-regs,
307 but not for a constant address if -fno-function-cse. */
308 if (GET_CODE (funexp) != SYMBOL_REF)
309 /* If we are using registers for parameters, force the
310 function address into a register now. */
311 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
312 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
313 : memory_address (FUNCTION_MODE, funexp));
/* Function-address CSE: keep the address in a pseudo so repeated calls
   can share it, unless the target or -fno-function-cse forbids it.  */
316 #ifndef NO_FUNCTION_CSE
317 if (optimize && ! flag_no_function_cse)
318 #ifdef NO_RECURSIVE_FUNCTION_CSE
319 if (fndecl != current_function_decl)
321 funexp = force_reg (Pmode, funexp);
325 if (static_chain_value != 0)
327 emit_move_insn (static_chain_rtx, static_chain_value);
/* Record the static chain register in CALL_FUSAGE so the call insn
   is known to use it.  */
329 if (GET_CODE (static_chain_rtx) == REG)
330 use_reg (call_fusage, static_chain_rtx);
336 /* Generate instructions to call function FUNEXP,
337 and optionally pop the results.
338 The CALL_INSN is the first insn generated.
340 FNDECL is the declaration node of the function. This is given to the
341 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
343 FUNTYPE is the data type of the function. This is given to the macro
344 RETURN_POPS_ARGS to determine whether this function pops its own args.
345 We used to allow an identifier for library functions, but that doesn't
346 work when the return type is an aggregate type and the calling convention
347 says that the pointer to this aggregate is to be popped by the callee.
349 STACK_SIZE is the number of bytes of arguments on the stack,
350 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
351 This is both to put into the call insn and
352 to generate explicit popping code if necessary.
354 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
355 It is zero if this call doesn't want a structure value.
357 NEXT_ARG_REG is the rtx that results from executing
358 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
359 just after all the args have had their registers assigned.
360 This could be whatever you like, but normally it is the first
361 arg-register beyond those used for args in this call,
362 or 0 if all the arg-registers are used in this call.
363 It is passed on to `gen_call' so you can put this info in the call insn.
365 VALREG is a hard register in which a value is returned,
366 or 0 if the call does not return a value.
368 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
369 the args to this call were processed.
370 We restore `inhibit_defer_pop' to that value.
372 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
373 denote registers used by the called function.
375 IS_CONST is true if this is a `const' call. */
378 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
379 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
380 call_fusage, is_const)
382 tree fndecl ATTRIBUTE_UNUSED;
383 tree funtype ATTRIBUTE_UNUSED;
384 HOST_WIDE_INT stack_size;
385 HOST_WIDE_INT rounded_stack_size;
386 HOST_WIDE_INT struct_value_size;
389 int old_inhibit_defer_pop;
393 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
394 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
396 #ifndef ACCUMULATE_OUTGOING_ARGS
397 int already_popped = 0;
/* Number of argument bytes the callee itself pops; 0 if the caller
   must pop them.  */
398 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
401 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
402 and we don't want to load it into a register as an optimization,
403 because prepare_call_address already did it if it should be done. */
404 if (GET_CODE (funexp) != SYMBOL_REF)
405 funexp = memory_address (FUNCTION_MODE, funexp);
407 #ifndef ACCUMULATE_OUTGOING_ARGS
408 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
409 /* If the target has "call" or "call_value" insns, then prefer them
410 if no arguments are actually popped. If the target does not have
411 "call" or "call_value" insns, then we must use the popping versions
412 even if the call has no arguments to pop. */
413 if (HAVE_call_pop && HAVE_call_value_pop
414 #if defined (HAVE_call) && defined (HAVE_call_value)
415 && (n_popped > 0 || ! HAVE_call || ! HAVE_call_value)
419 rtx n_pop = GEN_INT (n_popped);
422 /* If this subroutine pops its own args, record that in the call insn
423 if possible, for the sake of frame pointer elimination. */
/* call_value_pop is used when the call produces a value in VALREG;
   plain call_pop otherwise.  */
426 pat = gen_call_value_pop (valreg,
427 gen_rtx_MEM (FUNCTION_MODE, funexp),
428 rounded_stack_size_rtx, next_arg_reg, n_pop);
430 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
431 rounded_stack_size_rtx, next_arg_reg, n_pop);
433 emit_call_insn (pat);
440 #if defined (HAVE_call) && defined (HAVE_call_value)
441 if (HAVE_call && HAVE_call_value)
444 emit_call_insn (gen_call_value (valreg,
445 gen_rtx_MEM (FUNCTION_MODE, funexp),
446 rounded_stack_size_rtx, next_arg_reg,
449 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
450 rounded_stack_size_rtx, next_arg_reg,
451 struct_value_size_rtx));
457 /* Find the CALL insn we just emitted. */
458 for (call_insn = get_last_insn ();
459 call_insn && GET_CODE (call_insn) != CALL_INSN;
460 call_insn = PREV_INSN (call_insn))
466 /* Put the register usage information on the CALL. If there is already
467 some usage information, put ours at the end. */
468 if (CALL_INSN_FUNCTION_USAGE (call_insn))
472 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
473 link = XEXP (link, 1))
476 XEXP (link, 1) = call_fusage;
479 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
481 /* If this is a const call, then set the insn's unchanging bit. */
483 CONST_CALL_P (call_insn) = 1;
485 /* Restore this now, so that we do defer pops for this call's args
486 if the context of the call as a whole permits. */
487 inhibit_defer_pop = old_inhibit_defer_pop;
489 #ifndef ACCUMULATE_OUTGOING_ARGS
490 /* If returning from the subroutine does not automatically pop the args,
491 we need an instruction to pop them sooner or later.
492 Perhaps do it now; perhaps just record how much space to pop later.
494 If returning from the subroutine does pop the args, indicate that the
495 stack pointer will be changed. */
/* The callee pops N_POPPED bytes itself: mark the stack pointer as
   clobbered by the call and reduce the amount left for us to pop.  */
500 CALL_INSN_FUNCTION_USAGE (call_insn)
501 = gen_rtx_EXPR_LIST (VOIDmode,
502 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
503 CALL_INSN_FUNCTION_USAGE (call_insn));
504 rounded_stack_size -= n_popped;
505 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
508 if (rounded_stack_size != 0)
/* When allowed, accumulate the pop into pending_stack_adjust so
   several calls can share a single stack adjustment; otherwise
   adjust the stack immediately.  */
510 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
511 pending_stack_adjust += rounded_stack_size;
513 adjust_stack (rounded_stack_size_rtx);
518 /* Determine if the function identified by NAME and FNDECL is one with
519 special properties we wish to know about.
521 For example, if the function might return more than one time (setjmp), then
522 set RETURNS_TWICE to a nonzero value.
524 Similarly set IS_LONGJMP for if the function is in the longjmp family.
526 Set IS_MALLOC for any of the standard memory allocation functions which
527 allocate from the heap.
529 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
530 space from the stack such as alloca. */
533 special_function_p (name, fndecl, returns_twice, is_longjmp,
534 is_malloc, may_be_alloca)
/* Only consider plausible candidates: a known name, short enough to be
   one of the special functions, declared at file scope and extern.  */
547 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
548 /* Exclude functions not at the file scope, or not `extern',
549 since they are not the magic functions we would otherwise
551 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
555 /* We assume that alloca will always be called by name. It
556 makes no sense to pass it as a pointer-to-function to
557 anything that does not understand its behavior. */
/* Length checks before strcmp serve as cheap filters.  */
559 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
561 && ! strcmp (name, "alloca"))
562 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
564 && ! strcmp (name, "__builtin_alloca"))));
566 /* Disregard prefix _, __ or __x. */
569 if (name[1] == '_' && name[2] == 'x')
571 else if (name[1] == '_')
/* Recognize the setjmp/longjmp family; the leading-character tests
   are quick filters before the full string compares.  */
581 && (! strcmp (tname, "setjmp")
582 || ! strcmp (tname, "setjmp_syscall")))
584 && ! strcmp (tname, "sigsetjmp"))
586 && ! strcmp (tname, "savectx")));
588 && ! strcmp (tname, "siglongjmp"))
591 else if ((tname[0] == 'q' && tname[1] == 's'
592 && ! strcmp (tname, "qsetjmp"))
593 || (tname[0] == 'v' && tname[1] == 'f'
594 && ! strcmp (tname, "vfork")))
597 else if (tname[0] == 'l' && tname[1] == 'o'
598 && ! strcmp (tname, "longjmp"))
600 /* XXX should have "malloc" attribute on functions instead
601 of recognizing them by name. */
602 else if (! strcmp (tname, "malloc")
603 || ! strcmp (tname, "calloc")
604 || ! strcmp (tname, "realloc")
605 /* Note use of NAME rather than TNAME here. These functions
606 are only reserved when preceded with __. */
607 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
608 || ! strcmp (name, "__nw") /* mangled __builtin_new */
609 || ! strcmp (name, "__builtin_new")
610 || ! strcmp (name, "__builtin_vec_new"))
615 /* Precompute all register parameters as described by ARGS, storing values
616 into fields within the ARGS array.
618 NUM_ACTUALS indicates the total number of elements in the ARGS array.
620 Set REG_PARM_SEEN if we encounter a register parameter. */
623 precompute_register_parameters (num_actuals, args, reg_parm_seen)
625 struct arg_data *args;
/* Only arguments passed (at least partly) in registers are handled
   here; pure stack arguments are left for later.  */
632 for (i = 0; i < num_actuals; i++)
633 if (args[i].reg != 0 && ! args[i].pass_on_stack)
637 if (args[i].value == 0)
640 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
642 preserve_temp_slots (args[i].value);
645 /* ANSI doesn't require a sequence point here,
646 but PCC has one, so this will avoid some problems. */
650 /* If we are to promote the function arg to a wider mode,
653 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
655 = convert_modes (args[i].mode,
656 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
657 args[i].value, args[i].unsignedp);
659 /* If the value is expensive, and we are inside an appropriately
660 short loop, put the value into a pseudo and then put the pseudo
663 For small register classes, also do this if this call uses
664 register parameters. This is to avoid reload conflicts while
665 loading the parameters registers. */
667 if ((! (GET_CODE (args[i].value) == REG
668 || (GET_CODE (args[i].value) == SUBREG
669 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
670 && args[i].mode != BLKmode
671 && rtx_cost (args[i].value, SET) > 2
672 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
673 || preserve_subexpressions_p ()))
674 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
678 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
680 /* The argument list is the property of the called routine and it
681 may clobber it. If the fixed area has been used for previous
682 parameters, we must save and restore it. */
684 save_fixed_argument_area (reg_parm_stack_space, argblock,
685 low_to_save, high_to_save)
686 int reg_parm_stack_space;
692 rtx save_area = NULL_RTX;
694 /* Compute the boundary of the area that needs to be saved, if any. */
695 #ifdef ARGS_GROW_DOWNWARD
696 for (i = 0; i < reg_parm_stack_space + 1; i++)
698 for (i = 0; i < reg_parm_stack_space; i++)
/* Skip bytes that are beyond the used portion of the map or that
   are not marked as in use.  */
701 if (i >= highest_outgoing_arg_in_use
702 || stack_usage_map[i] == 0)
705 if (*low_to_save == -1)
/* *LOW_TO_SAVE >= 0 means some part of the fixed area is in use
   and must be preserved across the call.  */
711 if (*low_to_save >= 0)
713 int num_to_save = *high_to_save - *low_to_save + 1;
714 enum machine_mode save_mode
715 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
718 /* If we don't have the required alignment, must do this in BLKmode. */
719 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
720 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
723 #ifdef ARGS_GROW_DOWNWARD
724 stack_area = gen_rtx_MEM (save_mode,
725 memory_address (save_mode,
726 plus_constant (argblock,
729 stack_area = gen_rtx_MEM (save_mode,
730 memory_address (save_mode,
731 plus_constant (argblock,
/* BLKmode saves go to a stack temporary via a block move; anything
   narrower fits in a pseudo register.  */
734 if (save_mode == BLKmode)
736 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
737 emit_block_move (validize_mem (save_area), stack_area,
738 GEN_INT (num_to_save),
739 PARM_BOUNDARY / BITS_PER_UNIT);
743 save_area = gen_reg_rtx (save_mode);
744 emit_move_insn (save_area, stack_area);
/* Restore the fixed (register-parm) argument area from SAVE_AREA, the
   copy made by save_fixed_argument_area.  ARGBLOCK, HIGH_TO_SAVE and
   LOW_TO_SAVE describe the stack region to restore.  */
751 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
757 enum machine_mode save_mode = GET_MODE (save_area);
758 #ifdef ARGS_GROW_DOWNWARD
760 = gen_rtx_MEM (save_mode,
761 memory_address (save_mode,
762 plus_constant (argblock,
766 = gen_rtx_MEM (save_mode,
767 memory_address (save_mode,
768 plus_constant (argblock,
/* Mirror of the save: register move for narrow saves, block move
   for BLKmode saves.  */
772 if (save_mode != BLKmode)
773 emit_move_insn (stack_area, save_area);
775 emit_block_move (stack_area, validize_mem (save_area),
776 GEN_INT (high_to_save - low_to_save + 1),
777 PARM_BOUNDARY / BITS_PER_UNIT);
781 /* If any elements in ARGS refer to parameters that are to be passed in
782 registers, but not in memory, and whose alignment does not permit a
783 direct copy into registers. Copy the values into a group of pseudos
784 which we will later copy into the appropriate hard registers.
786 Pseudos for each unaligned argument will be stored into the array
787 args[argnum].aligned_regs. The caller is responsible for deallocating
788 the aligned_regs array if it is nonzero. */
791 store_unaligned_arguments_into_pseudos (args, num_actuals)
792 struct arg_data *args;
/* Only BLKmode register arguments whose type alignment is below the
   word/biggest alignment need this treatment.  */
797 for (i = 0; i < num_actuals; i++)
798 if (args[i].reg != 0 && ! args[i].pass_on_stack
799 && args[i].mode == BLKmode
800 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
801 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
803 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
804 int big_endian_correction = 0;
806 args[i].n_aligned_regs
807 = args[i].partial ? args[i].partial
808 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
810 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
811 * args[i].n_aligned_regs);
813 /* Structures smaller than a word are aligned to the least
814 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
815 this means we must skip the empty high order bytes when
816 calculating the bit offset. */
817 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
818 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
/* Copy the argument into the pseudos one word at a time.  */
820 for (j = 0; j < args[i].n_aligned_regs; j++)
822 rtx reg = gen_reg_rtx (word_mode);
823 rtx word = operand_subword_force (args[i].value, j, BLKmode);
824 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
825 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
827 args[i].aligned_regs[j] = reg;
829 /* There is no need to restrict this code to loading items
830 in TYPE_ALIGN sized hunks. The bitfield instructions can
831 load up entire word sized registers efficiently.
833 ??? This may not be needed anymore.
834 We use to emit a clobber here but that doesn't let later
835 passes optimize the instructions we emit. By storing 0 into
836 the register later passes know the first AND to zero out the
837 bitfield being set in the register is unnecessary. The store
838 of 0 will be deleted as will at least the first AND. */
840 emit_move_insn (reg, const0_rtx);
842 bytes -= bitsize / BITS_PER_UNIT;
843 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
844 extract_bit_field (word, bitsize, 0, 1,
847 bitalign / BITS_PER_UNIT,
849 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
854 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
857 NUM_ACTUALS is the total number of parameters.
859 N_NAMED_ARGS is the total number of named arguments.
861 FNDECL is the tree code for the target of this call (if known)
863 ARGS_SO_FAR holds state needed by the target to know where to place
866 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
867 for arguments which are passed in registers.
869 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
870 and may be modified by this routine.
872 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
873 flags which may be modified by this routine. */
876 initialize_argument_information (num_actuals, args, args_size, n_named_args,
877 actparms, fndecl, args_so_far,
878 reg_parm_stack_space, old_stack_level,
879 old_pending_adj, must_preallocate, is_const)
880 int num_actuals ATTRIBUTE_UNUSED;
881 struct arg_data *args;
882 struct args_size *args_size;
883 int n_named_args ATTRIBUTE_UNUSED;
886 CUMULATIVE_ARGS *args_so_far;
887 int reg_parm_stack_space;
888 rtx *old_stack_level;
889 int *old_pending_adj;
890 int *must_preallocate;
893 /* 1 if scanning parms front to back, -1 if scanning back to front. */
896 /* Count arg position in order args appear. */
902 args_size->constant = 0;
905 /* In this loop, we consider args in the order they are written.
906 We fill up ARGS from the front or from the back if necessary
907 so that in any case the first arg to be pushed ends up at the front. */
909 #ifdef PUSH_ARGS_REVERSED
910 i = num_actuals - 1, inc = -1;
911 /* In this case, must reverse order of args
912 so that we compute and push the last arg first. */
917 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
918 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
920 tree type = TREE_TYPE (TREE_VALUE (p));
922 enum machine_mode mode;
924 args[i].tree_value = TREE_VALUE (p);
926 /* Replace erroneous argument with constant zero. */
927 if (type == error_mark_node || TYPE_SIZE (type) == 0)
928 args[i].tree_value = integer_zero_node, type = integer_type_node;
930 /* If TYPE is a transparent union, pass things the way we would
931 pass the first field of the union. We have already verified that
932 the modes are the same. */
933 if (TYPE_TRANSPARENT_UNION (type))
934 type = TREE_TYPE (TYPE_FIELDS (type));
936 /* Decide where to pass this arg.
938 args[i].reg is nonzero if all or part is passed in registers.
940 args[i].partial is nonzero if part but not all is passed in registers,
941 and the exact value says how many words are passed in registers.
943 args[i].pass_on_stack is nonzero if the argument must at least be
944 computed on the stack. It may then be loaded back into registers
945 if args[i].reg is nonzero.
947 These decisions are driven by the FUNCTION_... macros and must agree
948 with those made by function.c. */
950 /* See if this argument should be passed by invisible reference. */
951 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
952 && contains_placeholder_p (TYPE_SIZE (type)))
953 || TREE_ADDRESSABLE (type)
954 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
955 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
956 type, argpos < n_named_args)
960 /* If we're compiling a thunk, pass through invisible
961 references instead of making a copy. */
962 if (current_function_is_thunk
963 #ifdef FUNCTION_ARG_CALLEE_COPIES
964 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
965 type, argpos < n_named_args)
966 /* If it's in a register, we must make a copy of it too. */
967 /* ??? Is this a sufficient test? Is there a better one? */
968 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
969 && REG_P (DECL_RTL (args[i].tree_value)))
970 && ! TREE_ADDRESSABLE (type))
974 /* C++ uses a TARGET_EXPR to indicate that we want to make a
975 new object from the argument. If we are passing by
976 invisible reference, the callee will do that for us, so we
977 can strip off the TARGET_EXPR. This is not always safe,
978 but it is safe in the only case where this is a useful
979 optimization; namely, when the argument is a plain object.
980 In that case, the frontend is just asking the backend to
981 make a bitwise copy of the argument. */
983 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
984 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
985 (args[i].tree_value, 1)))
987 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
988 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
/* Pass the address of the object; from here on the argument is a
   pointer.  */
990 args[i].tree_value = build1 (ADDR_EXPR,
991 build_pointer_type (type),
993 type = build_pointer_type (type);
997 /* We make a copy of the object and pass the address to the
998 function being called. */
/* Variable-sized (or stack-check-limited) objects get dynamic stack
   space; fixed-size ones get a stack temporary.  */
1001 if (TYPE_SIZE (type) == 0
1002 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1003 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1004 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
1005 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
1006 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
1008 /* This is a variable-sized object. Make space on the stack
1010 rtx size_rtx = expr_size (TREE_VALUE (p));
1012 if (*old_stack_level == 0)
1014 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1015 *old_pending_adj = pending_stack_adjust;
1016 pending_stack_adjust = 0;
1019 copy = gen_rtx_MEM (BLKmode,
1020 allocate_dynamic_stack_space (size_rtx,
1022 TYPE_ALIGN (type)));
1026 int size = int_size_in_bytes (type);
1027 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1030 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1032 store_expr (args[i].tree_value, copy, 0);
1035 args[i].tree_value = build1 (ADDR_EXPR,
1036 build_pointer_type (type),
1037 make_tree (type, copy));
1038 type = build_pointer_type (type);
1042 mode = TYPE_MODE (type);
1043 unsignedp = TREE_UNSIGNED (type);
1045 #ifdef PROMOTE_FUNCTION_ARGS
1046 mode = promote_mode (type, mode, &unsignedp, 1);
1049 args[i].unsignedp = unsignedp;
1050 args[i].mode = mode;
1051 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1052 argpos < n_named_args);
1053 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1056 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1057 argpos < n_named_args);
1060 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1062 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1063 it means that we are to pass this arg in the register(s) designated
1064 by the PARALLEL, but also to pass it in the stack. */
1065 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1066 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1067 args[i].pass_on_stack = 1;
1069 /* If this is an addressable type, we must preallocate the stack
1070 since we must evaluate the object into its final location.
1072 If this is to be passed in both registers and the stack, it is simpler
1074 if (TREE_ADDRESSABLE (type)
1075 || (args[i].pass_on_stack && args[i].reg != 0))
1076 *must_preallocate = 1;
1078 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1079 we cannot consider this function call constant. */
1080 if (TREE_ADDRESSABLE (type))
1083 /* Compute the stack-size of this argument. */
1084 if (args[i].reg == 0 || args[i].partial != 0
1085 || reg_parm_stack_space > 0
1086 || args[i].pass_on_stack)
1087 locate_and_pad_parm (mode, type,
1088 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1093 fndecl, args_size, &args[i].offset,
1096 #ifndef ARGS_GROW_DOWNWARD
1097 args[i].slot_offset = *args_size;
1100 /* If a part of the arg was put into registers,
1101 don't include that part in the amount pushed. */
1102 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1103 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1104 / (PARM_BOUNDARY / BITS_PER_UNIT)
1105 * (PARM_BOUNDARY / BITS_PER_UNIT));
1107 /* Update ARGS_SIZE, the total stack space for args so far. */
1109 args_size->constant += args[i].size.constant;
1110 if (args[i].size.var)
1112 ADD_PARM_SIZE (*args_size, args[i].size.var);
1115 /* Since the slot offset points to the bottom of the slot,
1116 we must record it after incrementing if the args grow down. */
1117 #ifdef ARGS_GROW_DOWNWARD
1118 args[i].slot_offset = *args_size;
1120 args[i].slot_offset.constant = -args_size->constant;
1123 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1127 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1128 have been used, etc. */
1130 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1131 argpos < n_named_args);
1135 /* Update ARGS_SIZE to contain the total size for the argument block.
1136 Return the original constant component of the argument block's size.
1138 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1139 for arguments passed in registers. */
1142 compute_argument_block_size (reg_parm_stack_space, args_size)
1143 int reg_parm_stack_space;
1144 struct args_size *args_size;
/* NOTE(review): this listing is incomplete -- the return-type line, the
   function braces, and several #else/#endif and statement lines (original
   lines 1145, 1151-1153, 1160-1178 in part, etc.) are missing, so the text
   below does not balance syntactically.  Comments describe only what the
   surviving lines show.  */
/* Round the accumulated argument-block size up to the stack boundary and
   apply the register-parm-stack-space minimum; the pre-adjustment constant
   component is saved first and returned to the caller.  */
1146 int unadjusted_args_size = args_size->constant;
1148 /* Compute the actual size of the argument block required. The variable
1149 and constant sizes must be combined, the size may have to be rounded,
1150 and there may be a minimum required size. */
/* When the size has a variable component, fold the constant part into the
   size tree so all subsequent rounding is done on the tree.  */
1154 args_size->var = ARGS_SIZE_TREE (*args_size);
1155 args_size->constant = 0;
1157 #ifdef PREFERRED_STACK_BOUNDARY
1158 if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1159 args_size->var = round_up (args_size->var, STACK_BYTES);
/* Enforce the minimum: the block must be at least reg_parm_stack_space
   bytes.  (The assignment's left-hand side line is missing from this
   listing; presumably `args_size->var` -- confirm against the original.)  */
1162 if (reg_parm_stack_space > 0)
1165 = size_binop (MAX_EXPR, args_size->var,
1166 size_int (reg_parm_stack_space));
1168 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1169 /* The area corresponding to register parameters is not to count in
1170 the size of the block we need. So make the adjustment. */
1171 = size_binop (MINUS_EXPR, args_size->var,
1173 size_int (reg_parm_stack_space));
/* Constant-size path: round the constant size (including any pending stack
   adjustment, so the total stays aligned after that adjustment is applied)
   up to STACK_BYTES, then remove the pending adjustment again.  */
1179 #ifdef PREFERRED_STACK_BOUNDARY
1180 args_size->constant = (((args_size->constant
1181 + pending_stack_adjust
1183 / STACK_BYTES * STACK_BYTES)
1184 - pending_stack_adjust);
1187 args_size->constant = MAX (args_size->constant,
1188 reg_parm_stack_space);
1190 #ifdef MAYBE_REG_PARM_STACK_SPACE
/* If the target reports no reserved register-parm area after all, the
   block need not reserve anything.  */
1191 if (reg_parm_stack_space == 0)
1192 args_size->constant = 0;
1195 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1196 args_size->constant -= reg_parm_stack_space;
1199 return unadjusted_args_size;
1202 /* Precompute parameters as needed for a function call.
1204 IS_CONST indicates the target function is a pure function.
1206 MUST_PREALLOCATE indicates that we must preallocate stack space for
1207 any stack arguments.
1209 NUM_ACTUALS is the number of arguments.
1211 ARGS is an array containing information for each argument; this routine
1212 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1214 ARGS_SIZE contains information about the size of the arg list. */
1217 precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1219 int must_preallocate;
1221 struct arg_data *args;
1222 struct args_size *args_size;
/* NOTE(review): incomplete listing -- the return type, the `is_const' and
   `num_actuals' parameter declarations, the function braces, and the head
   of the `if' condition inside the loop (original lines 1218, 1220,
   1223-1225, 1240, etc.) are missing here.  */
/* Evaluate ahead of time any argument whose evaluation could disturb
   already-stored stack arguments, recording the result in both
   args[i].initial_value and args[i].value.  */
1226 /* If this function call is cse'able, precompute all the parameters.
1227 Note that if the parameter is constructed into a temporary, this will
1228 cause an additional copy because the parameter will be constructed
1229 into a temporary location and then copied into the outgoing arguments.
1230 If a parameter contains a call to alloca and this function uses the
1231 stack, precompute the parameter. */
1233 /* If we preallocated the stack space, and some arguments must be passed
1234 on the stack, then we must precompute any parameter which contains a
1235 function call which will store arguments on the stack.
1236 Otherwise, evaluating the parameter may clobber previous parameters
1237 which have already been stored into the stack. */
1239 for (i = 0; i < num_actuals; i++)
/* Precompute when the argument itself contains a call (calls_function)
   and there is stack-argument space it could clobber.  The first arm of
   this condition is missing from the listing (original line 1240).  */
1241 || ((args_size->var != 0 || args_size->constant != 0)
1242 && calls_function (args[i].tree_value, 1))
1243 || (must_preallocate
1244 && (args_size->var != 0 || args_size->constant != 0)
1245 && calls_function (args[i].tree_value, 0)))
1247 /* If this is an addressable type, we cannot pre-evaluate it. */
1248 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
/* Evaluate the argument expression now; keep its temporaries alive
   until the call itself is emitted.  */
1253 args[i].initial_value = args[i].value
1254 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1256 preserve_temp_slots (args[i].value);
1259 /* ANSI doesn't require a sequence point here,
1260 but PCC has one, so this will avoid some problems. */
1263 args[i].initial_value = args[i].value
1264 = protect_from_queue (args[i].initial_value, 0);
/* If the argument is passed in a promoted mode, convert the computed
   value to that mode now, honoring the recorded signedness.  */
1266 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1268 = convert_modes (args[i].mode,
1269 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1270 args[i].value, args[i].unsignedp);
1274 /* Given the current state of MUST_PREALLOCATE and information about
1275 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1276 compute and return the final value for MUST_PREALLOCATE. */
1279 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1280 int must_preallocate;
1282 struct arg_data *args;
1283 struct args_size *args_size;
/* NOTE(review): incomplete listing -- the return type, the `num_actuals'
   declaration, function braces, and a few statements (e.g. the body of the
   first `if' at original line 1313, presumably `partial_seen = 1;') are
   missing.  */
/* Decide, from the per-argument data, whether the whole argument block
   must be preallocated; returns the possibly-updated flag.  */
1285 /* See if we have or want to preallocate stack space.
1287 If we would have to push a partially-in-regs parm
1288 before other stack parms, preallocate stack space instead.
1290 If the size of some parm is not a multiple of the required stack
1291 alignment, we must preallocate.
1293 If the total size of arguments that would otherwise create a copy in
1294 a temporary (such as a CALL) is more than half the total argument list
1295 size, preallocation is faster.
1297 Another reason to preallocate is if we have a machine (like the m88k)
1298 where stack alignment is required to be maintained between every
1299 pair of insns, not just when the call is made. However, we assume here
1300 that such machines either do not have push insns (and hence preallocation
1301 would occur anyway) or the problem is taken care of with
1304 if (! must_preallocate)
1306 int partial_seen = 0;
1307 int copy_to_evaluate_size = 0;
1310 for (i = 0; i < num_actuals && ! must_preallocate; i++)
/* A stack parm appearing after a partially-in-registers parm forces
   preallocation.  */
1312 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1314 else if (partial_seen && args[i].reg == 0)
1315 must_preallocate = 1;
/* Tally BLKmode arguments whose evaluation would otherwise go through
   a temporary copy (calls, target/cond exprs, addressable types).  */
1317 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1318 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1319 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1320 || TREE_CODE (args[i].tree_value) == COND_EXPR
1321 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1322 copy_to_evaluate_size
1323 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
/* Heuristic: if such copies would cover at least half the argument
   block, preallocating is the cheaper strategy.  */
1326 if (copy_to_evaluate_size * 2 >= args_size->constant
1327 && args_size->constant > 0)
1328 must_preallocate = 1;
1330 return must_preallocate;
1333 /* If we preallocated stack space, compute the address of each argument
1334 and store it into the ARGS array.
1336 We need not ensure it is a valid memory address here; it will be
1337 validized when it is used.
1339 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1342 compute_argument_addresses (args, argblock, num_actuals)
1343 struct arg_data *args;
/* NOTE(review): incomplete listing -- the `argblock'/`num_actuals'
   declarations, the guard on argblock being nonzero (presumably the whole
   body is inside `if (argblock)' -- confirm against the original), braces,
   `else' lines, and the MEM_SET_IN_STRUCT_P-style line that line 1374 is
   the argument of, are missing.  */
/* For each arg passed on the stack, build the MEM rtx addressing its slot
   within the preallocated block and store it in args[i].stack and
   args[i].stack_slot.  */
1349 rtx arg_reg = argblock;
1350 int i, arg_offset = 0;
/* Split a (PLUS base const) argblock into base register + constant part.  */
1352 if (GET_CODE (argblock) == PLUS)
1353 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1355 for (i = 0; i < num_actuals; i++)
1357 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1358 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1361 /* Skip this parm if it will not be passed on the stack. */
1362 if (! args[i].pass_on_stack && args[i].reg != 0)
/* args[i].stack: address of the data itself (base + offset + arg_offset).  */
1365 if (GET_CODE (offset) == CONST_INT)
1366 addr = plus_constant (arg_reg, INTVAL (offset));
1368 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1370 addr = plus_constant (addr, arg_offset);
1371 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1374 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
/* args[i].stack_slot: address of the whole slot, which can differ from
   the data address (slot_offset vs. offset) when args grow downward.  */
1376 if (GET_CODE (slot_offset) == CONST_INT)
1377 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1379 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1381 addr = plus_constant (addr, arg_offset);
1382 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1387 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1388 in a call instruction.
1390 FNDECL is the tree node for the target function. For an indirect call
1391 FNDECL will be NULL_TREE.
1393 EXP is the CALL_EXPR for this call. */
1396 rtx_for_function_call (fndecl, exp)
/* NOTE(review): incomplete listing -- the parameter declarations, the
   `funexp' declaration, the `else' branch head for indirect calls, the
   assignment target for the expand_expr at line 1422 (presumably
   `funexp = ...'), the rest of the emit_library_call arguments, and the
   final `return funexp;' are all missing.  */
/* Produce the rtx used as the call target: a SYMBOL_REF when FNDECL is
   known, otherwise the evaluated function expression from EXP.  */
1402 /* Get the function to call, in the form of RTL. */
1405 /* If this is the first use of the function, see if we need to
1406 make an external definition for it. */
1407 if (! TREE_USED (fndecl))
1409 assemble_external (fndecl);
1410 TREE_USED (fndecl) = 1;
1413 /* Get a SYMBOL_REF rtx for the function address. */
1414 funexp = XEXP (DECL_RTL (fndecl), 0);
/* Indirect call: evaluate the function expression itself.  */
1417 /* Generate an rtx (probably a pseudo-register) for the address. */
1422 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1423 pop_temp_slots (); /* FUNEXP can't be BLKmode */
/* Under -fcheck-memory-usage, emit a runtime check that the target
   address is executable before calling through it.  */
1425 /* Check the function is executable. */
1426 if (current_function_check_memory_usage)
1428 #ifdef POINTERS_EXTEND_UNSIGNED
1429 /* It might be OK to convert funexp in place, but there's
1430 a lot going on between here and when it happens naturally
1431 that this seems safer. */
1432 funaddr = convert_memory_address (Pmode, funexp);
1434 emit_library_call (chkr_check_exec_libfunc, 1,
1443 /* Do the register loads required for any wholly-register parms or any
1444 parms which are passed both on the stack and in a register. Their
1445 expressions were already evaluated.
1447 Mark all register-parms as living through the call, putting these USE
1448 insns in the CALL_INSN_FUNCTION_USAGE field. */
1451 load_register_parameters (args, num_actuals, call_fusage)
1452 struct arg_data *args;
/* NOTE(review): incomplete listing -- the `num_actuals'/`call_fusage'
   declarations, loop-variable declarations (i, j, nregs), braces, `#else'
   and `#endif' for LOAD_ARGS_REVERSED, the `if (reg)' guard presumably
   wrapping the body, and continuation lines of the emit_group_load and
   move_block_to_reg calls are missing.  */
/* Copy each register-passed argument's precomputed value into its hard
   register(s) and record every such register in CALL_FUSAGE so it is
   marked live across the call.  */
1458 #ifdef LOAD_ARGS_REVERSED
1459 for (i = num_actuals - 1; i >= 0; i--)
1461 for (i = 0; i < num_actuals; i++)
1464 rtx reg = args[i].reg;
1465 int partial = args[i].partial;
1470 /* Set to non-negative if must move a word at a time, even if just
1471 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1472 we just use a normal move insn. This value can be zero if the
1473 argument is a zero size structure with no fields. */
1474 nregs = (partial ? partial
1475 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1476 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1477 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1480 /* Handle calls that pass values in multiple non-contiguous
1481 locations. The Irix 6 ABI has examples of this. */
1483 if (GET_CODE (reg) == PARALLEL)
1485 emit_group_load (reg, args[i].value,
1486 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1487 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1491 /* If simple case, just do move. If normal partial, store_one_arg
1492 has already loaded the register for us. In all other cases,
1493 load the register(s) from memory. */
1495 else if (nregs == -1)
1496 emit_move_insn (reg, args[i].value);
1498 /* If we have pre-computed the values to put in the registers in
1499 the case of non-aligned structures, copy them in now. */
1501 else if (args[i].n_aligned_regs != 0)
1502 for (j = 0; j < args[i].n_aligned_regs; j++)
1503 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1504 args[i].aligned_regs[j]);
1506 else if (partial == 0 || args[i].pass_on_stack)
1507 move_block_to_reg (REGNO (reg),
1508 validize_mem (args[i].value), nregs,
/* Record register usage so the call insn keeps these regs live.  */
1511 /* Handle calls that pass values in multiple non-contiguous
1512 locations. The Irix 6 ABI has examples of this. */
1513 if (GET_CODE (reg) == PARALLEL)
1514 use_group_regs (call_fusage, reg);
1515 else if (nregs == -1)
1516 use_reg (call_fusage, reg);
/* nregs == 0 (zero-size struct) still marks one register used.  */
1518 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1523 /* Generate all the code for a function call
1524 and return an rtx for its value.
1525 Store the value in TARGET (specified as an rtx) if convenient.
1526 If the value is stored in TARGET then TARGET is returned.
1527 If IGNORE is nonzero, then we ignore the value of the function call. */
1530 expand_call (exp, target, ignore)
1535 /* List of actual parameters. */
1536 tree actparms = TREE_OPERAND (exp, 1);
1537 /* RTX for the function to be called. */
1539 /* Data type of the function. */
1541 /* Declaration of the function being called,
1542 or 0 if the function is computed (not known by name). */
1546 /* Register in which non-BLKmode value will be returned,
1547 or 0 if no value or if value is BLKmode. */
1549 /* Address where we should return a BLKmode value;
1550 0 if value not BLKmode. */
1551 rtx structure_value_addr = 0;
1552 /* Nonzero if that address is being passed by treating it as
1553 an extra, implicit first parameter. Otherwise,
1554 it is passed by being copied directly into struct_value_rtx. */
1555 int structure_value_addr_parm = 0;
1556 /* Size of aggregate value wanted, or zero if none wanted
1557 or if we are using the non-reentrant PCC calling convention
1558 or expecting the value in registers. */
1559 HOST_WIDE_INT struct_value_size = 0;
1560 /* Nonzero if called function returns an aggregate in memory PCC style,
1561 by returning the address of where to find it. */
1562 int pcc_struct_value = 0;
1564 /* Number of actual parameters in this call, including struct value addr. */
1566 /* Number of named args. Args after this are anonymous ones
1567 and they must all go on the stack. */
1570 /* Vector of information about each argument.
1571 Arguments are numbered in the order they will be pushed,
1572 not the order they are written. */
1573 struct arg_data *args;
1575 /* Total size in bytes of all the stack-parms scanned so far. */
1576 struct args_size args_size;
1577 /* Size of arguments before any adjustments (such as rounding). */
1578 int unadjusted_args_size;
1579 /* Data on reg parms scanned so far. */
1580 CUMULATIVE_ARGS args_so_far;
1581 /* Nonzero if a reg parm has been scanned. */
1583 /* Nonzero if this is an indirect function call. */
1585 /* Nonzero if we must avoid push-insns in the args for this call.
1586 If stack space is allocated for register parameters, but not by the
1587 caller, then it is preallocated in the fixed part of the stack frame.
1588 So the entire argument block must then be preallocated (i.e., we
1589 ignore PUSH_ROUNDING in that case). */
1591 #ifdef PUSH_ROUNDING
1592 int must_preallocate = 0;
1594 int must_preallocate = 1;
1597 /* Size of the stack reserved for parameter registers. */
1598 int reg_parm_stack_space = 0;
1600 /* Address of space preallocated for stack parms
1601 (on machines that lack push insns), or 0 if space not preallocated. */
1604 /* Nonzero if it is plausible that this is a call to alloca. */
1606 /* Nonzero if this is a call to malloc or a related function. */
1608 /* Nonzero if this is a call to setjmp or a related function. */
1610 /* Nonzero if this is a call to `longjmp'. */
1612 /* Nonzero if this is a call to an inline function. */
1613 int is_integrable = 0;
1614 /* Nonzero if this is a call to a `const' function.
1615 Note that only explicitly named functions are handled as `const' here. */
1617 /* Nonzero if this is a call to a `volatile' function. */
1618 int is_volatile = 0;
1619 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1620 /* Define the boundary of the register parm stack space that needs to be
1622 int low_to_save = -1, high_to_save;
1623 rtx save_area = 0; /* Place that it is saved */
1626 #ifdef ACCUMULATE_OUTGOING_ARGS
1627 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1628 char *initial_stack_usage_map = stack_usage_map;
1629 int old_stack_arg_under_construction;
1632 rtx old_stack_level = 0;
1633 int old_pending_adj = 0;
1634 int old_inhibit_defer_pop = inhibit_defer_pop;
1635 rtx call_fusage = 0;
1639 /* The value of the function call can be put in a hard register. But
1640 if -fcheck-memory-usage, code which invokes functions (and thus
1641 damages some hard registers) can be inserted before using the value.
1642 So, target is always a pseudo-register in that case. */
1643 if (current_function_check_memory_usage)
1646 /* See if we can find a DECL-node for the actual function.
1647 As a result, decide whether this is a call to an integrable function. */
1649 p = TREE_OPERAND (exp, 0);
1650 if (TREE_CODE (p) == ADDR_EXPR)
1652 fndecl = TREE_OPERAND (p, 0);
1653 if (TREE_CODE (fndecl) != FUNCTION_DECL)
1658 && fndecl != current_function_decl
1659 && DECL_INLINE (fndecl)
1660 && DECL_SAVED_INSNS (fndecl)
1661 && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
1663 else if (! TREE_ADDRESSABLE (fndecl))
1665 /* In case this function later becomes inlinable,
1666 record that there was already a non-inline call to it.
1668 Use abstraction instead of setting TREE_ADDRESSABLE
1670 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1673 warning_with_decl (fndecl, "can't inline call to `%s'");
1674 warning ("called from here");
1676 mark_addressable (fndecl);
1679 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1680 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
1683 if (TREE_THIS_VOLATILE (fndecl))
1688 /* If we don't have specific function to call, see if we have a
1689 constant or `noreturn' function from the type. */
1692 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1693 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1696 #ifdef REG_PARM_STACK_SPACE
1697 #ifdef MAYBE_REG_PARM_STACK_SPACE
1698 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1700 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1704 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1705 if (reg_parm_stack_space > 0)
1706 must_preallocate = 1;
1709 /* Warn if this value is an aggregate type,
1710 regardless of which calling convention we are using for it. */
1711 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1712 warning ("function call has aggregate value");
1714 /* Set up a place to return a structure. */
1716 /* Cater to broken compilers. */
1717 if (aggregate_value_p (exp))
1719 /* This call returns a big structure. */
1722 #ifdef PCC_STATIC_STRUCT_RETURN
1724 pcc_struct_value = 1;
1725 /* Easier than making that case work right. */
1728 /* In case this is a static function, note that it has been
1730 if (! TREE_ADDRESSABLE (fndecl))
1731 mark_addressable (fndecl);
1735 #else /* not PCC_STATIC_STRUCT_RETURN */
1737 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1739 if (target && GET_CODE (target) == MEM)
1740 structure_value_addr = XEXP (target, 0);
1743 /* Assign a temporary to hold the value. */
1746 /* For variable-sized objects, we must be called with a target
1747 specified. If we were to allocate space on the stack here,
1748 we would have no way of knowing when to free it. */
1750 if (struct_value_size < 0)
1753 /* This DECL is just something to feed to mark_addressable;
1754 it doesn't get pushed. */
1755 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1756 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 5, 0, 1);
1757 mark_addressable (d);
1758 structure_value_addr = XEXP (DECL_RTL (d), 0);
1763 #endif /* not PCC_STATIC_STRUCT_RETURN */
1766 /* If called function is inline, try to integrate it. */
1771 #ifdef ACCUMULATE_OUTGOING_ARGS
1772 rtx before_call = get_last_insn ();
1775 temp = expand_inline_function (fndecl, actparms, target,
1776 ignore, TREE_TYPE (exp),
1777 structure_value_addr);
1779 /* If inlining succeeded, return. */
1780 if (temp != (rtx) (HOST_WIDE_INT) -1)
1782 #ifdef ACCUMULATE_OUTGOING_ARGS
1783 /* If the outgoing argument list must be preserved, push
1784 the stack before executing the inlined function if it
1787 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1788 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1791 if (stack_arg_under_construction || i >= 0)
1794 = before_call ? NEXT_INSN (before_call) : get_insns ();
1797 /* Look for a call in the inline function code.
1798 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
1799 nonzero then there is a call and it is not necessary
1800 to scan the insns. */
1802 if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
1803 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1804 if (GET_CODE (insn) == CALL_INSN)
1809 /* Reserve enough stack space so that the largest
1810 argument list of any function call in the inline
1811 function does not overlap the argument list being
1812 evaluated. This is usually an overestimate because
1813 allocate_dynamic_stack_space reserves space for an
1814 outgoing argument list in addition to the requested
1815 space, but there is no way to ask for stack space such
1816 that an argument list of a certain length can be
1819 Add the stack space reserved for register arguments, if
1820 any, in the inline function. What is really needed is the
1821 largest value of reg_parm_stack_space in the inline
1822 function, but that is not available. Using the current
1823 value of reg_parm_stack_space is wrong, but gives
1824 correct results on all supported machines. */
1826 int adjust = (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl))
1827 + reg_parm_stack_space);
1830 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1831 allocate_dynamic_stack_space (GEN_INT (adjust),
1832 NULL_RTX, BITS_PER_UNIT);
1835 emit_insns_before (seq, first_insn);
1836 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1841 /* If the result is equivalent to TARGET, return TARGET to simplify
1842 checks in store_expr. They can be equivalent but not equal in the
1843 case of a function that returns BLKmode. */
1844 if (temp != target && rtx_equal_p (temp, target))
1849 /* If inlining failed, mark FNDECL as needing to be compiled
1850 separately after all. If function was declared inline,
1852 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1853 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1855 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1856 warning ("called from here");
1858 mark_addressable (fndecl);
1861 function_call_count++;
1863 if (fndecl && DECL_NAME (fndecl))
1864 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1866 /* See if this is a call to a function that can return more than once
1867 or a call to longjmp or malloc. */
1868 special_function_p (name, fndecl, &returns_twice, &is_longjmp,
1869 &is_malloc, &may_be_alloca);
1872 current_function_calls_alloca = 1;
1874 /* Operand 0 is a pointer-to-function; get the type of the function. */
1875 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1876 if (! POINTER_TYPE_P (funtype))
1878 funtype = TREE_TYPE (funtype);
1880 /* When calling a const function, we must pop the stack args right away,
1881 so that the pop is deleted or moved with the call. */
1885 /* Don't let pending stack adjusts add up to too much.
1886 Also, do all pending adjustments now
1887 if there is any chance this might be a call to alloca. */
1889 if (pending_stack_adjust >= 32
1890 || (pending_stack_adjust > 0 && may_be_alloca))
1891 do_pending_stack_adjust ();
1893 /* Push the temporary stack slot level so that we can free any temporaries
1897 /* Start updating where the next arg would go.
1899 On some machines (such as the PA) indirect calls have a different
1900 calling convention than normal calls. The last argument in
1901 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1903 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
1905 /* If struct_value_rtx is 0, it means pass the address
1906 as if it were an extra parameter. */
1907 if (structure_value_addr && struct_value_rtx == 0)
1909 /* If structure_value_addr is a REG other than
1910 virtual_outgoing_args_rtx, we can use always use it. If it
1911 is not a REG, we must always copy it into a register.
1912 If it is virtual_outgoing_args_rtx, we must copy it to another
1913 register in some cases. */
1914 rtx temp = (GET_CODE (structure_value_addr) != REG
1915 #ifdef ACCUMULATE_OUTGOING_ARGS
1916 || (stack_arg_under_construction
1917 && structure_value_addr == virtual_outgoing_args_rtx)
1919 ? copy_addr_to_reg (structure_value_addr)
1920 : structure_value_addr);
1923 = tree_cons (error_mark_node,
1924 make_tree (build_pointer_type (TREE_TYPE (funtype)),
1927 structure_value_addr_parm = 1;
1930 /* Count the arguments and set NUM_ACTUALS. */
1931 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
1934 /* Compute number of named args.
1935 Normally, don't include the last named arg if anonymous args follow.
1936 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
1937 (If no anonymous args follow, the result of list_length is actually
1938 one too large. This is harmless.)
1940 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
1941 zero, this machine will be able to place unnamed args that were passed in
1942 registers into the stack. So treat all args as named. This allows the
1943 insns emitting for a specific argument list to be independent of the
1944 function declaration.
1946 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
1947 way to pass unnamed args in registers, so we must force them into
1950 if ((STRICT_ARGUMENT_NAMING
1951 || ! PRETEND_OUTGOING_VARARGS_NAMED)
1952 && TYPE_ARG_TYPES (funtype) != 0)
1954 = (list_length (TYPE_ARG_TYPES (funtype))
1955 /* Don't include the last named arg. */
1956 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
1957 /* Count the struct value address, if it is passed as a parm. */
1958 + structure_value_addr_parm);
1960 /* If we know nothing, treat all args as named. */
1961 n_named_args = num_actuals;
1963 /* Make a vector to hold all the information about each arg. */
1964 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
1965 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
1967 /* Build up entries in the ARGS array, compute the size of the arguments
1968 into ARGS_SIZE, etc. */
1969 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
1970 actparms, fndecl, &args_so_far,
1971 reg_parm_stack_space, &old_stack_level,
1972 &old_pending_adj, &must_preallocate,
1975 #ifdef FINAL_REG_PARM_STACK_SPACE
1976 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1982 /* If this function requires a variable-sized argument list, don't try to
1983 make a cse'able block for this call. We may be able to do this
1984 eventually, but it is too complicated to keep track of what insns go
1985 in the cse'able block and which don't. */
1988 must_preallocate = 1;
1991 /* Compute the actual size of the argument block required. The variable
1992 and constant sizes must be combined, the size may have to be rounded,
1993 and there may be a minimum required size. */
1994 unadjusted_args_size
1995 = compute_argument_block_size (reg_parm_stack_space, &args_size);
1997 /* Now make final decision about preallocating stack space. */
1998 must_preallocate = finalize_must_preallocate (must_preallocate,
1999 num_actuals, args, &args_size);
2001 /* If the structure value address will reference the stack pointer, we must
2002 stabilize it. We don't need to do this if we know that we are not going
2003 to adjust the stack pointer in processing this call. */
2005 if (structure_value_addr
2006 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2007 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
2009 #ifndef ACCUMULATE_OUTGOING_ARGS
2010 || args_size.constant
2013 structure_value_addr = copy_to_reg (structure_value_addr);
2015 /* Precompute any arguments as needed. */
2016 precompute_arguments (is_const, must_preallocate, num_actuals,
2019 /* Now we are about to start emitting insns that can be deleted
2020 if a libcall is deleted. */
2021 if (is_const || is_malloc)
2024 /* If we have no actual push instructions, or shouldn't use them,
2025 make space for all args right now. */
2027 if (args_size.var != 0)
2029 if (old_stack_level == 0)
2031 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2032 old_pending_adj = pending_stack_adjust;
2033 pending_stack_adjust = 0;
2034 #ifdef ACCUMULATE_OUTGOING_ARGS
2035 /* stack_arg_under_construction says whether a stack arg is
2036 being constructed at the old stack level. Pushing the stack
2037 gets a clean outgoing argument block. */
2038 old_stack_arg_under_construction = stack_arg_under_construction;
2039 stack_arg_under_construction = 0;
2042 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2046 /* Note that we must go through the motions of allocating an argument
2047 block even if the size is zero because we may be storing args
2048 in the area reserved for register arguments, which may be part of
2051 int needed = args_size.constant;
2053 /* Store the maximum argument space used. It will be pushed by
2054 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2057 if (needed > current_function_outgoing_args_size)
2058 current_function_outgoing_args_size = needed;
2060 if (must_preallocate)
2062 #ifdef ACCUMULATE_OUTGOING_ARGS
2063 /* Since the stack pointer will never be pushed, it is possible for
2064 the evaluation of a parm to clobber something we have already
2065 written to the stack. Since most function calls on RISC machines
2066 do not use the stack, this is uncommon, but must work correctly.
2068 Therefore, we save any area of the stack that was already written
2069 and that we are using. Here we set up to do this by making a new
2070 stack usage map from the old one. The actual save will be done
2073 Another approach might be to try to reorder the argument
2074 evaluations to avoid this conflicting stack usage. */
2076 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2077 /* Since we will be writing into the entire argument area, the
2078 map must be allocated for its entire size, not just the part that
2079 is the responsibility of the caller. */
2080 needed += reg_parm_stack_space;
2083 #ifdef ARGS_GROW_DOWNWARD
2084 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2087 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2090 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2092 if (initial_highest_arg_in_use)
2093 bcopy (initial_stack_usage_map, stack_usage_map,
2094 initial_highest_arg_in_use);
2096 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2097 bzero (&stack_usage_map[initial_highest_arg_in_use],
2098 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
/* NOTE(review): elided excerpt from the middle of expand_call; many
   intervening lines (braces, #if/#else partners, statements) are missing
   from this view.  Comments below annotate only what the visible code
   shows.  */
2101 /* The address of the outgoing argument list must not be copied to a
2102 register here, because argblock would be left pointing to the
2103 wrong place after the call to allocate_dynamic_stack_space below.
2106 argblock = virtual_outgoing_args_rtx;
2108 #else /* not ACCUMULATE_OUTGOING_ARGS */
/* Push-style targets: see how much of the pending stack adjustment can
   be reused to provide space for the new argument block.  */
2109 if (inhibit_defer_pop == 0)
2111 /* Try to reuse some or all of the pending_stack_adjust
2112 to get this space. Maybe we can avoid any pushing. */
2113 if (needed > pending_stack_adjust)
2115 needed -= pending_stack_adjust;
2116 pending_stack_adjust = 0;
2120 pending_stack_adjust -= needed;
2124 /* Special case this because overhead of `push_block' in this
2125 case is non-trivial. */
2127 argblock = virtual_outgoing_args_rtx;
2129 argblock = push_block (GEN_INT (needed), 0, 0);
2131 /* We only really need to call `copy_to_reg' in the case where push
2132 insns are going to be used to pass ARGBLOCK to a function
2133 call in ARGS. In that case, the stack pointer changes value
2134 from the allocation point to the call point, and hence
2135 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2136 But might as well always do it. */
2137 argblock = copy_to_reg (argblock);
2138 #endif /* not ACCUMULATE_OUTGOING_ARGS */
2142 #ifdef ACCUMULATE_OUTGOING_ARGS
2143 /* The save/restore code in store_one_arg handles all cases except one:
2144 a constructor call (including a C function returning a BLKmode struct)
2145 to initialize an argument. */
2146 if (stack_arg_under_construction)
2148 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2149 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
2151 rtx push_size = GEN_INT (args_size.constant);
/* Save the current stack level once and clear the pending adjustment so
   a clean argument block can be allocated below; both are restored near
   the end of expand_call (see the old_stack_level check there).  */
2153 if (old_stack_level == 0)
2155 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2156 old_pending_adj = pending_stack_adjust;
2157 pending_stack_adjust = 0;
2158 /* stack_arg_under_construction says whether a stack arg is
2159 being constructed at the old stack level. Pushing the stack
2160 gets a clean outgoing argument block. */
2161 old_stack_arg_under_construction = stack_arg_under_construction;
2162 stack_arg_under_construction = 0;
2163 /* Make a new map for the new argument list. */
2164 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2165 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2166 highest_outgoing_arg_in_use = 0;
2168 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
2170 /* If argument evaluation might modify the stack pointer, copy the
2171 address of the argument list to a register. */
2172 for (i = 0; i < num_actuals; i++)
2173 if (args[i].pass_on_stack)
2175 argblock = copy_addr_to_reg (argblock);
2180 compute_argument_addresses (args, argblock, num_actuals);
2182 #ifdef PUSH_ARGS_REVERSED
2183 #ifdef PREFERRED_STACK_BOUNDARY
2184 /* If we push args individually in reverse order, perform stack alignment
2185 before the first push (the last arg). */
2187 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2191 /* Don't try to defer pops if preallocating, not even from the first arg,
2192 since ARGBLOCK probably refers to the SP. */
2196 funexp = rtx_for_function_call (fndecl, exp);
2198 /* Figure out the register where the value, if any, will come back. */
2200 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2201 && ! structure_value_addr)
2203 if (pcc_struct_value)
/* PCC-style struct return: the hard return register holds a pointer to
   the returned aggregate, hence the pointer type here.  */
2204 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2207 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
2210 /* Precompute all register parameters. It isn't safe to compute anything
2211 once we have started filling any specific hard regs. */
/* NOTE(review): "®_parm_seen" on the next line is a mojibake of
   "&reg_parm_seen" (the "&reg" sequence was collapsed into the (R)
   character by a bad encoding pass) — restore before compiling.  */
2212 precompute_register_parameters (num_actuals, args, ®_parm_seen);
2214 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2216 /* Save the fixed argument area if it's part of the caller's frame and
2217 is clobbered by argument setup for this call. */
2218 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2219 &low_to_save, &high_to_save);
2223 /* Now store (and compute if necessary) all non-register parms.
2224 These come before register parms, since they can require block-moves,
2225 which could clobber the registers used for register parms.
2226 Parms which have partial registers are not stored here,
2227 but we do preallocate space here if they want that. */
2229 for (i = 0; i < num_actuals; i++)
2230 if (args[i].reg == 0 || args[i].pass_on_stack)
2231 store_one_arg (&args[i], argblock, may_be_alloca,
2232 args_size.var != 0, reg_parm_stack_space);
2234 /* If we have a parm that is passed in registers but not in memory
2235 and whose alignment does not permit a direct copy into registers,
2236 make a group of pseudos that correspond to each register that we
2238 if (STRICT_ALIGNMENT)
2239 store_unaligned_arguments_into_pseudos (args, num_actuals);
2241 /* Now store any partially-in-registers parm.
2242 This is the last place a block-move can happen. */
2244 for (i = 0; i < num_actuals; i++)
2245 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2246 store_one_arg (&args[i], argblock, may_be_alloca,
2247 args_size.var != 0, reg_parm_stack_space);
2249 #ifndef PUSH_ARGS_REVERSED
2250 #ifdef PREFERRED_STACK_BOUNDARY
2251 /* If we pushed args in forward order, perform stack alignment
2252 after pushing the last arg. */
2254 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2258 /* If register arguments require space on the stack and stack space
2259 was not preallocated, allocate stack space here for arguments
2260 passed in registers. */
2261 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2262 if (must_preallocate == 0 && reg_parm_stack_space > 0)
2263 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2266 /* Pass the function the address in which to return a structure value. */
2267 if (structure_value_addr && ! structure_value_addr_parm)
2269 emit_move_insn (struct_value_rtx,
2271 force_operand (structure_value_addr,
2274 /* Mark the memory for the aggregate as write-only. */
2275 if (current_function_check_memory_usage)
2276 emit_library_call (chkr_set_right_libfunc, 1,
2278 structure_value_addr, Pmode,
2279 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
2280 GEN_INT (MEMORY_USE_WO),
2281 TYPE_MODE (integer_type_node));
2283 if (GET_CODE (struct_value_rtx) == REG)
2284 use_reg (&call_fusage, struct_value_rtx);
2287 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
2289 load_register_parameters (args, num_actuals, &call_fusage);
2291 /* Perform postincrements before actually calling the function. */
2294 /* All arguments and registers used for the call must be set up by now! */
/* Optionally warn when the argument list exceeds the user-specified
   size limit (two warning calls emit one two-line diagnostic).  */
2296 if (warn_arglist_size_flag)
2297 if (unadjusted_args_size > warn_arglist_size)
2299 warning ("%d byte arglist in function call", unadjusted_args_size);
2300 warning ("exceeds user specified limit (%d bytes)",
2304 /* Generate the actual call instruction. */
2305 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2306 args_size.constant, struct_value_size,
2307 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2308 valreg, old_inhibit_defer_pop, call_fusage, is_const);
2310 /* If call is cse'able, make appropriate pair of reg-notes around it.
2311 Test valreg so we don't crash; may safely ignore `const'
2312 if return type is void. Disable for PARALLEL return values, because
2313 we have no way to move such values into a pseudo register. */
2314 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2317 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2320 /* Mark the return value as a pointer if needed. */
2321 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2323 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2324 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2327 /* Construct an "equal form" for the value which mentions all the
2328 arguments in order as well as the function name. */
2329 #ifdef PUSH_ARGS_REVERSED
2330 for (i = 0; i < num_actuals; i++)
2331 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2333 for (i = num_actuals - 1; i >= 0; i--)
2334 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2336 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2338 insns = get_insns ();
2341 emit_libcall_block (insns, temp, valreg, note);
2347 /* Otherwise, just write out the sequence without a note. */
2348 rtx insns = get_insns ();
/* Malloc-like call: copy the value register into a fresh pseudo and
   attach a REG_NOALIAS note so later passes know the result is newly
   allocated memory.  */
2355 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2358 /* The return value from a malloc-like function is a pointer. */
2359 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2360 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2362 emit_move_insn (temp, valreg);
2364 /* The return value from a malloc-like function can not alias
2366 last = get_last_insn ();
2368 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2370 /* Write out the sequence. */
2371 insns = get_insns ();
2377 /* For calls to `setjmp', etc., inform flow.c it should complain
2378 if nonvolatile values are live. */
2382 emit_note (name, NOTE_INSN_SETJMP);
2383 current_function_calls_setjmp = 1;
2387 current_function_calls_longjmp = 1;
2389 /* Notice functions that cannot return.
2390 If optimizing, insns emitted below will be dead.
2391 If not optimizing, they will exist, which is useful
2392 if the user uses the `return' command in the debugger. */
2394 if (is_volatile || is_longjmp)
2397 /* If value type not void, return an rtx for the value. */
2399 /* If there are cleanups to be called, don't use a hard reg as target.
2400 We need to double check this and see if it matters anymore. */
2401 if (any_pending_cleanups (1)
2402 && target && REG_P (target)
2403 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2406 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2409 target = const0_rtx;
2411 else if (structure_value_addr)
2413 if (target == 0 || GET_CODE (target) != MEM)
2415 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2416 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2417 structure_value_addr));
2418 MEM_SET_IN_STRUCT_P (target,
2419 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2422 else if (pcc_struct_value)
2424 /* This is the special C++ case where we need to
2425 know what the true target was. We take care to
2426 never use this value more than once in one expression. */
2427 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2428 copy_to_reg (valreg));
2429 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2431 /* Handle calls that return values in multiple non-contiguous locations.
2432 The Irix 6 ABI has examples of this. */
2433 else if (GET_CODE (valreg) == PARALLEL)
2435 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2439 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2440 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2441 preserve_temp_slots (target);
2444 emit_group_store (target, valreg, bytes,
2445 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2447 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2448 && GET_MODE (target) == GET_MODE (valreg))
2449 /* TARGET and VALREG cannot be equal at this point because the latter
2450 would not have REG_FUNCTION_VALUE_P true, while the former would if
2451 it were referring to the same register.
2453 If they refer to the same register, this move will be a no-op, except
2454 when function inlining is being done. */
2455 emit_move_insn (target, valreg);
2456 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2457 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2459 target = copy_to_reg (valreg);
2461 #ifdef PROMOTE_FUNCTION_RETURN
2462 /* If we promoted this return value, make the proper SUBREG. TARGET
2463 might be const0_rtx here, so be careful. */
2464 if (GET_CODE (target) == REG
2465 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2466 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2468 tree type = TREE_TYPE (exp);
2469 int unsignedp = TREE_UNSIGNED (type);
2471 /* If we don't promote as expected, something is wrong. */
2472 if (GET_MODE (target)
2473 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2476 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2477 SUBREG_PROMOTED_VAR_P (target) = 1;
2478 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2482 /* If size of args is variable or this was a constructor call for a stack
2483 argument, restore saved stack-pointer value. */
2485 if (old_stack_level)
2487 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2488 pending_stack_adjust = old_pending_adj;
2489 #ifdef ACCUMULATE_OUTGOING_ARGS
2490 stack_arg_under_construction = old_stack_arg_under_construction;
2491 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2492 stack_usage_map = initial_stack_usage_map;
2495 #ifdef ACCUMULATE_OUTGOING_ARGS
2498 #ifdef REG_PARM_STACK_SPACE
2500 restore_fixed_argument_area (save_area, argblock,
2501 high_to_save, low_to_save);
2504 /* If we saved any argument areas, restore them. */
2505 for (i = 0; i < num_actuals; i++)
2506 if (args[i].save_area)
2508 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2510 = gen_rtx_MEM (save_mode,
2511 memory_address (save_mode,
2512 XEXP (args[i].stack_slot, 0)));
2514 if (save_mode != BLKmode)
2515 emit_move_insn (stack_area, args[i].save_area);
2517 emit_block_move (stack_area, validize_mem (args[i].save_area),
2518 GEN_INT (args[i].size.constant),
2519 PARM_BOUNDARY / BITS_PER_UNIT);
2522 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2523 stack_usage_map = initial_stack_usage_map;
2527 /* If this was alloca, record the new stack level for nonlocal gotos.
2528 Check for the handler slots since we might not have a save area
2529 for non-local gotos. */
2531 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2532 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2536 /* Free up storage we no longer need. */
2537 for (i = 0; i < num_actuals; ++i)
2538 if (args[i].aligned_regs)
2539 free (args[i].aligned_regs);
2544 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2545 (emitting the queue unless NO_QUEUE is nonzero),
2546 for a value of mode OUTMODE,
2547 with NARGS different arguments, passed as alternating rtx values
2548 and machine_modes to convert them to.
2549 The rtx values should have been passed through protect_from_queue already.
2551 NO_QUEUE will be true if and only if the library call is a `const' call
2552 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2553 to the variable is_const in expand_call.
2555 NO_QUEUE must be true for const calls, because if it isn't, then
2556 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2557 and will be lost if the libcall sequence is optimized away.
2559 NO_QUEUE must be false for non-const calls, because if it isn't, the
2560 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2561 optimized. For instance, the instruction scheduler may incorrectly
2562 move memory references across the non-const call. */
/* NOTE(review): elided excerpt of emit_library_call; declarations, braces
   and several statements between the visible lines (including the
   function's closing brace) are missing from this view.  */
2565 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2568 #ifndef ANSI_PROTOTYPES
2571 enum machine_mode outmode;
2575 /* Total size in bytes of all the stack-parms scanned so far. */
2576 struct args_size args_size;
2577 /* Size of arguments before any adjustments (such as rounding). */
2578 struct args_size original_args_size;
2579 register int argnum;
2584 CUMULATIVE_ARGS args_so_far;
2585 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2586 struct args_size offset; struct args_size size; rtx save_area; };
2588 int old_inhibit_defer_pop = inhibit_defer_pop;
2589 rtx call_fusage = 0;
2590 int reg_parm_stack_space = 0;
2591 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2592 /* Define the boundary of the register parm stack space that needs to be
2594 int low_to_save = -1, high_to_save;
2595 rtx save_area = 0; /* Place that it is saved */
2598 #ifdef ACCUMULATE_OUTGOING_ARGS
2599 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2600 char *initial_stack_usage_map = stack_usage_map;
2604 #ifdef REG_PARM_STACK_SPACE
2605 /* Size of the stack reserved for parameter registers. */
2606 #ifdef MAYBE_REG_PARM_STACK_SPACE
2607 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2609 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
2613 VA_START (p, nargs);
/* With K&R (non-ANSI) varargs, the named parameters also arrive through
   the va_list and must be fetched here before the per-argument pairs.  */
2615 #ifndef ANSI_PROTOTYPES
2616 orgfun = va_arg (p, rtx);
2617 no_queue = va_arg (p, int);
2618 outmode = va_arg (p, enum machine_mode);
2619 nargs = va_arg (p, int);
2624 /* Copy all the libcall-arguments out of the varargs data
2625 and into a vector ARGVEC.
2627 Compute how to pass each argument. We only support a very small subset
2628 of the full argument passing conventions to limit complexity here since
2629 library functions shouldn't have many args. */
2631 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2632 bzero ((char *) argvec, nargs * sizeof (struct arg));
2635 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2637 args_size.constant = 0;
/* Each libcall argument is passed as an (rtx value, machine_mode) pair
   in the varargs list.  */
2642 for (count = 0; count < nargs; count++)
2644 rtx val = va_arg (p, rtx);
2645 enum machine_mode mode = va_arg (p, enum machine_mode);
2647 /* We cannot convert the arg value to the mode the library wants here;
2648 must do it earlier where we know the signedness of the arg. */
2650 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2653 /* On some machines, there's no way to pass a float to a library fcn.
2654 Pass it as a double instead. */
2655 #ifdef LIBGCC_NEEDS_DOUBLE
2656 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2657 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2660 /* There's no need to call protect_from_queue, because
2661 either emit_move_insn or emit_push_insn will do that. */
2663 /* Make sure it is a reasonable operand for a move or push insn. */
2664 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2665 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2666 val = force_operand (val, NULL_RTX);
2668 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2669 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2671 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2672 be viewed as just an efficiency improvement. */
2673 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2674 emit_move_insn (slot, val);
2675 val = force_operand (XEXP (slot, 0), NULL_RTX);
2680 argvec[count].value = val;
2681 argvec[count].mode = mode;
2683 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2684 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2686 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2687 argvec[count].partial
2688 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2690 argvec[count].partial = 0;
2693 locate_and_pad_parm (mode, NULL_TREE,
2694 argvec[count].reg && argvec[count].partial == 0,
2695 NULL_TREE, &args_size, &argvec[count].offset,
2696 &argvec[count].size);
2698 if (argvec[count].size.var)
2701 if (reg_parm_stack_space == 0 && argvec[count].partial)
2702 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2704 if (argvec[count].reg == 0 || argvec[count].partial != 0
2705 || reg_parm_stack_space > 0)
2706 args_size.constant += argvec[count].size.constant;
2708 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2712 #ifdef FINAL_REG_PARM_STACK_SPACE
2713 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2717 /* If this machine requires an external definition for library
2718 functions, write one out. */
2719 assemble_external_libcall (fun);
2721 original_args_size = args_size;
2722 #ifdef PREFERRED_STACK_BOUNDARY
/* Round the outgoing argument block up to the preferred stack
   boundary (STACK_BYTES is PREFERRED_STACK_BOUNDARY in bytes).  */
2723 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2724 / STACK_BYTES) * STACK_BYTES);
2727 args_size.constant = MAX (args_size.constant,
2728 reg_parm_stack_space);
2730 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2731 args_size.constant -= reg_parm_stack_space;
2734 if (args_size.constant > current_function_outgoing_args_size)
2735 current_function_outgoing_args_size = args_size.constant;
2737 #ifdef ACCUMULATE_OUTGOING_ARGS
2738 /* Since the stack pointer will never be pushed, it is possible for
2739 the evaluation of a parm to clobber something we have already
2740 written to the stack. Since most function calls on RISC machines
2741 do not use the stack, this is uncommon, but must work correctly.
2743 Therefore, we save any area of the stack that was already written
2744 and that we are using. Here we set up to do this by making a new
2745 stack usage map from the old one.
2747 Another approach might be to try to reorder the argument
2748 evaluations to avoid this conflicting stack usage. */
2750 needed = args_size.constant;
2752 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2753 /* Since we will be writing into the entire argument area, the
2754 map must be allocated for its entire size, not just the part that
2755 is the responsibility of the caller. */
2756 needed += reg_parm_stack_space;
2759 #ifdef ARGS_GROW_DOWNWARD
2760 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2763 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2766 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
/* Copy the part of the old usage map that is still meaningful and zero
   the newly-covered tail.  */
2768 if (initial_highest_arg_in_use)
2769 bcopy (initial_stack_usage_map, stack_usage_map,
2770 initial_highest_arg_in_use);
2772 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2773 bzero (&stack_usage_map[initial_highest_arg_in_use],
2774 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2777 /* The address of the outgoing argument list must not be copied to a
2778 register here, because argblock would be left pointing to the
2779 wrong place after the call to allocate_dynamic_stack_space below.
2782 argblock = virtual_outgoing_args_rtx;
2783 #else /* not ACCUMULATE_OUTGOING_ARGS */
2784 #ifndef PUSH_ROUNDING
2785 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2789 #ifdef PUSH_ARGS_REVERSED
2790 #ifdef PREFERRED_STACK_BOUNDARY
2791 /* If we push args individually in reverse order, perform stack alignment
2792 before the first push (the last arg). */
2794 anti_adjust_stack (GEN_INT (args_size.constant
2795 - original_args_size.constant));
2799 #ifdef PUSH_ARGS_REVERSED
2807 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2808 /* The argument list is the property of the called routine and it
2809 may clobber it. If the fixed area has been used for previous
2810 parameters, we must save and restore it.
2812 Here we compute the boundary of the area that needs to be saved, if any. */
2814 #ifdef ARGS_GROW_DOWNWARD
2815 for (count = 0; count < reg_parm_stack_space + 1; count++)
2817 for (count = 0; count < reg_parm_stack_space; count++)
2820 if (count >= highest_outgoing_arg_in_use
2821 || stack_usage_map[count] == 0)
2824 if (low_to_save == -1)
2825 low_to_save = count;
2827 high_to_save = count;
2830 if (low_to_save >= 0)
2832 int num_to_save = high_to_save - low_to_save + 1;
2833 enum machine_mode save_mode
2834 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2837 /* If we don't have the required alignment, must do this in BLKmode. */
2838 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2839 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2840 save_mode = BLKmode;
2842 #ifdef ARGS_GROW_DOWNWARD
2843 stack_area = gen_rtx_MEM (save_mode,
2844 memory_address (save_mode,
2845 plus_constant (argblock,
2848 stack_area = gen_rtx_MEM (save_mode,
2849 memory_address (save_mode,
2850 plus_constant (argblock,
2853 if (save_mode == BLKmode)
2855 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2856 emit_block_move (validize_mem (save_area), stack_area,
2857 GEN_INT (num_to_save),
2858 PARM_BOUNDARY / BITS_PER_UNIT);
2862 save_area = gen_reg_rtx (save_mode);
2863 emit_move_insn (save_area, stack_area);
2868 /* Push the args that need to be pushed. */
2870 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2871 are to be pushed. */
2872 for (count = 0; count < nargs; count++, argnum += inc)
2874 register enum machine_mode mode = argvec[argnum].mode;
2875 register rtx val = argvec[argnum].value;
2876 rtx reg = argvec[argnum].reg;
2877 int partial = argvec[argnum].partial;
2878 #ifdef ACCUMULATE_OUTGOING_ARGS
2879 int lower_bound, upper_bound, i;
/* Only arguments that go (wholly or partly) on the stack are pushed
   here; pure register arguments are loaded in the second loop below.  */
2882 if (! (reg != 0 && partial == 0))
2884 #ifdef ACCUMULATE_OUTGOING_ARGS
2885 /* If this is being stored into a pre-allocated, fixed-size, stack
2886 area, save any previous data at that location. */
2888 #ifdef ARGS_GROW_DOWNWARD
2889 /* stack_slot is negative, but we want to index stack_usage_map
2890 with positive values. */
2891 upper_bound = -argvec[argnum].offset.constant + 1;
2892 lower_bound = upper_bound - argvec[argnum].size.constant;
2894 lower_bound = argvec[argnum].offset.constant;
2895 upper_bound = lower_bound + argvec[argnum].size.constant;
2898 for (i = lower_bound; i < upper_bound; i++)
2899 if (stack_usage_map[i]
2900 /* Don't store things in the fixed argument area at this point;
2901 it has already been saved. */
2902 && i > reg_parm_stack_space)
2905 if (i != upper_bound)
2907 /* We need to make a save area. See what mode we can make it. */
2908 enum machine_mode save_mode
2909 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2912 = gen_rtx_MEM (save_mode,
2913 memory_address (save_mode,
2914 plus_constant (argblock, argvec[argnum].offset.constant)));
2915 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2916 emit_move_insn (argvec[argnum].save_area, stack_area);
2919 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2920 argblock, GEN_INT (argvec[argnum].offset.constant),
2921 reg_parm_stack_space);
2923 #ifdef ACCUMULATE_OUTGOING_ARGS
2924 /* Now mark the segment we just used. */
2925 for (i = lower_bound; i < upper_bound; i++)
2926 stack_usage_map[i] = 1;
2933 #ifndef PUSH_ARGS_REVERSED
2934 #ifdef PREFERRED_STACK_BOUNDARY
2935 /* If we pushed args in forward order, perform stack alignment
2936 after pushing the last arg. */
2938 anti_adjust_stack (GEN_INT (args_size.constant
2939 - original_args_size.constant));
2943 #ifdef PUSH_ARGS_REVERSED
2949 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2951 /* Now load any reg parms into their regs. */
2953 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2954 are to be pushed. */
2955 for (count = 0; count < nargs; count++, argnum += inc)
2957 register rtx val = argvec[argnum].value;
2958 rtx reg = argvec[argnum].reg;
2959 int partial = argvec[argnum].partial;
2961 if (reg != 0 && partial == 0)
2962 emit_move_insn (reg, val);
2966 /* For version 1.37, try deleting this entirely. */
2970 /* Any regs containing parms remain in use through the call. */
2971 for (count = 0; count < nargs; count++)
2972 if (argvec[count].reg != 0)
2973 use_reg (&call_fusage, argvec[count].reg);
2975 /* Don't allow popping to be deferred, since then
2976 cse'ing of library calls could delete a call and leave the pop. */
2979 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2980 will set inhibit_defer_pop to that value. */
2982 /* The return type is needed to decide how many bytes the function pops.
2983 Signedness plays no role in that, so for simplicity, we pretend it's
2984 always signed. We also assume that the list of arguments passed has
2985 no impact, so we pretend it is unknown. */
2988 get_identifier (XSTR (orgfun, 0)),
2989 build_function_type (outmode == VOIDmode ? void_type_node
2990 : type_for_mode (outmode, 0), NULL_TREE),
2991 original_args_size.constant, args_size.constant, 0,
2992 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2993 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2994 old_inhibit_defer_pop + 1, call_fusage, no_queue);
2998 /* Now restore inhibit_defer_pop to its actual original value. */
3001 #ifdef ACCUMULATE_OUTGOING_ARGS
3002 #ifdef REG_PARM_STACK_SPACE
/* Restore the fixed register-parameter area saved above (save_area).  */
3005 enum machine_mode save_mode = GET_MODE (save_area);
3006 #ifdef ARGS_GROW_DOWNWARD
3008 = gen_rtx_MEM (save_mode,
3009 memory_address (save_mode,
3010 plus_constant (argblock,
3014 = gen_rtx_MEM (save_mode,
3015 memory_address (save_mode,
3016 plus_constant (argblock, low_to_save)));
3019 if (save_mode != BLKmode)
3020 emit_move_insn (stack_area, save_area);
3022 emit_block_move (stack_area, validize_mem (save_area),
3023 GEN_INT (high_to_save - low_to_save + 1),
3024 PARM_BOUNDARY / BITS_PER_UNIT);
3028 /* If we saved any argument areas, restore them. */
3029 for (count = 0; count < nargs; count++)
3030 if (argvec[count].save_area)
3032 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3034 = gen_rtx_MEM (save_mode,
3035 memory_address (save_mode,
3036 plus_constant (argblock, argvec[count].offset.constant)));
3038 emit_move_insn (stack_area, argvec[count].save_area);
3041 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3042 stack_usage_map = initial_stack_usage_map;
3046 /* Like emit_library_call except that an extra argument, VALUE,
3047 comes second and says where to store the result.
3048 (If VALUE is zero, this function chooses a convenient way
3049 to return the value.)
3051 This function returns an rtx for where the value is to be found.
3052 If VALUE is nonzero, VALUE is returned. */
/* NOTE(review): emit_library_call_value — this excerpt is elided and
   truncated; the function continues past the end of this view.  */
3055 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
3056 enum machine_mode outmode, int nargs, ...))
3058 #ifndef ANSI_PROTOTYPES
3062 enum machine_mode outmode;
3066 /* Total size in bytes of all the stack-parms scanned so far. */
3067 struct args_size args_size;
3068 /* Size of arguments before any adjustments (such as rounding). */
3069 struct args_size original_args_size;
3070 register int argnum;
3075 CUMULATIVE_ARGS args_so_far;
3076 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3077 struct args_size offset; struct args_size size; rtx save_area; };
3079 int old_inhibit_defer_pop = inhibit_defer_pop;
3080 rtx call_fusage = 0;
3082 int pcc_struct_value = 0;
3083 int struct_value_size = 0;
3085 int reg_parm_stack_space = 0;
3086 #ifdef ACCUMULATE_OUTGOING_ARGS
3090 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3091 /* Define the boundary of the register parm stack space that needs to be
3093 int low_to_save = -1, high_to_save;
3094 rtx save_area = 0; /* Place that it is saved */
3097 #ifdef ACCUMULATE_OUTGOING_ARGS
3098 /* Size of the stack reserved for parameter registers. */
3099 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3100 char *initial_stack_usage_map = stack_usage_map;
3103 #ifdef REG_PARM_STACK_SPACE
3104 #ifdef MAYBE_REG_PARM_STACK_SPACE
3105 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3107 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3111 VA_START (p, nargs);
/* With K&R (non-ANSI) varargs, the named parameters also arrive through
   the va_list and must be fetched here before the per-argument pairs.  */
3113 #ifndef ANSI_PROTOTYPES
3114 orgfun = va_arg (p, rtx);
3115 value = va_arg (p, rtx);
3116 no_queue = va_arg (p, int);
3117 outmode = va_arg (p, enum machine_mode);
3118 nargs = va_arg (p, int);
3121 is_const = no_queue;
3124 /* If this kind of value comes back in memory,
3125 decide where in memory it should come back. */
3126 if (aggregate_value_p (type_for_mode (outmode, 0)))
3128 #ifdef PCC_STATIC_STRUCT_RETURN
3130 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3132 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3133 pcc_struct_value = 1;
3135 value = gen_reg_rtx (outmode);
3136 #else /* not PCC_STATIC_STRUCT_RETURN */
3137 struct_value_size = GET_MODE_SIZE (outmode);
/* Reuse VALUE as the return slot when the caller gave us a MEM;
   otherwise make a stack temporary for the aggregate.  */
3138 if (value != 0 && GET_CODE (value) == MEM)
3141 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3144 /* This call returns a big structure. */
3148 /* ??? Unfinished: must pass the memory address as an argument. */
3150 /* Copy all the libcall-arguments out of the varargs data
3151 and into a vector ARGVEC.
3153 Compute how to pass each argument. We only support a very small subset
3154 of the full argument passing conventions to limit complexity here since
3155 library functions shouldn't have many args. */
/* One extra slot beyond NARGS in case the structure-value address is
   passed as a hidden leading argument (see the mem_value block below). */
3157 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3158 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3160 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3162 args_size.constant = 0;
3169 /* If there's a structure value address to be passed,
3170 either pass it in the special place, or pass it as an extra argument. */
3171 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3173 rtx addr = XEXP (mem_value, 0);
3176 /* Make sure it is a reasonable operand for a move or push insn. */
3177 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3178 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3179 addr = force_operand (addr, NULL_RTX);
3181 argvec[count].value = addr;
3182 argvec[count].mode = Pmode;
3183 argvec[count].partial = 0;
3185 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3186 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3187 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3191 locate_and_pad_parm (Pmode, NULL_TREE,
3192 argvec[count].reg && argvec[count].partial == 0,
3193 NULL_TREE, &args_size, &argvec[count].offset,
3194 &argvec[count].size);
3197 if (argvec[count].reg == 0 || argvec[count].partial != 0
3198 || reg_parm_stack_space > 0)
3199 args_size.constant += argvec[count].size.constant;
3201 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
/* Now the user-supplied (rtx value, machine_mode) pairs; COUNT continues
   past any hidden structure-value argument added above.  */
3206 for (; count < nargs; count++)
3208 rtx val = va_arg (p, rtx);
3209 enum machine_mode mode = va_arg (p, enum machine_mode);
3211 /* We cannot convert the arg value to the mode the library wants here;
3212 must do it earlier where we know the signedness of the arg. */
3214 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3217 /* On some machines, there's no way to pass a float to a library fcn.
3218 Pass it as a double instead. */
3219 #ifdef LIBGCC_NEEDS_DOUBLE
3220 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3221 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3224 /* There's no need to call protect_from_queue, because
3225 either emit_move_insn or emit_push_insn will do that. */
3227 /* Make sure it is a reasonable operand for a move or push insn. */
3228 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3229 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3230 val = force_operand (val, NULL_RTX);
3232 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3233 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3235 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3236 be viewed as just an efficiency improvement. */
3237 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3238 emit_move_insn (slot, val);
3239 val = XEXP (slot, 0);
3244 argvec[count].value = val;
3245 argvec[count].mode = mode;
3247 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3248 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
3250 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3251 argvec[count].partial
3252 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3254 argvec[count].partial = 0;
3257 locate_and_pad_parm (mode, NULL_TREE,
3258 argvec[count].reg && argvec[count].partial == 0,
3259 NULL_TREE, &args_size, &argvec[count].offset,
3260 &argvec[count].size);
3262 if (argvec[count].size.var)
3265 if (reg_parm_stack_space == 0 && argvec[count].partial)
3266 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3268 if (argvec[count].reg == 0 || argvec[count].partial != 0
3269 || reg_parm_stack_space > 0)
3270 args_size.constant += argvec[count].size.constant;
3272 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3276 #ifdef FINAL_REG_PARM_STACK_SPACE
3277 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3280 /* If this machine requires an external definition for library
3281 functions, write one out. */
3282 assemble_external_libcall (fun);
3284 original_args_size = args_size;
3285 #ifdef PREFERRED_STACK_BOUNDARY
3286 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3287 / STACK_BYTES) * STACK_BYTES);
3290 args_size.constant = MAX (args_size.constant,
3291 reg_parm_stack_space);
3293 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3294 args_size.constant -= reg_parm_stack_space;
3297 if (args_size.constant > current_function_outgoing_args_size)
3298 current_function_outgoing_args_size = args_size.constant;
3300 #ifdef ACCUMULATE_OUTGOING_ARGS
3301 /* Since the stack pointer will never be pushed, it is possible for
3302 the evaluation of a parm to clobber something we have already
3303 written to the stack. Since most function calls on RISC machines
3304 do not use the stack, this is uncommon, but must work correctly.
3306 Therefore, we save any area of the stack that was already written
3307 and that we are using. Here we set up to do this by making a new
3308 stack usage map from the old one.
3310 Another approach might be to try to reorder the argument
3311 evaluations to avoid this conflicting stack usage. */
3313 needed = args_size.constant;
3315 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3316 /* Since we will be writing into the entire argument area, the
3317 map must be allocated for its entire size, not just the part that
3318 is the responsibility of the caller. */
3319 needed += reg_parm_stack_space;
3322 #ifdef ARGS_GROW_DOWNWARD
3323 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3326 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3329 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3331 if (initial_highest_arg_in_use)
3332 bcopy (initial_stack_usage_map, stack_usage_map,
3333 initial_highest_arg_in_use);
3335 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3336 bzero (&stack_usage_map[initial_highest_arg_in_use],
3337 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3340 /* The address of the outgoing argument list must not be copied to a
3341 register here, because argblock would be left pointing to the
3342 wrong place after the call to allocate_dynamic_stack_space below.
3345 argblock = virtual_outgoing_args_rtx;
3346 #else /* not ACCUMULATE_OUTGOING_ARGS */
3347 #ifndef PUSH_ROUNDING
3348 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3352 #ifdef PUSH_ARGS_REVERSED
3353 #ifdef PREFERRED_STACK_BOUNDARY
3354 /* If we push args individually in reverse order, perform stack alignment
3355 before the first push (the last arg). */
3357 anti_adjust_stack (GEN_INT (args_size.constant
3358 - original_args_size.constant));
3362 #ifdef PUSH_ARGS_REVERSED
3370 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3371 /* The argument list is the property of the called routine and it
3372 may clobber it. If the fixed area has been used for previous
3373 parameters, we must save and restore it.
3375 Here we compute the boundary of the that needs to be saved, if any. */
3377 #ifdef ARGS_GROW_DOWNWARD
3378 for (count = 0; count < reg_parm_stack_space + 1; count++)
3380 for (count = 0; count < reg_parm_stack_space; count++)
3383 if (count >= highest_outgoing_arg_in_use
3384 || stack_usage_map[count] == 0)
3387 if (low_to_save == -1)
3388 low_to_save = count;
3390 high_to_save = count;
3393 if (low_to_save >= 0)
3395 int num_to_save = high_to_save - low_to_save + 1;
3396 enum machine_mode save_mode
3397 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3400 /* If we don't have the required alignment, must do this in BLKmode. */
3401 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3402 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3403 save_mode = BLKmode;
3405 #ifdef ARGS_GROW_DOWNWARD
3406 stack_area = gen_rtx_MEM (save_mode,
3407 memory_address (save_mode,
3408 plus_constant (argblock,
3411 stack_area = gen_rtx_MEM (save_mode,
3412 memory_address (save_mode,
3413 plus_constant (argblock,
3416 if (save_mode == BLKmode)
3418 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3419 emit_block_move (validize_mem (save_area), stack_area,
3420 GEN_INT (num_to_save),
3421 PARM_BOUNDARY / BITS_PER_UNIT);
3425 save_area = gen_reg_rtx (save_mode);
3426 emit_move_insn (save_area, stack_area);
3431 /* Push the args that need to be pushed. */
3433 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3434 are to be pushed. */
3435 for (count = 0; count < nargs; count++, argnum += inc)
3437 register enum machine_mode mode = argvec[argnum].mode;
3438 register rtx val = argvec[argnum].value;
3439 rtx reg = argvec[argnum].reg;
3440 int partial = argvec[argnum].partial;
3441 #ifdef ACCUMULATE_OUTGOING_ARGS
3442 int lower_bound, upper_bound, i;
3445 if (! (reg != 0 && partial == 0))
3447 #ifdef ACCUMULATE_OUTGOING_ARGS
3448 /* If this is being stored into a pre-allocated, fixed-size, stack
3449 area, save any previous data at that location. */
3451 #ifdef ARGS_GROW_DOWNWARD
3452 /* stack_slot is negative, but we want to index stack_usage_map
3453 with positive values. */
3454 upper_bound = -argvec[argnum].offset.constant + 1;
3455 lower_bound = upper_bound - argvec[argnum].size.constant;
3457 lower_bound = argvec[argnum].offset.constant;
3458 upper_bound = lower_bound + argvec[argnum].size.constant;
3461 for (i = lower_bound; i < upper_bound; i++)
3462 if (stack_usage_map[i]
3463 /* Don't store things in the fixed argument area at this point;
3464 it has already been saved. */
3465 && i > reg_parm_stack_space)
3468 if (i != upper_bound)
3470 /* We need to make a save area. See what mode we can make it. */
3471 enum machine_mode save_mode
3472 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3475 = gen_rtx_MEM (save_mode,
3476 memory_address (save_mode,
3477 plus_constant (argblock,
3478 argvec[argnum].offset.constant)));
3479 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3480 emit_move_insn (argvec[argnum].save_area, stack_area);
3483 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3484 argblock, GEN_INT (argvec[argnum].offset.constant),
3485 reg_parm_stack_space);
3487 #ifdef ACCUMULATE_OUTGOING_ARGS
3488 /* Now mark the segment we just used. */
3489 for (i = lower_bound; i < upper_bound; i++)
3490 stack_usage_map[i] = 1;
3497 #ifndef PUSH_ARGS_REVERSED
3498 #ifdef PREFERRED_STACK_BOUNDARY
3499 /* If we pushed args in forward order, perform stack alignment
3500 after pushing the last arg. */
3502 anti_adjust_stack (GEN_INT (args_size.constant
3503 - original_args_size.constant));
3507 #ifdef PUSH_ARGS_REVERSED
3513 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3515 /* Now load any reg parms into their regs. */
3517 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3518 are to be pushed. */
3519 for (count = 0; count < nargs; count++, argnum += inc)
3521 register rtx val = argvec[argnum].value;
3522 rtx reg = argvec[argnum].reg;
3523 int partial = argvec[argnum].partial;
3525 if (reg != 0 && partial == 0)
3526 emit_move_insn (reg, val);
3531 /* For version 1.37, try deleting this entirely. */
3536 /* Any regs containing parms remain in use through the call. */
3537 for (count = 0; count < nargs; count++)
3538 if (argvec[count].reg != 0)
3539 use_reg (&call_fusage, argvec[count].reg);
3541 /* Pass the function the address in which to return a structure value. */
3542 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3544 emit_move_insn (struct_value_rtx,
3546 force_operand (XEXP (mem_value, 0),
3548 if (GET_CODE (struct_value_rtx) == REG)
3549 use_reg (&call_fusage, struct_value_rtx);
3552 /* Don't allow popping to be deferred, since then
3553 cse'ing of library calls could delete a call and leave the pop. */
3556 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3557 will set inhibit_defer_pop to that value. */
3558 /* See the comment in emit_library_call about the function type we build
3562 get_identifier (XSTR (orgfun, 0)),
3563 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3564 original_args_size.constant, args_size.constant,
3566 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3567 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3568 old_inhibit_defer_pop + 1, call_fusage, is_const);
3570 /* Now restore inhibit_defer_pop to its actual original value. */
3575 /* Copy the value to the right place. */
3576 if (outmode != VOIDmode)
3582 if (value != mem_value)
3583 emit_move_insn (value, mem_value);
3585 else if (value != 0)
3586 emit_move_insn (value, hard_libcall_value (outmode));
3588 value = hard_libcall_value (outmode);
3591 #ifdef ACCUMULATE_OUTGOING_ARGS
3592 #ifdef REG_PARM_STACK_SPACE
3595 enum machine_mode save_mode = GET_MODE (save_area);
3596 #ifdef ARGS_GROW_DOWNWARD
3598 = gen_rtx_MEM (save_mode,
3599 memory_address (save_mode,
3600 plus_constant (argblock,
3604 = gen_rtx_MEM (save_mode,
3605 memory_address (save_mode,
3606 plus_constant (argblock, low_to_save)));
3608 if (save_mode != BLKmode)
3609 emit_move_insn (stack_area, save_area);
3611 emit_block_move (stack_area, validize_mem (save_area),
3612 GEN_INT (high_to_save - low_to_save + 1),
3613 PARM_BOUNDARY / BITS_PER_UNIT);
3617 /* If we saved any argument areas, restore them. */
3618 for (count = 0; count < nargs; count++)
3619 if (argvec[count].save_area)
3621 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3623 = gen_rtx_MEM (save_mode,
3624 memory_address (save_mode, plus_constant (argblock,
3625 argvec[count].offset.constant)));
3627 emit_move_insn (stack_area, argvec[count].save_area);
3630 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3631 stack_usage_map = initial_stack_usage_map;
3638 /* Return an rtx which represents a suitable home on the stack
3639 given TYPE, the type of the argument looking for a home.
3640 This is called only for BLKmode arguments.
3642 SIZE is the size needed for this target.
3643 ARGS_ADDR is the address of the bottom of the argument block for this call.
3644 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3645 if this machine uses push insns. */
3648 target_for_arg (type, size, args_addr, offset)
3652 struct args_size offset;
3655 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3657 /* We do not call memory_address if possible,
3658 because we want to address as close to the stack
3659 as possible. For non-variable sized arguments,
3660 this will be stack-pointer relative addressing. */
/* Constant offset: form ARGS_ADDR + offset directly, without
   legitimizing, to keep the address stack-pointer relative.  */
3661 if (GET_CODE (offset_rtx) == CONST_INT)
3662 target = plus_constant (args_addr, INTVAL (offset_rtx));
3665 /* I have no idea how to guarantee that this
3666 will work in the presence of register parameters. */
/* Variable offset: build the sum explicitly and legitimize it.  */
3667 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3668 target = memory_address (QImode, target);
/* Wrap the computed address as a BLKmode memory reference.  */
3671 return gen_rtx_MEM (BLKmode, target);
3675 /* Store a single argument for a function call
3676 into the register or memory area where it must be passed.
3677 *ARG describes the argument value and where to pass it.
3679 ARGBLOCK is the address of the stack-block for all the arguments,
3680 or 0 on a machine where arguments are pushed individually.
3682 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3683 so must be careful about how the stack is used.
3685 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3686 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3687 that we need not worry about saving and restoring the stack.
3689 FNDECL is the declaration of the function we are calling. */
/* NOTE(review): the function continues past the end of this excerpt and
   several interior lines are elided; comments describe only the visible
   code.  */
3692 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3693 reg_parm_stack_space)
3694 struct arg_data *arg;
3697 int variable_size ATTRIBUTE_UNUSED;
3698 int reg_parm_stack_space;
3700 register tree pval = arg->tree_value;
3704 #ifdef ACCUMULATE_OUTGOING_ARGS
3705 int i, lower_bound, upper_bound;
/* Bail out early on erroneous arguments (error recovery path).  */
3708 if (TREE_CODE (pval) == ERROR_MARK)
3711 /* Push a new temporary level for any temporaries we make for
3715 #ifdef ACCUMULATE_OUTGOING_ARGS
3716 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3717 save any previous data at that location. */
3718 if (argblock && ! variable_size && arg->stack)
3720 #ifdef ARGS_GROW_DOWNWARD
3721 /* stack_slot is negative, but we want to index stack_usage_map
3722 with positive values. */
3723 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3724 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3728 lower_bound = upper_bound - arg->size.constant;
3730 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3731 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3735 upper_bound = lower_bound + arg->size.constant;
/* If any byte of the slot's range is already in use, the loop exits
   early (i != upper_bound) and the live data is saved first.  */
3738 for (i = lower_bound; i < upper_bound; i++)
3739 if (stack_usage_map[i]
3740 /* Don't store things in the fixed argument area at this point;
3741 it has already been saved. */
3742 && i > reg_parm_stack_space)
3745 if (i != upper_bound)
3747 /* We need to make a save area. See what mode we can make it. */
3748 enum machine_mode save_mode
3749 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3751 = gen_rtx_MEM (save_mode,
3752 memory_address (save_mode,
3753 XEXP (arg->stack_slot, 0)));
/* BLKmode save goes to a stack temp via block move; otherwise a
   pseudo register suffices.  */
3755 if (save_mode == BLKmode)
3757 arg->save_area = assign_stack_temp (BLKmode,
3758 arg->size.constant, 0);
3759 MEM_SET_IN_STRUCT_P (arg->save_area,
3760 AGGREGATE_TYPE_P (TREE_TYPE
3761 (arg->tree_value)));
3762 preserve_temp_slots (arg->save_area);
3763 emit_block_move (validize_mem (arg->save_area), stack_area,
3764 GEN_INT (arg->size.constant),
3765 PARM_BOUNDARY / BITS_PER_UNIT);
3769 arg->save_area = gen_reg_rtx (save_mode);
3770 emit_move_insn (arg->save_area, stack_area);
3775 /* Now that we have saved any slots that will be overwritten by this
3776 store, mark all slots this store will use. We must do this before
3777 we actually expand the argument since the expansion itself may
3778 trigger library calls which might need to use the same stack slot. */
3779 if (argblock && ! variable_size && arg->stack)
3780 for (i = lower_bound; i < upper_bound; i++)
3781 stack_usage_map[i] = 1;
3784 /* If this isn't going to be placed on both the stack and in registers,
3785 set up the register and number of words. */
3786 if (! arg->pass_on_stack)
3787 reg = arg->reg, partial = arg->partial;
3789 if (reg != 0 && partial == 0)
3790 /* Being passed entirely in a register. We shouldn't be called in
3794 /* If this arg needs special alignment, don't load the registers
3796 if (arg->n_aligned_regs != 0)
3799 /* If this is being passed partially in a register, we can't evaluate
3800 it directly into its stack slot. Otherwise, we can. */
3801 if (arg->value == 0)
3803 #ifdef ACCUMULATE_OUTGOING_ARGS
3804 /* stack_arg_under_construction is nonzero if a function argument is
3805 being evaluated directly into the outgoing argument list and
3806 expand_call must take special action to preserve the argument list
3807 if it is called recursively.
3809 For scalar function arguments stack_usage_map is sufficient to
3810 determine which stack slots must be saved and restored. Scalar
3811 arguments in general have pass_on_stack == 0.
3813 If this argument is initialized by a function which takes the
3814 address of the argument (a C++ constructor or a C function
3815 returning a BLKmode structure), then stack_usage_map is
3816 insufficient and expand_call must push the stack around the
3817 function call. Such arguments have pass_on_stack == 1.
3819 Note that it is always safe to set stack_arg_under_construction,
3820 but this generates suboptimal code if set when not needed. */
3822 if (arg->pass_on_stack)
3823 stack_arg_under_construction++;
/* Evaluate the argument; the target is its stack slot when the modes
   agree, NULL_RTX otherwise (full condition partially elided).  */
3825 arg->value = expand_expr (pval,
3827 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3828 ? NULL_RTX : arg->stack,
3831 /* If we are promoting object (or for any other reason) the mode
3832 doesn't agree, convert the mode. */
3834 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3835 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3836 arg->value, arg->unsignedp);
3838 #ifdef ACCUMULATE_OUTGOING_ARGS
3839 if (arg->pass_on_stack)
3840 stack_arg_under_construction--;
3844 /* Don't allow anything left on stack from computation
3845 of argument to alloca. */
3847 do_pending_stack_adjust ();
3849 if (arg->value == arg->stack)
3851 /* If the value is already in the stack slot, we are done moving
/* -fcheck-memory-usage support: record a write to the slot's range.  */
3853 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
3855 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3856 XEXP (arg->stack, 0), Pmode,
3857 ARGS_SIZE_RTX (arg->size),
3858 TYPE_MODE (sizetype),
3859 GEN_INT (MEMORY_USE_RW),
3860 TYPE_MODE (integer_type_node));
3863 else if (arg->mode != BLKmode)
3867 /* Argument is a scalar, not entirely passed in registers.
3868 (If part is passed in registers, arg->partial says how much
3869 and emit_push_insn will take care of putting it there.)
3871 Push it, and if its size is less than the
3872 amount of space allocated to it,
3873 also bump stack pointer by the additional space.
3874 Note that in C the default argument promotions
3875 will prevent such mismatches. */
3877 size = GET_MODE_SIZE (arg->mode);
3878 /* Compute how much space the push instruction will push.
3879 On many machines, pushing a byte will advance the stack
3880 pointer by a halfword. */
3881 #ifdef PUSH_ROUNDING
3882 size = PUSH_ROUNDING (size);
3886 /* Compute how much space the argument should get:
3887 round up to a multiple of the alignment for arguments. */
3888 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3889 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3890 / (PARM_BOUNDARY / BITS_PER_UNIT))
3891 * (PARM_BOUNDARY / BITS_PER_UNIT));
3893 /* This isn't already where we want it on the stack, so put it there.
3894 This can either be done with push or copy insns. */
3895 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3896 partial, reg, used - size, argblock,
3897 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
3901 /* BLKmode, at least partly to be pushed. */
3903 register int excess;
3906 /* Pushing a nonscalar.
3907 If part is passed in registers, PARTIAL says how much
3908 and emit_push_insn will take care of putting it there. */
3910 /* Round its size up to a multiple
3911 of the allocation unit for arguments. */
/* Variable-sized argument: size comes from the args_size RTX; the
   excess computation for this path is elided.  */
3913 if (arg->size.var != 0)
3916 size_rtx = ARGS_SIZE_RTX (arg->size);
3920 /* PUSH_ROUNDING has no effect on us, because
3921 emit_push_insn for BLKmode is careful to avoid it. */
3922 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3923 + partial * UNITS_PER_WORD);
3924 size_rtx = expr_size (pval);
3927 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3928 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3929 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3930 reg_parm_stack_space);
3934 /* Unless this is a partially-in-register argument, the argument is now
3937 ??? Note that this can change arg->value from arg->stack to
3938 arg->stack_slot and it matters when they are not the same.
3939 It isn't totally clear that this is correct in all cases. */
3941 arg->value = arg->stack_slot;
3943 /* Once we have pushed something, pops can't safely
3944 be deferred during the rest of the arguments. */
3947 /* ANSI doesn't require a sequence point here,
3948 but PCC has one, so this will avoid some problems. */
3951 /* Free any temporary slots made in processing this argument. Show
3952 that we might have taken the address of something and pushed that
3954 preserve_temp_slots (NULL_RTX);