1 /* RTL buffer overflow protection function for GNU C compiler
2 Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
29 #include "insn-config.h"
30 #include "insn-flags.h"
34 #include "hard-reg-set.h"
39 #include "conditions.h"
40 #include "insn-attr.h"
42 #include "protector.h"
/* File-scope declarations for the ProPolice stack-smashing protector.
   NOTE(review): this listing has elided lines (embedded numbering is
   non-contiguous); additional declarations may exist between the visible
   ones — confirm against the full source before relying on completeness. */
45 rtx assign_stack_local_for_pseudo_reg PARAMS ((enum machine_mode, HOST_WIDE_INT, int));
48 /* Warn when not issuing stack smashing protection for some reason */
49 int warn_stack_protector;
51 /* Round a value to the lowest integer less than it that is a multiple of
52 the required alignment. Avoid using division in case the value is
53 negative. Assume the alignment is a power of two. */
54 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
56 /* Similar, but round to the next highest integer that meets the
58 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
61 /* Nonzero means use propolice as a stack protection method */
62 extern int flag_propolice_protection;
64 /* This file contains several memory arrangement functions to protect
65 the return address and the frame pointer of the stack
66 from a stack-smashing attack. It also
67 provides the function that protects pointer variables. */
69 /* Nonzero if function being compiled can define string buffers that may be
70 damaged by the stack-smash attack */
71 static int current_function_defines_vulnerable_string;
72 static int current_function_defines_short_string;
73 static int current_function_has_variable_string;
74 static int current_function_defines_vsized_array;
75 static int current_function_is_inlinable;
/* guard_area: the stack slot holding the canary value for the current
   function; _guard: the MEM referencing the global "__guard" symbol that
   the canary is copied from (see rtl_prologue below). */
77 static rtx guard_area, _guard;
78 static rtx function_first_insn, prologue_insert_point;
/* Frame-layout bookkeeping used while "sweeping" string variables toward
   the canary-protected end of the frame. */
82 static HOST_WIDE_INT sweep_frame_offset;
83 static HOST_WIDE_INT push_allocated_offset = 0;
84 static HOST_WIDE_INT push_frame_offset = 0;
85 static int saved_cse_not_expected = 0;
87 static int search_string_from_argsandvars PARAMS ((int caller));
88 static int search_string_from_local_vars PARAMS ((tree block));
89 static int search_pointer_def PARAMS ((tree names));
90 static int search_func_pointer PARAMS ((tree type, int mark));
91 static void reset_used_flags_for_insns PARAMS ((rtx insn));
92 static void reset_used_flags_for_decls PARAMS ((tree block));
93 static void reset_used_flags_of_plus PARAMS ((rtx x));
94 static void rtl_prologue PARAMS ((rtx insn));
95 static void rtl_epilogue PARAMS ((rtx fnlastinsn));
96 static void arrange_var_order PARAMS ((tree blocks));
97 static void copy_args_for_protection PARAMS ((void));
98 static void sweep_string_variable PARAMS ((rtx sweep_var, HOST_WIDE_INT var_size));
99 static void sweep_string_in_decls PARAMS ((tree block, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
100 static void sweep_string_in_args PARAMS ((tree parms, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
101 static void sweep_string_use_of_insns PARAMS ((rtx insn, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
102 static void sweep_string_in_operand PARAMS ((rtx insn, rtx *loc, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
103 static void move_arg_location PARAMS ((rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size));
104 static void change_arg_use_of_insns PARAMS ((rtx insn, rtx orig, rtx new, HOST_WIDE_INT size));
105 static void change_arg_use_in_operand PARAMS ((rtx x, rtx orig, rtx new, HOST_WIDE_INT size));
106 static void expand_value_return PARAMS ((rtx val));
107 static int replace_return_reg PARAMS ((rtx insn, rtx return_save));
108 static void validate_insns_of_varrefs PARAMS ((rtx insn));
109 static void validate_operand_of_varrefs PARAMS ((rtx insn, rtx *loc));
/* Buffers strictly shorter than this many bytes are considered too small
   to be a smash target; functions containing only such buffers are not
   protected (a warning is issued when warn_stack_protector is set). */
111 #define SUSPICIOUS_BUF_SIZE 8
/* AUTO_BASEPTR/AUTO_OFFSET decompose a (plus BASE (const_int OFS))
   address into its base and constant offset; a bare base has offset 0. */
113 #define AUTO_BASEPTR(X) \
114 (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
115 #define AUTO_OFFSET(X) \
116 (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
117 #undef PARM_PASSED_IN_MEMORY
118 #define PARM_PASSED_IN_MEMORY(PARM) \
119 (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
/* Also treats a REG whose `used' bit is set as the virtual stack-vars
   pointer; the sweep routines set that bit on rewritten addresses. */
120 #define VIRTUAL_STACK_VARS_P(X) \
121 ((X) == virtual_stack_vars_rtx || (GET_CODE (X) == REG && (X)->used))
/* Entry point of the protector: decide whether the current function needs
   stack-smashing protection and, if so, allocate the guard (canary) area,
   sweep vulnerable string buffers next to it, copy arguments into locals,
   and insert the prologue/epilogue canary-check RTL.
   INLINABLE is nonzero when the function may be inlined; inlinable
   functions are only validated, not instrumented.
   NOTE(review): lines are elided in this listing (return type, braces and
   some statements are missing) — do not edit without the full source. */
126 prepare_stack_protection (inlinable)
129 tree blocks = DECL_INITIAL (current_function_decl);
130 current_function_is_inlinable = inlinable && !flag_no_inline;
131 push_frame_offset = push_allocated_offset = 0;
132 saved_cse_not_expected = 0;
135 skip the protection if the function has no block or it is an inline function
137 if (current_function_is_inlinable) validate_insns_of_varrefs (get_insns ());
138 if (! blocks || current_function_is_inlinable) return;
140 current_function_defines_vulnerable_string = search_string_from_argsandvars (0);
142 if (current_function_defines_vulnerable_string)
144 HOST_WIDE_INT offset;
145 function_first_insn = get_insns ();
147 if (current_function_contains_functions) {
148 if (warn_stack_protector)
149 warning ("not protecting function: it contains functions");
153 /* Initialize recognition, indicating that volatile is OK. */
156 sweep_frame_offset = 0;
158 #ifdef STACK_GROWS_DOWNWARD
160 frame_offset: offset to end of allocated area of stack frame.
161 It is defined in the function.c
164 /* the location must be before buffers */
/* Allocate the guard slot; marked volatile so the canary load/compare
   cannot be optimized away. */
165 guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
166 PUT_MODE (guard_area, GUARD_m);
167 MEM_VOLATILE_P (guard_area) = 1;
169 #ifndef FRAME_GROWS_DOWNWARD
170 sweep_frame_offset = frame_offset;
173 /* For making room for guard value, scan all insns and fix the offset address
174 of the variable that is based on frame pointer.
175 Scan all declarations of variables and fix the offset address of the variable that
176 is based on the frame pointer */
177 sweep_string_variable (guard_area, UNITS_PER_GUARD);
180 /* the location of guard area moves to the beginning of stack frame */
181 if ((offset = AUTO_OFFSET(XEXP (guard_area, 0))))
182 XEXP (XEXP (guard_area, 0), 1) = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);
185 /* Insert prologue rtl instructions */
186 rtl_prologue (function_first_insn);
188 if (! current_function_has_variable_string)
190 /* Generate argument saving instruction */
191 copy_args_for_protection ();
193 #ifndef FRAME_GROWS_DOWNWARD
194 /* If frame grows upward, character string copied from an arg stays top of
195 the guard variable. So sweep the guard variable again */
196 sweep_frame_offset = CEIL_ROUND (frame_offset, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
197 sweep_string_variable (guard_area, UNITS_PER_GUARD);
200 else if (warn_stack_protector)
201 warning ("not protecting variables: it has a variable length buffer");
203 #ifndef FRAME_GROWS_DOWNWARD
204 if (STARTING_FRAME_OFFSET == 0)
206 /* this may be only for alpha */
207 push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
208 assign_stack_local (BLKmode, push_allocated_offset, -1);
209 sweep_frame_offset = frame_offset;
210 sweep_string_variable (const0_rtx, -push_allocated_offset);
211 sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
215 /* Arrange the order of local variables */
216 arrange_var_order (blocks);
218 #ifdef STACK_GROWS_DOWNWARD
219 /* Insert epilogue rtl instructions */
220 rtl_epilogue (get_last_insn ());
222 init_recog_no_volatile ();
224 else if (current_function_defines_short_string
225 && warn_stack_protector)
226 warning ("not protecting function: buffer is less than %d bytes long",
227 SUSPICIOUS_BUF_SIZE);
/* Return TRUE if the current function declares a character buffer (in a
   local variable or, when the stack grows downward, in a memory-passed
   argument) that could be a stack-smash target.  Results are cached per
   function decl.  CALLER is 0 when called from the protector itself and
   1 when called from push_frame; a cache miss with CALLER==1 returns
   FALSE without recomputing.
   NOTE(review): lines elided in this listing; the visible control flow
   is incomplete. */
231 search string from arguments and local variables
232 caller: 0 means call from protector_stack_protection
233 1 means call from push_frame
236 search_string_from_argsandvars (caller)
242 /* saves a latest search result as a cached information */
243 static tree __latest_search_decl = 0;
244 static int __latest_search_result = FALSE;
246 if (__latest_search_decl == current_function_decl)
247 return __latest_search_result;
248 else if (caller) return FALSE;
249 __latest_search_decl = current_function_decl;
250 __latest_search_result = TRUE;
252 current_function_defines_short_string = FALSE;
253 current_function_has_variable_string = FALSE;
254 current_function_defines_vsized_array = FALSE;
257 search a string variable from local variables
259 blocks = DECL_INITIAL (current_function_decl);
260 string_p = search_string_from_local_vars (blocks);
/* alloca with no visible variable-sized array implies a variable-length
   buffer somewhere we cannot reorder. */
262 if (!current_function_defines_vsized_array && current_function_calls_alloca)
264 current_function_has_variable_string = TRUE;
268 if (string_p) return TRUE;
270 #ifdef STACK_GROWS_DOWNWARD
272 search a string variable from arguments
274 parms = DECL_ARGUMENTS (current_function_decl);
276 for (; parms; parms = TREE_CHAIN (parms))
277 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
279 if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
281 string_p = search_string_def (TREE_TYPE(parms));
282 if (string_p) return TRUE;
287 __latest_search_result = FALSE;
/* Recursively scan BLOCK (and its sub-blocks and chained siblings) for a
   local VAR_DECL whose type contains a character array.  As a side effect,
   sets current_function_has_variable_string when such a variable lives at
   an indirect/variable address that cannot be reordered.
   NOTE(review): lines elided in this listing (return statements and
   braces are missing from view). */
293 search_string_from_local_vars (block)
301 types = BLOCK_VARS(block);
305 /* skip the declaration that refers an external variable */
306 /* name: types.decl.name.identifier.id */
307 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
308 && TREE_CODE (types) == VAR_DECL
309 && ! DECL_ARTIFICIAL (types)
311 && GET_CODE (DECL_RTL (types)) == MEM)
313 if (search_string_def (TREE_TYPE (types)))
315 rtx home = DECL_RTL (types);
/* A home addressed through memory or through a register other than the
   frame/stack/arg pointer means the object is variable-sized or lives
   behind a pointer; it cannot be swept next to the guard. */
317 if (GET_CODE (home) == MEM
318 && (GET_CODE (XEXP (home, 0)) == MEM
319 || (GET_CODE (XEXP (home, 0)) == REG
320 && XEXP (home, 0) != virtual_stack_vars_rtx
321 && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
322 && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
323 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
324 && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
327 /* If the value is indirect by memory or by a register
328 that isn't the frame pointer
329 then it means the object is variable-sized and address through
330 that register or stack slot. The protection has no way to hide pointer variables
331 behind the array, so all we can do is staying arguments. */
333 current_function_has_variable_string = TRUE;
335 /* found character array */
340 types = TREE_CHAIN(types);
343 if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
348 block = BLOCK_CHAIN (block);
/* Return TRUE if TYPE contains a character array (directly, or nested in
   an array/record/union) large enough to be a smash target.  Sets
   current_function_defines_vsized_array for variable-sized arrays and
   current_function_defines_short_string for char arrays shorter than
   SUSPICIOUS_BUF_SIZE.
   NOTE(review): the switch's case labels and several returns are elided
   from this listing. */
356 * search a character array from the specified type tree
359 search_string_def (type)
367 switch (TREE_CODE (type))
370 /* Check if the array is a variable-sized array */
371 if (TYPE_DOMAIN (type) == 0 ||
372 TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR)
373 current_function_defines_vsized_array = TRUE;
/* A "string" is a char array, or an array of 8-bit integers. */
375 if (TREE_TYPE (type) == char_type_node
377 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
378 && TYPE_PRECISION (TREE_TYPE (type)) == 8))
380 /* Check if the string is a variable string */
381 if (TYPE_DOMAIN (type) == 0 ||
382 TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR)
385 /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE */
386 if (TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1 >= SUSPICIOUS_BUF_SIZE)
389 current_function_defines_short_string = TRUE;
391 return search_string_def(TREE_TYPE(type));
394 case QUAL_UNION_TYPE:
396 /* Output the name, type, position (in bits), size (in bits) of each
398 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
400 /* Omit here local type decls until we know how to support them. */
401 if ((TREE_CODE (tem) == TYPE_DECL)
402 || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
405 if (search_string_def(TREE_TYPE(tem))) return TRUE;
411 /* I'm not sure whether OFFSET_TYPE needs this treatment,
412 so I'll play safe and return 1. */
/* Return TRUE if RTX X anywhere contains a frame-pointer-relative address
   of the form (plus virtual_stack_vars_rtx const).  Recurses through all
   subexpressions.
   NOTE(review): the function header line and parts of the body are elided
   from this listing — presumably `static int contains_fp (x)`; confirm
   against the full source. */
422 * examine whether the input contains frame pointer addressing
428 register enum rtx_code code;
442 if (XEXP (x, 0) == virtual_stack_vars_rtx
443 && CONSTANT_P (XEXP (x, 1)))
451 /* Scan all subexpressions. */
452 fmt = GET_RTX_FORMAT (code);
453 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
456 if (contains_fp (XEXP (x, i))) return TRUE;
458 else if (*fmt == 'E')
459 for (j = 0; j < XVECLEN (x, i); j++)
460 if (contains_fp (XVECEXP (x, i, j))) return TRUE;
/* Return TRUE if TYPE contains a pointer worth protecting — including
   pointers nested in records/unions/arrays.  For read-only pointed-to
   types it additionally checks for function pointers via
   search_func_pointer (marking then un-marking visited types).
   NOTE(review): case labels and several returns are elided from this
   listing. */
467 search_pointer_def (type)
475 switch (TREE_CODE (type))
478 case QUAL_UNION_TYPE:
480 /* Output the name, type, position (in bits), size (in bits) of each
482 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
484 /* Omit here local type decls until we know how to support them. */
485 if ((TREE_CODE (tem) == TYPE_DECL)
486 || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
489 if (search_pointer_def (TREE_TYPE(tem))) return TRUE;
494 return search_pointer_def (TREE_TYPE(type));
498 /* I'm not sure whether OFFSET_TYPE needs this treatment,
499 so I'll play safe and return 1. */
501 if (TYPE_READONLY (TREE_TYPE (type)))
503 int funcp = search_func_pointer (TREE_TYPE (type), 1);
504 /* Un-mark the type as having been visited already */
505 search_func_pointer (TREE_TYPE (type), 0);
/* Return TRUE if TYPE contains a function pointer.  MARK is used as a
   visited flag stored in TREE_ASM_WRITTEN to break cycles in recursive
   type graphs: call once with MARK==1 to search, then again with MARK==0
   to clear the flags (see search_pointer_def).
   NOTE(review): case labels and parts of the body are elided from this
   listing. */
519 search_func_pointer (type, mark)
528 switch (TREE_CODE (type))
531 case QUAL_UNION_TYPE:
533 if (TREE_ASM_WRITTEN (type) != mark)
535 /* mark the type as having been visited already */
536 TREE_ASM_WRITTEN (type) = mark;
538 /* Output the name, type, position (in bits), size (in bits) of
540 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
542 /* Omit here local type decls until we know how to support them. */
543 if (TREE_CODE (tem) == FIELD_DECL
544 && search_func_pointer (TREE_TYPE(tem), mark)) return TRUE;
550 return search_func_pointer (TREE_TYPE(type), mark);
554 /* I'm not sure whether OFFSET_TYPE needs this treatment,
555 so I'll play safe and return 1. */
557 return TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE;
/* Walk the insn chain from INSN and clear the `used' bits of PLUS rtxs
   inside every INSN/JUMP_INSN/CALL_INSN pattern, via
   reset_used_flags_of_plus.  The sweep routines set those bits to avoid
   rewriting the same address twice; this resets them for the next pass.
   NOTE(review): lines elided in this listing (format-character cases are
   partially missing). */
568 reset_used_flags_for_insns (insn)
572 register enum rtx_code code;
573 register const char *format_ptr;
575 for (; insn; insn = NEXT_INSN (insn))
576 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
577 || GET_CODE (insn) == CALL_INSN)
579 code = GET_CODE (insn);
581 format_ptr = GET_RTX_FORMAT (code);
583 for (i = 0; i < GET_RTX_LENGTH (code); i++)
585 switch (*format_ptr++) {
587 reset_used_flags_of_plus (XEXP (insn, i));
591 for (j = 0; j < XVECLEN (insn, i); j++)
592 reset_used_flags_of_plus (XVECEXP (insn, i, j));
/* Walk BLOCK's variable declarations (and sub-blocks/sibling blocks) and
   clear the `used' bit on each frame-pointer-plus-constant home address,
   undoing the marking done by the sweep routines.
   NOTE(review): lines elided in this listing. */
600 reset_used_flags_for_decls (block)
608 types = BLOCK_VARS(block);
612 /* skip the declaration that refers an external variable and
613 also skip an global variable */
614 if (! DECL_EXTERNAL (types))
616 home = DECL_RTL (types);
617 if (home == 0) goto next;
619 if (GET_CODE (home) == MEM
620 && GET_CODE (XEXP (home, 0)) == PLUS
621 && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
623 XEXP (home, 0)->used = 0;
627 types = TREE_CHAIN(types);
630 reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
632 block = BLOCK_CHAIN (block);
/* Recursively clear the `used' bit of every PLUS rtx within X.  Shared
   rtxs and insn chains are handled specially (CALL_PLACEHOLDER bodies are
   walked as insn lists).
   NOTE(review): case labels and the PLUS-clearing statement itself are
   elided from this listing. */
636 /* Clear the USED bits only of type PLUS in X */
639 reset_used_flags_of_plus (x)
643 register enum rtx_code code;
644 register const char *format_ptr;
651 /* These types may be freely shared so we needn't do any resetting
672 /* The chain of insns is not being copied. */
679 case CALL_PLACEHOLDER:
680 reset_used_flags_for_insns (XEXP (x, 0));
681 reset_used_flags_for_insns (XEXP (x, 1));
682 reset_used_flags_for_insns (XEXP (x, 2));
689 format_ptr = GET_RTX_FORMAT (code);
690 for (i = 0; i < GET_RTX_LENGTH (code); i++)
692 switch (*format_ptr++)
695 reset_used_flags_of_plus (XEXP (x, i));
699 for (j = 0; j < XVECLEN (x, i); j++)
700 reset_used_flags_of_plus (XVECEXP (x, i, j));
/* Emit the protector prologue: locate the insertion point just after the
   NOTE_INSN_FUNCTION_BEG note (for `main', after the call to `__main' so
   global initializers have run) and emit a move of the global "__guard"
   canary into this function's guard_area slot.
   NOTE(review): the function header line (`static void rtl_prologue
   (first)` — presumably) and several body lines are elided from this
   listing; confirm against the full source. */
711 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
712 #undef HAS_INIT_SECTION
713 #define HAS_INIT_SECTION
718 for (; insn; insn = NEXT_INSN (insn))
719 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
722 #if !defined (HAS_INIT_SECTION)
723 /* If this function is `main', skip a call to `__main'
724 to run guard instruments after global initializers, etc. */
725 if (DECL_NAME (current_function_decl)
726 && strcmp (IDENTIFIER_POINTER (DECL_NAME (current_function_decl)), "main") == 0
727 && DECL_CONTEXT (current_function_decl) == NULL_TREE)
730 for (; insn; insn = NEXT_INSN (insn))
731 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
733 if (insn == 0) insn = fbinsn;
737 prologue_insert_point = NEXT_INSN (insn); /* mark the next insn of FUNCTION_BEG insn */
/* Load the global canary value into the per-function guard slot. */
741 _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard"));
742 emit_move_insn ( guard_area, _guard);
744 _val = gen_sequence ();
747 emit_insn_before (_val, prologue_insert_point);
/* Emit the protector epilogue: redirect every RETURN jump to a common
   label, save the return value, compare guard_area against the global
   __guard, call __stack_smash_handler (with the function name string) on
   mismatch, and otherwise restore the return value and return normally.
   NOTE(review): the function header line (`static void rtl_epilogue
   (insn)` — presumably) and many body lines are elided from this listing;
   the control flow shown here is incomplete. */
758 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl)), return_save;
759 int flag_have_return = FALSE;
767 return_label = gen_label_rtx ();
/* Redirect existing (return) jumps to our common check label. */
769 for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
770 if (GET_CODE (insn) == JUMP_INSN
771 && GET_CODE (PATTERN (insn)) == RETURN
772 && GET_MODE (PATTERN (insn)) == VOIDmode)
774 rtx pat = gen_rtx_SET (VOIDmode,
776 gen_rtx_LABEL_REF (VOIDmode,
778 PATTERN (insn) = pat;
779 flag_have_return = TRUE;
783 emit_label (return_label);
788 && ! (current_function_returns_struct
789 || current_function_returns_pcc_struct))
/* Preserve the return value in a fresh pseudo while the guard check
   (which may clobber the return register) runs. */
791 return_save = GET_CODE (return_reg)==REG?
792 gen_reg_rtx (GET_MODE (return_reg)):return_reg;
794 if (! replace_return_reg (prologue_insert_point, return_save))
795 emit_move_insn (return_save, return_reg);
798 compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, 0, 0); /* if (guard_area != _guard) */
800 if_false_label = gen_label_rtx (); /* { */
801 emit_jump_insn ( gen_beq(if_false_label));
804 In the function force_const_mem in varasm.c of egcs-1.1.2-30, there is a
805 failure to assign the guard_area variable to eax register, which destroys
806 the return value of the function.
808 The BUG preceding comment is an apropriate processes.
809 When the bug is fixed, removes the comment
812 /* generate string for the current function name */
813 funcstr = build_string (strlen(current_function_name)+1, current_function_name);
814 TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);/* = char_array_type_node;*/
815 funcname = output_constant_def (funcstr);
/* On canary mismatch, call __stack_smash_handler(funcname, guard). */
817 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__stack_smash_handler"),
819 XEXP (funcname, 0), Pmode, guard_area, GUARD_m);
821 /* generate RTL to return from the current function */
823 emit_barrier (); /* } */
824 emit_label (if_false_label);
826 /* generate RTL to return from the current function */
829 if (!current_function_returns_struct && !current_function_returns_pcc_struct)
830 expand_value_return (return_save);
833 /* If returning a structure, arrange to return the address of the value
834 in a place where debuggers expect to find it.
836 If returning a structure PCC style,
837 the caller also depends on this value.
838 And current_function_returns_pcc_struct is not necessarily set. */
841 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
842 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
843 #ifdef FUNCTION_OUTGOING_VALUE
845 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
846 current_function_decl);
849 = FUNCTION_VALUE (build_pointer_type (type),
850 current_function_decl);
853 /* Mark this as a function return value so integrate will delete the
854 assignment and USE below when inlining this function. */
855 REG_FUNCTION_VALUE_P (outgoing) = 1;
857 emit_move_insn (outgoing, value_address);
858 use_variable (outgoing);
863 if (HAVE_return && flag_have_return)
865 emit_jump_insn (gen_return ());
870 _val = gen_sequence ();
873 emit_insn_after (_val, insn);
/* Walk BLOCK's variables (and sub-blocks/siblings) and sweep every string
   (character-array) variable toward sweep_frame_offset, so that all string
   buffers sit adjacent to the guard area with pointer variables behind
   them.  Variables already at the top of the swept region just shrink the
   region instead of moving.
   NOTE(review): lines elided in this listing. */
878 arrange_var_order (block)
882 HOST_WIDE_INT offset;
886 types = BLOCK_VARS (block);
890 /* skip the declaration that refers an external variable */
891 /* name: types.decl.assembler_name.id */
892 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
893 && TREE_CODE (types) == VAR_DECL
894 && ! DECL_ARTIFICIAL (types)
895 && ! DECL_INLINE (types) /* don't sweep inlined string */
897 && GET_CODE (DECL_RTL (types)) == MEM)
899 if (search_string_def (TREE_TYPE (types)))
901 rtx home = DECL_RTL (types);
/* Only sweep variables directly addressed off the frame pointer;
   indirect or variable-sized homes cannot be moved (same test as in
   search_string_from_local_vars, negated). */
903 if (! (GET_CODE (home) == MEM
904 && (GET_CODE (XEXP (home, 0)) == MEM
905 || (GET_CODE (XEXP (home, 0)) == REG
906 && XEXP (home, 0) != virtual_stack_vars_rtx
907 && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
908 && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
909 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
910 && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
914 /* found a string variable */
915 HOST_WIDE_INT var_size =
916 ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
919 if (GET_MODE (DECL_RTL (types)) == BLKmode)
921 int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
922 var_size = CEIL_ROUND (var_size, alignment);
925 /* skip the variable if it is top of the region
926 specified by sweep_frame_offset */
927 offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
928 if (offset == sweep_frame_offset - var_size)
929 sweep_frame_offset -= var_size;
931 else if (offset < sweep_frame_offset - var_size)
932 sweep_string_variable (DECL_RTL (types), var_size);
937 types = TREE_CHAIN(types);
940 arrange_var_order (BLOCK_SUBBLOCKS (block));
942 block = BLOCK_CHAIN (block);
/* For every named argument containing a string or pointer, copy it out of
   its incoming location into a safe place (a fresh pseudo register, or a
   new stack local for memory-passed args), and rewrite all uses so an
   overflow cannot clobber the caller-visible argument area.  The last
   argument of a varargs function is skipped since it anchors va_arg.
   NOTE(review): lines elided in this listing (several braces and
   declarations are missing from view). */
948 copy_args_for_protection (void)
950 tree parms = DECL_ARGUMENTS (current_function_decl);
954 parms = DECL_ARGUMENTS (current_function_decl);
955 for (idx = 0; parms; parms = TREE_CHAIN (parms))
956 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
958 if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
963 skip argument protection if the last argument is used
964 for the variable argument
968 if (TREE_CHAIN (parms) == 0)
970 fntype = TREE_TYPE (current_function_decl);
972 if ((TYPE_ARG_TYPES (fntype) != 0 &&
973 TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) != void_type_node)
974 || current_function_varargs)
979 string_p = search_string_def (TREE_TYPE(parms));
981 /* check if it is a candidate to move */
982 if (string_p || search_pointer_def (TREE_TYPE (parms)))
985 = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
/* Case 1: argument already lives in a pseudo register — copy it to a
   fresh pseudo and retarget all uses. */
990 if (GET_CODE (DECL_RTL (parms)) == REG)
993 rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
995 /* generate codes for copying the content */
996 movinsn = emit_move_insn (safe, DECL_RTL (parms));
997 PATTERN (movinsn)->volatil = 1; /* avoid register elimination in gcse.c (COPY-PROP)*/
999 change_arg_use_of_insns (prologue_insert_point, DECL_RTL (parms), safe, 0);
1001 /* save debugger info */
1002 DECL_INCOMING_RTL (parms) = safe;
/* Case 2: argument is a MEM through an ADDRESSOF — copy the incoming
   value into a fresh pseudo and store it back. */
1005 else if (GET_CODE (DECL_RTL (parms)) == MEM
1006 && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
1009 rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
1011 /* generate codes for copying the content */
1012 movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
1013 PATTERN (movinsn)->volatil = 1; /* avoid register elimination in gcse.c (COPY-PROP)*/
1015 /* change the addressof information to the newly allocated pseudo register */
1016 emit_move_insn (DECL_RTL (parms), safe);
1018 /* save debugger info */
1019 DECL_INCOMING_RTL (parms) = safe;
/* Case 3 (presumably the else branch): memory-passed argument — copy it
   into a new stack local and relocate every reference. */
1024 /* declare temporary local variable DECL_NAME (parms) for it */
1026 = assign_stack_local (DECL_MODE (parms), arg_size,
1027 DECL_MODE (parms) == BLKmode ? -1 : 0);
1029 MEM_IN_STRUCT_P (temp_rtx) = AGGREGATE_TYPE_P (TREE_TYPE (parms));
1030 MEM_ALIAS_SET (temp_rtx) = get_alias_set (parms);
1032 /* generate codes for copying the content */
1033 store_expr (parms, temp_rtx, 0);
1035 /* change the reference for each instructions */
1036 move_arg_location (prologue_insert_point, DECL_RTL (parms),
1037 temp_rtx, arg_size);
1039 /* change the location of parms variable */
1040 DECL_RTL (parms) = temp_rtx;
1042 /* change debugger info */
1043 DECL_INCOMING_RTL (parms) = temp_rtx;
1046 emit_insn_before (gen_sequence (), prologue_insert_point);
1049 #ifdef FRAME_GROWS_DOWNWARD
1050 /* process the string argument */
1051 if (string_p && DECL_MODE (parms) == BLKmode)
1053 int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1054 arg_size = CEIL_ROUND (arg_size, alignment);
1056 /* change the reference for each instructions */
1057 sweep_string_variable (DECL_RTL (parms), arg_size);
/* Move the string variable SWEEP_VAR (of VAR_SIZE bytes) to the position
   addressed by sweep_frame_offset — the current end of the string region —
   by rewriting its offset in all declarations, argument homes, and insns,
   then shifting everything between the old and new positions.  SWEEP_VAR
   may be a MEM (normal variable) or a CONST_INT offset (raw frame slot).
   Afterward the `used' marker bits are cleared and sweep_frame_offset is
   reduced by VAR_SIZE.
   NOTE(review): switch case labels and some lines are elided from this
   listing. */
1067 sweep a string variable to the local variable addressed by sweep_frame_offset, that is
1068 a last position of string variables.
1071 sweep_string_variable (sweep_var, var_size)
1073 HOST_WIDE_INT var_size;
1075 HOST_WIDE_INT sweep_offset;
1077 switch (GET_CODE (sweep_var))
1080 if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
1081 && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG)
1083 sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
1086 sweep_offset = INTVAL (sweep_var);
1092 /* scan all declarations of variables and fix the offset address of
1093 the variable based on the frame pointer */
1094 sweep_string_in_decls (DECL_INITIAL (current_function_decl), sweep_offset, var_size);
1096 /* scan all argument variable and fix the offset address based on the frame pointer */
1097 sweep_string_in_args (DECL_ARGUMENTS (current_function_decl), sweep_offset, var_size);
1099 /* For making room for sweep variable, scan all insns and fix the offset address
1100 of the variable that is based on frame pointer*/
1101 sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
1104 /* Clear all the USED bits in operands of all insns and declarations of local vars */
1105 reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
1106 reset_used_flags_for_insns (function_first_insn);
1108 sweep_frame_offset -= var_size;
/* Rewrite every reference to the argument location ORIG (VAR_SIZE bytes)
   in the insn chain starting at INSN so it refers to NEW instead, then
   clear the `used' marker bits set during the rewrite. */
1114 move an argument to the local variable addressed by frame_offset
1117 move_arg_location (insn, orig, new, var_size)
1118 rtx insn, orig, new;
1119 HOST_WIDE_INT var_size;
1121 /* For making room for sweep variable, scan all insns and fix the offset address
1122 of the variable that is based on frame pointer*/
1123 change_arg_use_of_insns (insn, orig, new, var_size);
1126 /* Clear all the USED bits in operands of all insns and declarations of local vars */
1127 reset_used_flags_for_insns (insn);
/* Adjust the home addresses of all local variables in BLOCK (recursively)
   for a sweep of SWEEP_SIZE bytes at SWEEP_OFFSET: the swept variable
   itself moves to sweep_frame_offset - sweep_size, and every variable
   between sweep_offset and sweep_frame_offset shifts down by sweep_size.
   Rewritten addresses get their `used' bit set so they are not adjusted
   twice; reset_used_flags_for_decls clears them afterward.
   NOTE(review): lines elided in this listing. */
1132 sweep_string_in_decls (block, sweep_offset, sweep_size)
1134 HOST_WIDE_INT sweep_offset, sweep_size;
1137 HOST_WIDE_INT offset;
1142 types = BLOCK_VARS(block);
1146 /* skip the declaration that refers an external variable and
1147 also skip an global variable */
1148 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {
1150 home = DECL_RTL (types);
1151 if (home == 0) goto next;
1153 /* process for static local variable */
1154 if (GET_CODE (home) == MEM
1155 && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
/* Home is the bare frame pointer (offset 0). */
1158 if (GET_CODE (home) == MEM
1159 && XEXP (home, 0) == virtual_stack_vars_rtx)
1163 /* the operand related to the sweep variable */
1164 if (sweep_offset <= offset
1165 && offset < sweep_offset + sweep_size)
1167 offset = sweep_frame_offset - sweep_size - sweep_offset;
1169 XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx, offset);
1170 XEXP (home, 0)->used = 1;
1172 else if (sweep_offset <= offset
1173 && offset < sweep_frame_offset)
1174 { /* the rest of variables under sweep_frame_offset, so shift the location */
1175 XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx, -sweep_size);
1176 XEXP (home, 0)->used = 1;
1180 if (GET_CODE (home) == MEM
1181 && GET_CODE (XEXP (home, 0)) == MEM)
1183 /* process for dynamically allocated array */
1184 home = XEXP (home, 0);
/* Home is frame pointer plus constant offset. */
1187 if (GET_CODE (home) == MEM
1188 && GET_CODE (XEXP (home, 0)) == PLUS
1189 && XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
1190 && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
1192 if (! XEXP (home, 0)->used)
1194 offset = AUTO_OFFSET(XEXP (home, 0));
1196 /* the operand related to the sweep variable */
1197 if (sweep_offset <= offset
1198 && offset < sweep_offset + sweep_size)
1201 offset += sweep_frame_offset - sweep_size - sweep_offset;
1202 XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1205 XEXP (home, 0)->used = 1;
1207 else if (sweep_offset <= offset
1208 && offset < sweep_frame_offset)
1209 { /* the rest of variables under sweep_frame_offset,
1210 so shift the location */
1212 XEXP (XEXP (home, 0), 1)
1213 = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size)
1216 XEXP (home, 0)->used = 1;
1223 types = TREE_CHAIN(types);
1226 sweep_string_in_decls (BLOCK_SUBBLOCKS (block), sweep_offset, sweep_size);
1227 block = BLOCK_CHAIN (block);
/* Adjust DECL_INCOMING_RTL of each memory-passed argument in PARMS for a
   sweep of SWEEP_SIZE bytes at SWEEP_OFFSET, using the same move/shift
   rules as sweep_string_in_decls.  Already-adjusted addresses (used bit
   set) are skipped. */
1233 sweep_string_in_args (parms, sweep_offset, sweep_size)
1235 HOST_WIDE_INT sweep_offset, sweep_size;
1238 HOST_WIDE_INT offset;
1240 for (; parms; parms = TREE_CHAIN (parms))
1241 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1243 if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1245 home = DECL_INCOMING_RTL (parms);
1247 if (XEXP (home, 0)->used) continue;
1249 offset = AUTO_OFFSET(XEXP (home, 0));
1251 /* the operand related to the sweep variable */
1252 if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
1254 if (sweep_offset <= offset
1255 && offset < sweep_offset + sweep_size)
1257 offset += sweep_frame_offset - sweep_size - sweep_offset;
1258 XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1261 XEXP (home, 0)->used = 1;
1263 else if (sweep_offset <= offset
1264 && offset < sweep_frame_offset)
1265 { /* the rest of variables under sweep_frame_offset, so shift the location */
1266 XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1269 XEXP (home, 0)->used = 1;
/* Communicates between sweep_string_use_of_insns and
   sweep_string_in_operand: set while the current insn's pattern mentions
   a virtual register, so rewritten addresses may need re-validation. */
1277 static int has_virtual_reg;
/* Walk the insn chain from INSN and rewrite frame-pointer-relative
   operands in every INSN/JUMP_INSN/CALL_INSN pattern for a sweep of
   SWEEP_SIZE bytes at SWEEP_OFFSET. */
1280 sweep_string_use_of_insns (insn, sweep_offset, sweep_size)
1282 HOST_WIDE_INT sweep_offset, sweep_size;
1284 for (; insn; insn = NEXT_INSN (insn))
1285 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1286 || GET_CODE (insn) == CALL_INSN)
1288 has_virtual_reg = FALSE;
1289 sweep_string_in_operand (insn, &PATTERN (insn), sweep_offset, sweep_size);
/* Rewrite the operand at *LOC in INSN for a sweep of SWEEP_SIZE bytes at
   SWEEP_OFFSET: addresses inside the swept range move to the new guard-
   adjacent position; addresses between the range and sweep_frame_offset
   shift down by SWEEP_SIZE.  If the rewrite leaves INSN unrecognizable,
   the address is forced into a register and re-validated.  Recurses into
   all subexpressions (including CALL_PLACEHOLDER insn chains).
   NOTE(review): case labels, early-exit checks, and some statements are
   elided from this listing — the visible control flow is incomplete. */
1295 sweep_string_in_operand (insn, loc, sweep_offset, sweep_size)
1297 HOST_WIDE_INT sweep_offset, sweep_size;
1299 register rtx x = *loc;
1300 register enum rtx_code code;
1302 HOST_WIDE_INT offset;
1308 code = GET_CODE (x);
/* Remember that this insn touches a virtual register; the rewritten
   address may need validation once virtual regs are instantiated. */
1327 if (x == virtual_incoming_args_rtx
1328 || x == virtual_stack_vars_rtx
1329 || x == virtual_stack_dynamic_rtx
1330 || x == virtual_outgoing_args_rtx
1331 || x == virtual_cfa_rtx)
1332 has_virtual_reg = TRUE;
1337 skip setjmp setup insn and setjmp restore insn
1339 (set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
1340 (set (virtual_stack_vars_rtx) (REG))
1342 if (GET_CODE (XEXP (x, 0)) == MEM
1343 && XEXP (x, 1) == virtual_stack_vars_rtx)
1345 if (XEXP (x, 0) == virtual_stack_vars_rtx
1346 && GET_CODE (XEXP (x, 1)) == REG)
1351 /* Handle typical case of frame register plus constant. */
1352 if (XEXP (x, 0) == virtual_stack_vars_rtx
1353 && CONSTANT_P (XEXP (x, 1)))
1355 if (x->used) goto single_use_of_virtual_reg;
1357 offset = AUTO_OFFSET(x);
1358 if (RTX_INTEGRATED_P (x)) k = -1; /* for inline base ptr */
1360 /* the operand related to the sweep variable */
1361 if (sweep_offset <= offset + k
1362 && offset + k < sweep_offset + sweep_size)
1364 offset += sweep_frame_offset - sweep_size - sweep_offset;
1366 XEXP (x, 0) = virtual_stack_vars_rtx;
1367 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1370 else if (sweep_offset <= offset + k
1371 && offset + k < sweep_frame_offset)
1372 { /* the rest of variables under sweep_frame_offset, so shift the location */
1373 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1377 single_use_of_virtual_reg:
1378 if (has_virtual_reg) {
1379 /* excerpt from insn_invalid_p in recog.c */
1380 int icode = recog_memoized (insn);
1382 if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
/* Insn no longer matches any pattern: force the address into a
   register and substitute it back in. */
1387 temp = force_operand (x, NULL_RTX);
1391 emit_insns_before (seq, insn);
1392 if (! validate_change (insn, loc, temp, 0)
1393 && ! validate_replace_rtx (x, temp, insn))
1394 fatal_insn ("sweep_string_in_operand", insn);
1398 has_virtual_reg = TRUE;
1402 #ifdef FRAME_GROWS_DOWNWARD
1404 special case of frame register plus constant given by reg.
1406 else if (XEXP (x, 0) == virtual_stack_vars_rtx
1407 && GET_CODE (XEXP (x, 1)) == REG)
1408 fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
1412 process further subtree:
1413 Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
1418 case CALL_PLACEHOLDER:
1419 sweep_string_use_of_insns (XEXP (x, 0), sweep_offset, sweep_size);
1420 sweep_string_use_of_insns (XEXP (x, 1), sweep_offset, sweep_size);
1421 sweep_string_use_of_insns (XEXP (x, 2), sweep_offset, sweep_size);
1428 /* Scan all subexpressions. */
1429 fmt = GET_RTX_FORMAT (code);
1430 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1434 virtual_stack_vars_rtx without offset
1436 (set (reg:SI xx) (reg:SI 78))
1437 (set (reg:SI xx) (MEM (reg:SI 78)))
1439 if (XEXP (x, i) == virtual_stack_vars_rtx)
1440 fatal_insn ("sweep_string_in_operand: unknown fp usage", insn);
1441 sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
1443 else if (*fmt == 'E')
1444 for (j = 0; j < XVECLEN (x, i); j++)
1445 sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
1450 change an argument variable to the local variable addressed by the "new" variable.
/* Walk the insn chain starting at INSN and, for each real insn
   (INSN/JUMP_INSN/CALL_INSN), redirect uses of the argument ORIG to
   the local copy NEW (SIZE bytes) via change_arg_use_in_operand.  */
1453 change_arg_use_of_insns (insn, orig, new, size)
1454 rtx insn, orig, new;
1457 for (; insn; insn = NEXT_INSN (insn))
1458 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1459 || GET_CODE (insn) == CALL_INSN)
1461 change_arg_use_in_operand (PATTERN (insn), orig, new, size);
/* Rewrite one operand tree: replace references to the incoming
   argument ORIG (SIZE bytes, addressed off virtual_incoming_args_rtx)
   with the stack-local copy NEW addressed off virtual_stack_vars_rtx.
   Recurses into all subexpressions and substitutes NEW directly when
   ORIG appears as a whole operand.  */
1467 change_arg_use_in_operand (x, orig, new, size)
1471 register enum rtx_code code;
1473 HOST_WIDE_INT offset;
1479 code = GET_CODE (x);
1499 /* Handle special case of MEM (incoming_args) */
1500 if (GET_CODE (orig) == MEM
1501 && XEXP (x, 0) == virtual_incoming_args_rtx)
1505 /* the operand related to the sweep variable */
1506 if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
1507 offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
/* Translate the offset from the argument area into the frame area
   of the local copy.  */
1509 offset = AUTO_OFFSET(XEXP (new, 0))
1510 + (offset - AUTO_OFFSET(XEXP (orig, 0)));
1512 XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
/* Mark the new address so it is not rewritten again.  */
1513 XEXP (x, 0)->used = 1;
1521 /* Handle special case of frame register plus constant. */
1522 if (GET_CODE (orig) == MEM /* skip if orig is register variable in the optimization */
1523 && XEXP (x, 0) == virtual_incoming_args_rtx && CONSTANT_P (XEXP (x, 1))
1526 offset = AUTO_OFFSET(x);
1528 /* the operand related to the sweep variable */
1529 if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
1530 offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
1532 offset = AUTO_OFFSET(XEXP (new, 0))
1533 + (offset - AUTO_OFFSET(XEXP (orig, 0)));
1535 XEXP (x, 0) = virtual_stack_vars_rtx;
1536 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1543 process further subtree:
1544 Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
1550 case CALL_PLACEHOLDER:
1551 change_arg_use_of_insns (XEXP (x, 0), orig, new, size);
1552 change_arg_use_of_insns (XEXP (x, 1), orig, new, size);
1553 change_arg_use_of_insns (XEXP (x, 2), orig, new, size);
1560 /* Scan all subexpressions. */
1561 fmt = GET_RTX_FORMAT (code);
1562 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
/* ORIG appearing as a whole operand is substituted with NEW.  */
1565 if (XEXP (x, i) == orig)
1570 change_arg_use_in_operand (XEXP (x, i), orig, new, size);
1572 else if (*fmt == 'E')
1573 for (j = 0; j < XVECLEN (x, i); j++)
1576 if (XVECEXP (x, i, j) == orig)
1578 XVECEXP (x, i, j) = new;
1581 change_arg_use_in_operand (XVECEXP (x, i, j), orig, new, size);
/* Redirect the function's return register to RETURN_SAVE.
   A first pass verifies that every (use return_reg) is immediately
   preceded by (set return_reg ...); a second pass retargets that set
   to RETURN_SAVE and deletes the use by turning it into a NOTE.  */
1586 replace_return_reg (first, return_save)
1587 rtx first, return_save;
1589 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1592 /* confirm that insn patterns are in the expected order */
1593 for (insn = first; insn; insn = NEXT_INSN (insn))
1595 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1600 if (PREV_INSN (insn)) prev = PREV_INSN (insn);
1602 if (GET_CODE (PATTERN (insn)) == USE && XEXP (PATTERN (insn), 0) == return_reg)
1603 if (!(prev && GET_CODE (PATTERN (prev)) == SET && XEXP (PATTERN (prev), 0) == return_reg))
1608 /* replace return register */
1609 for (insn = first; insn; insn = NEXT_INSN (insn))
1611 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1615 if (PREV_INSN (insn)) prev = PREV_INSN (insn);
1616 if (GET_CODE (PATTERN (insn)) == USE
1617 && XEXP (PATTERN (insn), 0) == return_reg
1619 && GET_CODE (PATTERN (prev)) == SET
1620 && XEXP (PATTERN (prev), 0) == return_reg)
1622 XEXP (PATTERN (prev), 0) = return_save;
1624 /* change use insn to NOTE_INSN_DELETED */
1625 PUT_CODE (insn, NOTE);
1626 NOTE_SOURCE_FILE (insn) = 0;
1627 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1637 Generate RTL to return from the current function, with value VAL.
1638 It is copied and modified based on expand_value_return function of stmt.c
/* Copies VAL into the declared return location (converting mode,
   with sign/zero extension chosen by the type, when
   PROMOTE_FUNCTION_RETURN applies) and emits USE insns for each hard
   register that carries the value.  */
1642 expand_value_return (val)
1645 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1647 /* Copy the value to the return location
1648 unless it's already there. */
1650 if (return_reg != val)
1652 #ifdef PROMOTE_FUNCTION_RETURN
1653 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
1654 int unsignedp = TREE_UNSIGNED (type);
1655 enum machine_mode mode
1656 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
1659 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
1660 convert_move (return_reg, val, unsignedp);
1663 emit_move_insn (return_reg, val);
1665 if (GET_CODE (return_reg) == REG
1666 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
1667 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
1668 /* Handle calls that return values in multiple non-contiguous locations.
1669 The Irix 6 ABI has examples of this. */
1670 else if (GET_CODE (return_reg) == PARALLEL)
1674 for (i = 0; i < XVECLEN (return_reg, 0); i++)
1676 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
1678 if (GET_CODE (x) == REG
1679 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1680 emit_insn (gen_rtx_USE (VOIDmode, x));
/* Re-validate every real insn after variable references were rewritten:
   any insn that no longer matches a pattern (and is not an asm) has its
   offending operands legitimized by validate_operand_of_varrefs.  */
1687 validate_insns_of_varrefs (insn)
1692 /* Initialize recognition, indicating that volatile is OK. */
1695 for (; insn; insn = next)
1697 next = NEXT_INSN (insn);
1698 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1699 || GET_CODE (insn) == CALL_INSN)
1701 /* excerpt from insn_invalid_p in recog.c */
1702 int icode = recog_memoized (insn);
1704 if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
1705 validate_operand_of_varrefs (insn, &PATTERN (insn));
/* Restore the normal (volatile-rejecting) recognizer state.  */
1709 init_recog_no_volatile ();
/* Legitimize one operand of INSN at *LOC: a (plus virtual_stack_vars_rtx
   (const_int N)) that the target cannot accept as an operand is computed
   into a fresh pseudo by an explicit add insn emitted before INSN, and
   *LOC is replaced with that pseudo.  Recurses over subexpressions.  */
1714 validate_operand_of_varrefs (insn, loc)
1717 register enum rtx_code code;
1726 code = GET_CODE (x);
1747 /* validate insn of frame register plus constant. */
1748 if (GET_CODE (x) == PLUS
1749 && XEXP (x, 0) == virtual_stack_vars_rtx
1750 && CONSTANT_P (XEXP (x, 1)))
1753 /* temp = force_operand (x, NULL_RTX); */
1754 { /* excerpt from expand_binop in optabs.c */
1755 optab binoptab = add_optab;
1756 enum machine_mode mode = GET_MODE (x);
/* Target-specific add pattern for this mode, and the mode its
   second source operand must have.  */
1757 int icode = (int) binoptab->handlers[(int) mode].insn_code;
1758 enum machine_mode mode1 = insn_operand_mode[icode][2];
1760 rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
1761 temp = gen_reg_rtx (mode);
1763 /* Now, if insn's predicates don't allow offset operands, put them into
1766 if (! (*insn_operand_predicate[icode][2]) (xop1, mode1)
1767 && mode1 != VOIDmode)
1768 xop1 = copy_to_mode_reg (mode1, xop1);
1770 pat = GEN_FCN (icode) (temp, xop0, xop1);
1777 emit_insns_before (seq, insn);
1778 if (! validate_change (insn, loc, temp, 0))
1785 case CALL_PLACEHOLDER:
1786 validate_insns_of_varrefs (XEXP (x, 0));
1787 validate_insns_of_varrefs (XEXP (x, 1));
1788 validate_insns_of_varrefs (XEXP (x, 2));
1795 /* Scan all subexpressions. */
1796 fmt = GET_RTX_FORMAT (code);
1797 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1799 validate_operand_of_varrefs (insn, &XEXP (x, i));
1800 else if (*fmt == 'E')
1801 for (j = 0; j < XVECLEN (x, i); j++)
1802 validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
1809 The following code is invoked after the instantiation of pseudo registers.
1811 Reorder local variables to place a pseudo register after buffers to avoid
1812 the corruption of local variables that could be used to further corrupt
1813 arbitrary memory locations.
1815 #if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
1816 static void push_frame PARAMS ((HOST_WIDE_INT var_size, HOST_WIDE_INT boundary));
1817 static void push_frame_in_decls PARAMS ((tree block, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1818 static void push_frame_in_args PARAMS ((tree parms, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1819 static void push_frame_of_insns PARAMS ((rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1820 static void push_frame_in_operand PARAMS ((rtx insn, rtx orig, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1821 static void push_frame_of_reg_equiv_memory_loc PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1822 static void push_frame_of_reg_equiv_constant PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1823 static void reset_used_flags_for_push_frame PARAMS ((void));
1824 static int check_out_of_frame_access PARAMS ((rtx insn, HOST_WIDE_INT boundary));
1825 static int check_out_of_frame_access_in_operand PARAMS ((rtx, HOST_WIDE_INT boundary));
/* Like assign_stack_local, but for slots that spill pseudo registers:
   when propolice protection is active the slot is carved out of a
   reserved area at the top of the frame (tracked by push_frame_offset
   and push_allocated_offset) so spilled pseudos sit above any string
   buffers; the frame is pushed when the reserved area is exhausted.
   Falls back to the ordinary assign_stack_local when protection does
   not apply.  */
1829 assign_stack_local_for_pseudo_reg (mode, size, align)
1830 enum machine_mode mode;
1834 #if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
1835 return assign_stack_local (mode, size, align);
1837 tree blocks = DECL_INITIAL (current_function_decl);
1839 HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
1840 int first_call_from_purge_addressof, first_call_from_global_alloc;
1842 if (! flag_propolice_protection
1844 || ! blocks || TREE_CODE (blocks) != BLOCK
1845 || current_function_is_inlinable
1846 || ! search_string_from_argsandvars (1)
1847 || current_function_contains_functions)
1848 return assign_stack_local (mode, size, align);
/* Detect which compiler phase this call comes from; the bookkeeping
   counters must be (re)initialized once per phase.  */
1850 first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
1851 first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
1852 saved_cse_not_expected = cse_not_expected;
1854 starting_frame = (STARTING_FRAME_OFFSET)?STARTING_FRAME_OFFSET:BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1855 units_per_push = MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1856 GET_MODE_SIZE (mode));
1858 if (first_call_from_purge_addressof)
1860 push_frame_offset = push_allocated_offset;
1861 if (check_out_of_frame_access (get_insns (), starting_frame))
1863 /* if there is an access beyond frame, push dummy region to separate
1864 the address of instantiated variables */
1865 push_frame (GET_MODE_SIZE (DImode), 0);
1866 assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
1870 if (first_call_from_global_alloc)
1872 push_frame_offset = push_allocated_offset = 0;
1873 if (check_out_of_frame_access (get_insns (), starting_frame))
1875 if (STARTING_FRAME_OFFSET)
1877 /* if there is an access beyond frame, push dummy region
1878 to separate the address of instantiated variables */
1879 push_frame (GET_MODE_SIZE (DImode), 0);
1880 assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
1883 push_allocated_offset = starting_frame;
/* Allocate the slot within the reserved push area: temporarily swap
   frame_offset with push_frame_offset around assign_stack_local.  */
1887 saved_frame_offset = frame_offset;
1888 frame_offset = push_frame_offset;
1890 new = assign_stack_local (mode, size, align);
1892 push_frame_offset = frame_offset;
1893 frame_offset = saved_frame_offset;
1895 if (push_frame_offset > push_allocated_offset)
/* Reserved area exhausted: grow the frame by one push unit.  */
1897 push_frame (units_per_push, push_allocated_offset + STARTING_FRAME_OFFSET);
1899 assign_stack_local (BLKmode, units_per_push, -1);
1900 push_allocated_offset += units_per_push;
1903 /* At the second call from global alloc, alpha push frame and assign
1904 a local variable to the top of the stack */
1905 if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
1906 push_frame_offset = push_allocated_offset = 0;
1913 #if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
1915 push frame information for instantiating a pseudo register at the top of stack.
1916 This is only for the "frame grows upward" case, it means FRAME_GROWS_DOWNWARD is
1919 It is called by purge_addressof function and global_alloc (or reload)
/* Grow the frame by VAR_SIZE bytes at offset BOUNDARY: every frame
   reference at or beyond BOUNDARY — in variable declarations, argument
   homes, insn operands, and reload equivalences — is shifted by
   VAR_SIZE.  The rtx used flags guard against shifting a shared rtx
   twice; they are cleared before and after the walk.  */
1923 push_frame (var_size, boundary)
1924 HOST_WIDE_INT var_size, boundary;
1926 reset_used_flags_for_push_frame();
1928 /* scan all declarations of variables and fix the offset address of the variable based on the frame pointer */
1929 push_frame_in_decls (DECL_INITIAL (current_function_decl), var_size, boundary);
1931 /* scan all argument variable and fix the offset address based on the frame pointer */
1932 push_frame_in_args (DECL_ARGUMENTS (current_function_decl), var_size, boundary);
1934 /* scan all operands of all insns and fix the offset address based on the frame pointer */
1935 push_frame_of_insns (get_insns (), var_size, boundary);
1937 /* scan all reg_equiv_memory_loc and reg_equiv_constant*/
1938 push_frame_of_reg_equiv_memory_loc (var_size, boundary);
1939 push_frame_of_reg_equiv_constant (var_size, boundary);
1941 reset_used_flags_for_push_frame();
/* Clear the used flag on every rtx the push_frame walk may visit:
   local variable declarations, the insn chain, and (when called from
   global_alloc/reload) the frame-pointer-relative addresses recorded
   in reg_equiv_memory_loc and reg_equiv_constant.  */
1945 reset_used_flags_for_push_frame()
1948 extern rtx *reg_equiv_memory_loc;
1949 extern rtx *reg_equiv_constant;
1951 /* Clear all the USED bits in operands of all insns and declarations of local vars */
1952 reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
1953 reset_used_flags_for_insns (get_insns ());
1956 /* The following codes are processed if the push_frame is called from
1957 global_alloc (or reload) function */
1958 if (reg_equiv_memory_loc == 0) return;
1960 for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
1961 if (reg_equiv_memory_loc[i])
1963 rtx x = reg_equiv_memory_loc[i];
1965 if (GET_CODE (x) == MEM
1966 && GET_CODE (XEXP (x, 0)) == PLUS
1967 && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
1970 XEXP (x, 0)->used = 0;
1975 if (reg_equiv_constant == 0) return;
1977 for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
1978 if (reg_equiv_constant[i])
1980 rtx x = reg_equiv_constant[i];
1982 if (GET_CODE (x) == PLUS
1983 && AUTO_BASEPTR (x) == frame_pointer_rtx)
/* Shift the RTL home of every automatic variable in BLOCK (and its
   subblocks, recursively) whose frame offset is at or beyond BOUNDARY
   by PUSH_SIZE bytes.  External/static variables are skipped; the used
   flag marks addresses already shifted.  */
1992 push_frame_in_decls (block, push_size, boundary)
1994 HOST_WIDE_INT push_size, boundary;
1997 HOST_WIDE_INT offset;
2002 types = BLOCK_VARS(block);
2006 /* skip the declaration that refers an external variable and
2007 also skip an global variable */
2008 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
2011 home = DECL_RTL (types);
2012 if (home == 0) goto next;
2014 /* process for static local variable */
2015 if (GET_CODE (home) == MEM
2016 && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
2019 if (GET_CODE (home) == MEM
2020 && GET_CODE (XEXP (home, 0)) == REG)
2022 if (XEXP (home, 0) != frame_pointer_rtx
/* A bare frame-pointer home becomes fp plus the push size.  */
2026 XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2030 XEXP (home, 0)->used = 1;
2033 if (GET_CODE (home) == MEM
2034 && GET_CODE (XEXP (home, 0)) == MEM)
2037 /* process for dynamically allocated array */
2038 home = XEXP (home, 0);
2041 if (GET_CODE (home) == MEM
2042 && GET_CODE (XEXP (home, 0)) == PLUS
2043 && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
2045 offset = AUTO_OFFSET(XEXP (home, 0));
2047 if (! XEXP (home, 0)->used
2048 && offset >= boundary)
2050 offset += push_size;
2051 XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2054 XEXP (home, 0)->used = 1;
2060 types = TREE_CHAIN(types);
2063 push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
2064 block = BLOCK_CHAIN (block);
/* Shift the incoming-RTL address of each memory-passed parameter in
   PARMS whose frame-pointer-relative offset is at or beyond BOUNDARY
   by PUSH_SIZE bytes; already-shifted homes (used flag set) and
   offsets below BOUNDARY are skipped.  */
2070 push_frame_in_args (parms, push_size, boundary)
2072 HOST_WIDE_INT push_size, boundary;
2075 HOST_WIDE_INT offset;
2077 for (; parms; parms = TREE_CHAIN (parms))
2078 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
2080 if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
2082 home = DECL_INCOMING_RTL (parms);
2083 offset = AUTO_OFFSET(XEXP (home, 0));
2085 if (XEXP (home, 0)->used || offset < boundary) continue;
2087 /* the operand related to the sweep variable */
2088 if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
2090 offset += push_size;
2091 XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2094 XEXP (home, 0)->used = 1;
/* insn_pushed: set when push_frame_in_operand modified an insn, so the
   caller knows it may need re-splitting.  fp_equiv: per-register record
   of a known constant frame-pointer displacement established by a
   preceding set.  */
2101 static int insn_pushed;
2102 static int *fp_equiv = 0;
/* Walk the insn chain and shift every frame reference at or beyond
   BOUNDARY by PUSH_SIZE.  Insns invalidated by the rewrite are re-split
   and the resulting fp-displacement sets are recorded in fp_equiv;
   REG_NOTES and CALL_INSN_FUNCTION_USAGE are processed as well.  */
2105 push_frame_of_insns (insn, push_size, boundary)
2107 HOST_WIDE_INT push_size, boundary;
2110 fp_equiv = (int *) alloca (max_reg_num () * sizeof (int));
2111 bzero ((char *) fp_equiv, max_reg_num () * sizeof (int));
2113 for (; insn; insn = NEXT_INSN (insn))
2114 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2115 || GET_CODE (insn) == CALL_INSN)
2117 insn_pushed = FALSE; debuginsn = insn;
2118 push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
2123 rtx seq = split_insns (PATTERN (insn), insn);
2125 if (seq && GET_CODE (seq) == SEQUENCE)
2129 /* replace the pattern of the insn */
2130 PATTERN (insn) = PATTERN (XVECEXP (seq, 0, 0));
2132 if (XVECLEN (seq, 0) == 2)
2134 rtx pattern = PATTERN (XVECEXP (seq, 0, 1));
/* Record reg = reg + const displacements so later uses of the
   register can be adjusted via fp_equiv.  */
2136 if (GET_CODE (pattern) == SET
2137 && GET_CODE (XEXP (pattern, 0)) == REG
2138 && GET_CODE (XEXP (pattern, 1)) == PLUS
2139 && XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
2140 && CONSTANT_P (XEXP (XEXP (pattern, 1), 1)))
2142 rtx offset = XEXP (XEXP (pattern, 1), 1);
2143 fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
2148 for (i = 1; i < XVECLEN (seq, 0); i++)
2150 rtx insn = XVECEXP (seq, 0, i);
2151 add_insn_after (insn, after);
2155 /* Recursively call try_split for each new insn created */
2156 insn = NEXT_INSN (insn);
2157 for (i = 1; i < XVECLEN (seq, 0); i++, insn = NEXT_INSN (insn))
2158 insn = try_split (PATTERN (insn), insn, 1);
2163 /* push frame in NOTE */
2164 push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
2166 /* push frame in CALL EXPR_LIST */
2167 if (GET_CODE (insn) == CALL_INSN)
2168 push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn), push_size, boundary);
/* Rewrite one operand tree of INSN for a frame push: every
   frame-pointer-relative address whose offset is at or beyond BOUNDARY
   is displaced by PUSH_SIZE.  Handles plain fp+const addresses, subreg
   forms, registers holding known fp displacements (via fp_equiv), and
   the setjmp save/restore idioms that must be skipped.  Sets
   insn_pushed when INSN was modified so the caller can re-split it.  */
2174 push_frame_in_operand (insn, orig, push_size, boundary)
2176 HOST_WIDE_INT push_size, boundary;
2178 register rtx x = orig;
2179 register enum rtx_code code;
2181 HOST_WIDE_INT offset;
2187 code = GET_CODE (x);
2209 skip setjmp setup insn and setjmp restore insn
2211 (set (MEM (reg:SI xx)) (frame_pointer_rtx)))
2212 (set (frame_pointer_rtx) (REG))
2214 if (GET_CODE (XEXP (x, 0)) == MEM
2215 && XEXP (x, 1) == frame_pointer_rtx)
2217 if (XEXP (x, 0) == frame_pointer_rtx
2218 && GET_CODE (XEXP (x, 1)) == REG)
2222 powerpc case: restores setjmp address
2223 (set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
2225 (set (reg) (plus frame_pointer_rtx const_int -n))
2226 (set (frame_pointer_rtx) (reg))
2228 if (GET_CODE (XEXP (x, 0)) == REG
2229 && GET_CODE (XEXP (x, 1)) == PLUS
2230 && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
2231 && CONSTANT_P (XEXP (XEXP (x, 1), 1))
2232 && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
2235 offset = AUTO_OFFSET(x);
2236 if (x->used || abs (offset) < boundary)
/* Negative displacement below fp: move it down by the push size.  */
2239 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
2240 x->used = 1; insn_pushed = TRUE;
2244 /* reset fp_equiv register */
2245 else if (GET_CODE (XEXP (x, 0)) == REG
2246 && fp_equiv[REGNO (XEXP (x, 0))])
2247 fp_equiv[REGNO (XEXP (x, 0))] = 0;
2249 /* propagate fp_equiv register */
2250 else if (GET_CODE (XEXP (x, 0)) == REG
2251 && GET_CODE (XEXP (x, 1)) == REG
2252 && fp_equiv[REGNO (XEXP (x, 1))])
2253 if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
2254 || reg_renumber[REGNO (XEXP (x, 0))] > 0)
2255 fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
2259 if (XEXP (x, 0) == frame_pointer_rtx
2262 XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2263 XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2269 offset = AUTO_OFFSET(x);
2271 /* Handle special case of frame register plus constant. */
2272 if (CONSTANT_P (XEXP (x, 1))
2273 && XEXP (x, 0) == frame_pointer_rtx)
2275 if (x->used || offset < boundary)
2278 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2279 x->used = 1; insn_pushed = TRUE;
2285 (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
2287 if (CONSTANT_P (XEXP (x, 1))
2288 && GET_CODE (XEXP (x, 0)) == SUBREG
2289 && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
2291 if (x->used || offset < boundary)
2294 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2295 x->used = 1; insn_pushed = TRUE;
2300 Handle powerpc case:
2301 (set (reg x) (plus fp const))
2302 (set (.....) (... (plus (reg x) (const B))))
2304 else if (CONSTANT_P (XEXP (x, 1))
2305 && GET_CODE (XEXP (x, 0)) == REG
2306 && fp_equiv[REGNO (XEXP (x, 0))])
2308 if (x->used) return;
/* Fold the register's known fp displacement into the constant.  */
2310 offset += fp_equiv[REGNO (XEXP (x, 0))];
2312 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2313 x->used = 1; insn_pushed = TRUE;
2318 Handle special case of frame register plus reg (constant).
2319 (set (reg x) (const B))
2320 (set (....) (...(plus fp (reg x))))
2322 else if (XEXP (x, 0) == frame_pointer_rtx
2323 && GET_CODE (XEXP (x, 1)) == REG
2325 && PATTERN (PREV_INSN (insn))
2326 && SET_DEST (PATTERN (PREV_INSN (insn))) == XEXP (x, 1)
2327 && CONSTANT_P (SET_SRC (PATTERN (PREV_INSN (insn)))))
2329 HOST_WIDE_INT offset = INTVAL (SET_SRC (PATTERN (PREV_INSN (insn))));
2331 if (x->used || offset < boundary)
/* Adjust the constant loaded by the previous insn instead.  */
2334 SET_SRC (PATTERN (PREV_INSN (insn)))
2335 = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2337 XEXP (x, 1)->used = 1;
2341 /* Handle special case of frame register plus reg (used). */
2342 else if (XEXP (x, 0) == frame_pointer_rtx
2343 && XEXP (x, 1)->used)
2349 process further subtree:
2350 Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2355 case CALL_PLACEHOLDER:
2356 push_frame_of_insns (XEXP (x, 0), push_size, boundary);
2357 push_frame_of_insns (XEXP (x, 1), push_size, boundary);
2358 push_frame_of_insns (XEXP (x, 2), push_size, boundary);
2365 /* Scan all subexpressions. */
2366 fmt = GET_RTX_FORMAT (code);
2367 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2370 if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
2371 fatal_insn ("push_frame_in_operand", insn);
2372 push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
2374 else if (*fmt == 'E')
2375 for (j = 0; j < XVECLEN (x, i); j++)
2376 push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary);
/* Shift the frame-pointer-relative addresses recorded in
   reg_equiv_memory_loc (reload's memory equivalences for pseudos) by
   PUSH_SIZE when their offset is at or beyond BOUNDARY.  */
2380 push_frame_of_reg_equiv_memory_loc (push_size, boundary)
2381 HOST_WIDE_INT push_size, boundary;
2384 extern rtx *reg_equiv_memory_loc;
2386 /* This function is processed if the push_frame is called from
2387 global_alloc (or reload) function */
2388 if (reg_equiv_memory_loc == 0) return;
2390 for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2391 if (reg_equiv_memory_loc[i])
2393 rtx x = reg_equiv_memory_loc[i];
2396 if (GET_CODE (x) == MEM
2397 && GET_CODE (XEXP (x, 0)) == PLUS
2398 && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
2400 offset = AUTO_OFFSET(XEXP (x, 0));
2402 if (! XEXP (x, 0)->used
2403 && offset >= boundary)
2405 offset += push_size;
2406 XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2409 XEXP (x, 0)->used = 1;
2412 else if (GET_CODE (x) == MEM
2413 && XEXP (x, 0) == frame_pointer_rtx
/* A bare (mem fp) equivalence becomes (mem (fp + push_size)).  */
2416 XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2417 XEXP (x, 0)->used = 1; insn_pushed = TRUE;
/* Shift the frame-pointer-relative constants recorded in
   reg_equiv_constant (reload's constant equivalences for pseudos) by
   PUSH_SIZE when their offset is at or beyond BOUNDARY.  */
2423 push_frame_of_reg_equiv_constant (push_size, boundary)
2424 HOST_WIDE_INT push_size, boundary;
2427 extern rtx *reg_equiv_constant;
2429 /* This function is processed if the push_frame is called from
2430 global_alloc (or reload) function */
2431 if (reg_equiv_constant == 0) return;
2433 for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2434 if (reg_equiv_constant[i])
2436 rtx x = reg_equiv_constant[i];
2439 if (GET_CODE (x) == PLUS
2440 && XEXP (x, 0) == frame_pointer_rtx)
2442 offset = AUTO_OFFSET(x);
2445 && offset >= boundary)
2447 offset += push_size;
2448 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
/* A bare fp equivalence becomes fp + push_size.  */
2454 else if (x == frame_pointer_rtx
2457 reg_equiv_constant[i]
2458 = plus_constant (frame_pointer_rtx, push_size);
2459 reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
/* Return nonzero if any real insn (INSN/JUMP_INSN/CALL_INSN) in the
   chain starting at INSN references the frame at an offset below
   BOUNDARY, i.e. inside the area about to be pushed.  */
2465 check_out_of_frame_access (insn, boundary)
2467 HOST_WIDE_INT boundary;
2469 for (; insn; insn = NEXT_INSN (insn))
2470 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2471 || GET_CODE (insn) == CALL_INSN)
2473 if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
2481 check_out_of_frame_access_in_operand (orig, boundary)
2483 HOST_WIDE_INT boundary;
2485 register rtx x = orig;
2486 register enum rtx_code code;
2493 code = GET_CODE (x);
2513 if (XEXP (x, 0) == frame_pointer_rtx)
2514 if (0 < boundary) return TRUE;
2518 /* Handle special case of frame register plus constant. */
2519 if (CONSTANT_P (XEXP (x, 1))
2520 && XEXP (x, 0) == frame_pointer_rtx)
2522 if (0 <= AUTO_OFFSET(x)
2523 && AUTO_OFFSET(x) < boundary) return TRUE;
2527 process further subtree:
2528 Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2533 case CALL_PLACEHOLDER:
2534 if (check_out_of_frame_access (XEXP (x, 0), boundary)) return TRUE;
2535 if (check_out_of_frame_access (XEXP (x, 1), boundary)) return TRUE;
2536 if (check_out_of_frame_access (XEXP (x, 2), boundary)) return TRUE;
2543 /* Scan all subexpressions. */
2544 fmt = GET_RTX_FORMAT (code);
2545 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2548 if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
2551 else if (*fmt == 'E')
2552 for (j = 0; j < XVECLEN (x, i); j++)
2553 if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))