1 /* $DragonFly: src/gnu/usr.bin/cc34/cc_prep/protector.c,v 1.3 2007/01/20 03:16:33 corecode Exp $ */
2 /* RTL buffer overflow protection function for GNU C compiler
3 Copyright (C) 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file contains several memory arrangement functions to protect
23 the return address and the frame pointer of the stack
24 from a stack-smashing attack. It also
25 provides the function that protects pointer variables. */
29 #include "coretypes.h"
37 #include "insn-config.h"
38 #include "insn-flags.h"
42 #include "hard-reg-set.h"
47 #include "conditions.h"
48 #include "insn-attr.h"
51 #include "protector.h"
54 /* Round a value to the lowest integer less than it that is a multiple of
55 the required alignment. Avoid using division in case the value is
56 negative. Assume the alignment is a power of two. */
57 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
59 /* Similar, but round to the next highest integer that meets the
61 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
64 /* Nonzero if function being compiled can define string buffers that may be
65 damaged by the stack-smash attack. */
66 static int current_function_defines_vulnerable_string;
67 static int current_function_defines_short_string;
68 static int current_function_has_variable_string;
69 static int current_function_defines_vsized_array;
70 static int current_function_is_inlinable;
72 /* Nonzero if search_string_def finds the variable which contains an array. */
75 /* Nonzero if search_string_def finds a byte-pointer variable,
76 which may be assigned to alloca output. */
77 static int may_have_alloca_pointer;
79 static rtx guard_area, _guard;
80 static rtx function_first_insn, prologue_insert_point;
82 /* Offset to end of sweeped area for gathering character arrays. */
83 static HOST_WIDE_INT sweep_frame_offset;
85 /* Offset to end of allocated area for instantiating pseudo registers. */
86 static HOST_WIDE_INT push_allocated_offset = 0;
88 /* Offset to end of assigned area for instantiating pseudo registers. */
89 static HOST_WIDE_INT push_frame_offset = 0;
91 /* Set to 1 after cse_not_expected becomes nonzero. it is used to identify
92 which stage assign_stack_local_for_pseudo_reg is called from. */
93 static int saved_cse_not_expected = 0;
95 static int search_string_from_argsandvars (int);
96 static int search_string_from_local_vars (tree);
97 static int search_pointer_def (tree);
98 static int search_func_pointer (tree);
99 static int check_used_flag (rtx);
100 static void reset_used_flags_for_insns (rtx);
101 static void reset_used_flags_for_decls (tree);
102 static void reset_used_flags_of_plus (rtx);
103 static void rtl_prologue (rtx);
104 static void rtl_epilogue (rtx);
105 static void arrange_var_order (tree);
106 static void copy_args_for_protection (void);
107 static void sweep_string_variable (rtx, HOST_WIDE_INT);
108 static void sweep_string_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
109 static void sweep_string_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
110 static void sweep_string_use_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
111 static void sweep_string_in_operand (rtx, rtx *, HOST_WIDE_INT, HOST_WIDE_INT);
112 static void move_arg_location (rtx, rtx, rtx, HOST_WIDE_INT);
113 static void change_arg_use_of_insns (rtx, rtx, rtx *, HOST_WIDE_INT);
114 static void change_arg_use_in_operand (rtx, rtx, rtx, rtx *, HOST_WIDE_INT);
115 static void validate_insns_of_varrefs (rtx);
116 static void validate_operand_of_varrefs (rtx, rtx *);
118 /* Specify which size of buffers should be protected from a stack smashing
119 attack. Because small buffers are not used in situations which may
120 overflow buffer, the default size sets to the size of 64 bit register. */
121 #ifndef SUSPICIOUS_BUF_SIZE
122 #define SUSPICIOUS_BUF_SIZE 8
125 #define AUTO_BASEPTR(X) \
126 (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
127 #define AUTO_OFFSET(X) \
128 (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
129 #undef PARM_PASSED_IN_MEMORY
130 #define PARM_PASSED_IN_MEMORY(PARM) \
131 (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
132 #define TREE_VISITED(NODE) ((NODE)->common.unused_0)
134 /* Argument values for calling search_string_from_argsandvars. */
135 #define CALL_FROM_PREPARE_STACK_PROTECTION 0
136 #define CALL_FROM_PUSH_FRAME 1
139 /* Prepare several stack protection instruments for the current function
140 if the function has an array as a local variable, which may be vulnerable
141 from a stack smashing attack, and it is not inlinable.
143 The overall steps are as follows;
145 (2)insert guard_area on the stack,
146 (3)duplicate pointer arguments into local variables, and
147 (4)arrange the location of local variables. */
149 prepare_stack_protection (int inlinable)
151 tree blocks = DECL_INITIAL (current_function_decl);
152 current_function_is_inlinable = inlinable && !flag_no_inline;
153 push_frame_offset = push_allocated_offset = 0;
154 saved_cse_not_expected = 0;
156 /* Skip the protection if the function has no block
157 or it is an inline function. */
158 if (current_function_is_inlinable)
159 validate_insns_of_varrefs (get_insns ());
160 if (! blocks || current_function_is_inlinable)
163 current_function_defines_vulnerable_string
164 = search_string_from_argsandvars (CALL_FROM_PREPARE_STACK_PROTECTION);
166 if (current_function_defines_vulnerable_string
167 || flag_stack_protection)
169 function_first_insn = get_insns ();
171 if (current_function_contains_functions)
173 if (warn_stack_protector)
174 warning ("not protecting function: it contains functions");
178 /* Initialize recognition, indicating that volatile is OK. */
181 sweep_frame_offset = 0;
183 #ifdef STACK_GROWS_DOWNWARD
184 /* frame_offset: offset to end of allocated area of stack frame.
185 It is defined in the function.c. */
187 /* the location must be before buffers. */
188 guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
189 PUT_MODE (guard_area, GUARD_m);
190 MEM_VOLATILE_P (guard_area) = 1;
192 #ifndef FRAME_GROWS_DOWNWARD
193 sweep_frame_offset = frame_offset;
196 /* For making room for guard value, scan all insns and fix the offset
197 address of the variable that is based on frame pointer.
198 Scan all declarations of variables and fix the offset address
199 of the variable that is based on the frame pointer. */
200 sweep_string_variable (guard_area, UNITS_PER_GUARD);
203 /* the location of guard area moves to the beginning of stack frame. */
204 if (AUTO_OFFSET(XEXP (guard_area, 0)))
205 XEXP (XEXP (guard_area, 0), 1)
206 = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);
209 /* Insert prologue rtl instructions. */
210 rtl_prologue (function_first_insn);
212 if (! current_function_has_variable_string)
214 /* Generate argument saving instruction. */
215 copy_args_for_protection ();
217 #ifndef FRAME_GROWS_DOWNWARD
218 /* If frame grows upward, character arrays for protecting args
219 may copy to the top of the guard variable.
220 So sweep the guard variable again. */
221 sweep_frame_offset = CEIL_ROUND (frame_offset,
222 BIGGEST_ALIGNMENT / BITS_PER_UNIT);
223 sweep_string_variable (guard_area, UNITS_PER_GUARD);
226 /* Variable can't be protected from the overflow of variable length
227 buffer. But variable reordering is still effective against
228 the overflow of fixed size character arrays. */
229 else if (warn_stack_protector)
230 warning ("not protecting variables: it has a variable length buffer");
232 #ifndef FRAME_GROWS_DOWNWARD
233 if (STARTING_FRAME_OFFSET == 0)
235 /* This part may be only for alpha. */
236 push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
237 assign_stack_local (BLKmode, push_allocated_offset, -1);
238 sweep_frame_offset = frame_offset;
239 sweep_string_variable (const0_rtx, -push_allocated_offset);
240 sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
244 /* Arrange the order of local variables. */
245 arrange_var_order (blocks);
247 #ifdef STACK_GROWS_DOWNWARD
248 /* Insert epilogue rtl instructions. */
249 rtl_epilogue (get_last_insn ());
251 init_recog_no_volatile ();
253 else if (current_function_defines_short_string
254 && warn_stack_protector)
255 warning ("not protecting function: buffer is less than %d bytes long",
256 SUSPICIOUS_BUF_SIZE);
260 Search string from arguments and local variables.
261 caller: CALL_FROM_PREPARE_STACK_PROTECTION (0)
262 CALL_FROM_PUSH_FRAME (1)
265 search_string_from_argsandvars (int caller)
270 /* Saves a latest search result as a cached infomation. */
271 static tree __latest_search_decl = 0;
272 static int __latest_search_result = FALSE;
274 if (__latest_search_decl == current_function_decl)
275 return __latest_search_result;
277 if (caller == CALL_FROM_PUSH_FRAME)
280 __latest_search_decl = current_function_decl;
281 __latest_search_result = TRUE;
283 current_function_defines_short_string = FALSE;
284 current_function_has_variable_string = FALSE;
285 current_function_defines_vsized_array = FALSE;
286 may_have_alloca_pointer = FALSE;
288 /* Search a string variable from local variables. */
289 blocks = DECL_INITIAL (current_function_decl);
290 string_p = search_string_from_local_vars (blocks);
292 if (! current_function_defines_vsized_array
293 && may_have_alloca_pointer
294 && current_function_calls_alloca)
296 current_function_has_variable_string = TRUE;
303 #ifdef STACK_GROWS_DOWNWARD
304 /* Search a string variable from arguments. */
305 parms = DECL_ARGUMENTS (current_function_decl);
307 for (; parms; parms = TREE_CHAIN (parms))
308 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
310 if (PARM_PASSED_IN_MEMORY (parms))
312 string_p = search_string_def (TREE_TYPE(parms));
319 __latest_search_result = FALSE;
324 /* Search string from local variables in the specified scope. */
326 search_string_from_local_vars (tree block)
331 while (block && TREE_CODE(block)==BLOCK)
333 for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
335 /* Skip the declaration that refers an external variable. */
336 /* name: types.decl.name.identifier.id */
337 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
338 && TREE_CODE (types) == VAR_DECL
339 && ! DECL_ARTIFICIAL (types)
340 && DECL_RTL_SET_P (types)
341 && GET_CODE (DECL_RTL (types)) == MEM
343 && search_string_def (TREE_TYPE (types)))
345 rtx home = DECL_RTL (types);
347 if (GET_CODE (home) == MEM
348 && (GET_CODE (XEXP (home, 0)) == MEM
349 || (GET_CODE (XEXP (home, 0)) == REG
350 && XEXP (home, 0) != virtual_stack_vars_rtx
351 && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
352 && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
353 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
354 && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
357 /* If the value is indirect by memory or by a register
358 that isn't the frame pointer then it means the object is
359 variable-sized and address through
360 that register or stack slot.
361 The protection has no way to hide pointer variables
362 behind the array, so all we can do is staying
363 the order of variables and arguments. */
365 current_function_has_variable_string = TRUE;
368 /* Found character array. */
373 if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
378 block = BLOCK_CHAIN (block);
385 /* Search a character array from the specified type tree. */
387 search_string_def (tree type)
394 switch (TREE_CODE (type))
397 /* Check if the array is a variable-sized array. */
398 if (TYPE_DOMAIN (type) == 0
399 || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
400 && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
401 current_function_defines_vsized_array = TRUE;
403 /* Check if the array is related to char array. */
404 if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
405 || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
406 || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
408 /* Check if the string is a variable string. */
409 if (TYPE_DOMAIN (type) == 0
410 || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
411 && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
414 /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE. */
415 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
416 && (TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1
417 >= SUSPICIOUS_BUF_SIZE))
420 current_function_defines_short_string = TRUE;
423 /* to protect every functions, sweep any arrays to the frame top. */
426 return search_string_def(TREE_TYPE(type));
429 case QUAL_UNION_TYPE:
431 /* Check if each field has character arrays. */
432 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
434 /* Omit here local type decls until we know how to support them. */
435 if ((TREE_CODE (tem) == TYPE_DECL)
436 || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
439 if (search_string_def(TREE_TYPE(tem)))
445 /* Check if pointer variables, which may be a pointer assigned
446 by alloca function call, are declared. */
447 if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
448 || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
449 || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
450 may_have_alloca_pointer = TRUE;
463 /* Examine whether the input contains frame pointer addressing. */
490 /* This case is not generated at the stack protection.
491 see plus_constant_wide and simplify_plus_minus function. */
492 if (XEXP (x, 0) == virtual_stack_vars_rtx)
496 if (XEXP (x, 0) == virtual_stack_vars_rtx
497 && GET_CODE (XEXP (x, 1)) == CONST_INT)
504 /* Scan all subexpressions. */
505 fmt = GET_RTX_FORMAT (code);
506 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
509 if (contains_fp (XEXP (x, i)))
512 else if (*fmt == 'E')
513 for (j = 0; j < XVECLEN (x, i); j++)
514 if (contains_fp (XVECEXP (x, i, j)))
521 /* Examine whether the input contains any pointer. */
523 search_pointer_def (tree type)
530 switch (TREE_CODE (type))
533 case QUAL_UNION_TYPE:
535 /* Check if each field has a pointer. */
536 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
538 if ((TREE_CODE (tem) == TYPE_DECL)
539 || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
542 if (search_pointer_def (TREE_TYPE(tem)))
548 return search_pointer_def (TREE_TYPE(type));
553 if (TYPE_READONLY (TREE_TYPE (type)))
555 /* If this pointer contains function pointer,
556 it should be protected. */
557 return search_func_pointer (TREE_TYPE (type));
569 /* Examine whether the input contains function pointer. */
571 search_func_pointer (tree type)
578 switch (TREE_CODE (type))
581 case QUAL_UNION_TYPE:
583 if (! TREE_VISITED (type))
585 /* Mark the type as having been visited already. */
586 TREE_VISITED (type) = 1;
588 /* Check if each field has a function pointer. */
589 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
591 if (TREE_CODE (tem) == FIELD_DECL
592 && search_func_pointer (TREE_TYPE(tem)))
594 TREE_VISITED (type) = 0;
599 TREE_VISITED (type) = 0;
604 return search_func_pointer (TREE_TYPE(type));
609 if (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
611 return search_func_pointer (TREE_TYPE(type));
621 /* Check whether the specified rtx contains PLUS rtx with used flag. */
623 check_used_flag (rtx x)
626 register enum rtx_code code;
627 register const char *format_ptr;
654 format_ptr = GET_RTX_FORMAT (code);
655 for (i = 0; i < GET_RTX_LENGTH (code); i++)
657 switch (*format_ptr++)
660 if (check_used_flag (XEXP (x, i)))
665 for (j = 0; j < XVECLEN (x, i); j++)
666 if (check_used_flag (XVECEXP (x, i, j)))
676 /* Reset used flag of every insns after the spcecified insn. */
678 reset_used_flags_for_insns (rtx insn)
682 const char *format_ptr;
684 for (; insn; insn = NEXT_INSN (insn))
685 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
686 || GET_CODE (insn) == CALL_INSN)
688 code = GET_CODE (insn);
690 format_ptr = GET_RTX_FORMAT (code);
692 for (i = 0; i < GET_RTX_LENGTH (code); i++)
694 switch (*format_ptr++)
697 reset_used_flags_of_plus (XEXP (insn, i));
701 for (j = 0; j < XVECLEN (insn, i); j++)
702 reset_used_flags_of_plus (XVECEXP (insn, i, j));
710 /* Reset used flag of every variables in the specified block. */
712 reset_used_flags_for_decls (tree block)
717 while (block && TREE_CODE(block)==BLOCK)
719 types = BLOCK_VARS(block);
721 for (types= BLOCK_VARS(block); types; types = TREE_CHAIN(types))
723 /* Skip the declaration that refers an external variable and
724 also skip an global variable. */
725 if (! DECL_EXTERNAL (types))
727 if (! DECL_RTL_SET_P (types))
729 home = DECL_RTL (types);
731 if (GET_CODE (home) == MEM
732 && GET_CODE (XEXP (home, 0)) == PLUS
733 && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
735 XEXP (home, 0)->used = 0;
740 reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
742 block = BLOCK_CHAIN (block);
747 /* Reset the used flag of every PLUS rtx derived from the specified rtx. */
749 reset_used_flags_of_plus (rtx x)
753 const char *format_ptr;
762 /* These types may be freely shared so we needn't do any resetting
780 /* The chain of insns is not being copied. */
787 case CALL_PLACEHOLDER:
788 reset_used_flags_for_insns (XEXP (x, 0));
789 reset_used_flags_for_insns (XEXP (x, 1));
790 reset_used_flags_for_insns (XEXP (x, 2));
797 format_ptr = GET_RTX_FORMAT (code);
798 for (i = 0; i < GET_RTX_LENGTH (code); i++)
800 switch (*format_ptr++)
803 reset_used_flags_of_plus (XEXP (x, i));
807 for (j = 0; j < XVECLEN (x, i); j++)
808 reset_used_flags_of_plus (XVECEXP (x, i, j));
815 /* Generate the prologue insns of the protector into the specified insn. */
817 rtl_prologue (rtx insn)
819 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
820 #undef HAS_INIT_SECTION
821 #define HAS_INIT_SECTION
826 for (; insn; insn = NEXT_INSN (insn))
827 if (GET_CODE (insn) == NOTE
828 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
831 #if !defined (HAS_INIT_SECTION)
832 /* If this function is `main', skip a call to `__main'
833 to run guard instruments after global initializers, etc. */
834 if (DECL_NAME (current_function_decl)
835 && MAIN_NAME_P (DECL_NAME (current_function_decl))
836 && DECL_CONTEXT (current_function_decl) == NULL_TREE)
839 for (; insn; insn = NEXT_INSN (insn))
840 if (GET_CODE (insn) == NOTE
841 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
848 /* Mark the next insn of FUNCTION_BEG insn. */
849 prologue_insert_point = NEXT_INSN (insn);
853 _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard"));
854 emit_move_insn ( guard_area, _guard);
859 emit_insn_before (_val, prologue_insert_point);
863 /* Generate the epilogue insns of the protector into the specified insn. */
865 rtl_epilogue (rtx insn)
871 int flag_have_return = FALSE;
879 return_label = gen_label_rtx ();
881 for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
882 if (GET_CODE (insn) == JUMP_INSN
883 && GET_CODE (PATTERN (insn)) == RETURN
884 && GET_MODE (PATTERN (insn)) == VOIDmode)
886 rtx pat = gen_rtx_SET (VOIDmode,
888 gen_rtx_LABEL_REF (VOIDmode,
890 PATTERN (insn) = pat;
891 flag_have_return = TRUE;
895 emit_label (return_label);
899 /* if (guard_area != _guard) */
900 compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, NULL_RTX);
902 if_false_label = gen_label_rtx (); /* { */
903 emit_jump_insn ( gen_beq(if_false_label));
905 /* generate string for the current function name */
906 funcstr = build_string (strlen(current_function_name ())+1,
907 current_function_name ());
908 TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);
909 funcname = output_constant_def (funcstr, 1);
911 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__stack_smash_handler"),
913 XEXP (funcname, 0), Pmode, guard_area, GUARD_m);
915 /* generate RTL to return from the current function */
917 emit_barrier (); /* } */
918 emit_label (if_false_label);
920 /* generate RTL to return from the current function */
921 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
922 use_return_register ();
925 if (HAVE_return && flag_have_return)
927 emit_jump_insn (gen_return ());
935 emit_insn_after (_val, insn);
939 /* For every variable which type is character array, moves its location
940 in the stack frame to the sweep_frame_offset position. */
942 arrange_var_order (tree block)
945 HOST_WIDE_INT offset;
947 while (block && TREE_CODE(block)==BLOCK)
949 /* arrange the location of character arrays in depth first. */
950 arrange_var_order (BLOCK_SUBBLOCKS (block));
952 for (types = BLOCK_VARS (block); types; types = TREE_CHAIN(types))
954 /* Skip the declaration that refers an external variable. */
955 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
956 && TREE_CODE (types) == VAR_DECL
957 && ! DECL_ARTIFICIAL (types)
958 && DECL_RTL_SET_P (types)
959 && GET_CODE (DECL_RTL (types)) == MEM
960 && GET_MODE (DECL_RTL (types)) == BLKmode
963 search_string_def (TREE_TYPE (types))
964 || (! current_function_defines_vulnerable_string && is_array)))
966 rtx home = DECL_RTL (types);
968 if (!(GET_CODE (home) == MEM
969 && (GET_CODE (XEXP (home, 0)) == MEM
970 || (GET_CODE (XEXP (home, 0)) == REG
971 && XEXP (home, 0) != virtual_stack_vars_rtx
972 && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
973 && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
974 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
975 && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
979 /* Found a string variable. */
980 HOST_WIDE_INT var_size =
981 ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
984 /* Confirmed it is BLKmode. */
985 int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
986 var_size = CEIL_ROUND (var_size, alignment);
988 /* Skip the variable if it is top of the region
989 specified by sweep_frame_offset. */
990 offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
991 if (offset == sweep_frame_offset - var_size)
992 sweep_frame_offset -= var_size;
994 else if (offset < sweep_frame_offset - var_size)
995 sweep_string_variable (DECL_RTL (types), var_size);
1000 block = BLOCK_CHAIN (block);
1005 /* To protect every pointer argument and move character arrays in the argument,
1006 Copy those variables to the top of the stack frame and move the location of
1007 character arrays to the posion of sweep_frame_offset. */
1009 copy_args_for_protection (void)
1011 tree parms = DECL_ARGUMENTS (current_function_decl);
1014 parms = DECL_ARGUMENTS (current_function_decl);
1015 for (; parms; parms = TREE_CHAIN (parms))
1016 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1018 if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1023 string_p = search_string_def (TREE_TYPE(parms));
1025 /* Check if it is a candidate to move. */
1026 if (string_p || search_pointer_def (TREE_TYPE (parms)))
1029 = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
1031 tree passed_type = DECL_ARG_TYPE (parms);
1032 tree nominal_type = TREE_TYPE (parms);
1036 if (GET_CODE (DECL_RTL (parms)) == REG)
1040 change_arg_use_of_insns (prologue_insert_point,
1041 DECL_RTL (parms), &safe, 0);
1044 /* Generate codes for copying the content. */
1045 rtx movinsn = emit_move_insn (safe, DECL_RTL (parms));
1047 /* Avoid register elimination in gcse.c. */
1048 PATTERN (movinsn)->volatil = 1;
1050 /* Save debugger info. */
1051 SET_DECL_RTL (parms, safe);
1054 else if (GET_CODE (DECL_RTL (parms)) == MEM
1055 && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
1058 rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
1060 /* Generate codes for copying the content. */
1061 movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
1062 /* Avoid register elimination in gcse.c. */
1063 PATTERN (movinsn)->volatil = 1;
1065 /* Change the addressof information to the newly
1066 allocated pseudo register. */
1067 emit_move_insn (DECL_RTL (parms), safe);
1069 /* Save debugger info. */
1070 SET_DECL_RTL (parms, safe);
1073 /* See if the frontend wants to pass this by invisible
1075 else if (passed_type != nominal_type
1076 && POINTER_TYPE_P (passed_type)
1077 && TREE_TYPE (passed_type) == nominal_type)
1079 rtx safe = 0, orig = XEXP (DECL_RTL (parms), 0);
1081 change_arg_use_of_insns (prologue_insert_point,
1085 /* Generate codes for copying the content. */
1086 rtx movinsn = emit_move_insn (safe, orig);
1088 /* Avoid register elimination in gcse.c */
1089 PATTERN (movinsn)->volatil = 1;
1091 /* Save debugger info. */
1092 SET_DECL_RTL (parms, safe);
1098 /* Declare temporary local variable for parms. */
1100 = assign_stack_local (DECL_MODE (parms), arg_size,
1101 DECL_MODE (parms) == BLKmode ?
1104 MEM_IN_STRUCT_P (temp_rtx)
1105 = AGGREGATE_TYPE_P (TREE_TYPE (parms));
1106 set_mem_alias_set (temp_rtx, get_alias_set (parms));
1108 /* Generate codes for copying the content. */
1109 store_expr (parms, temp_rtx, 0);
1111 /* Change the reference for each instructions. */
1112 move_arg_location (prologue_insert_point, DECL_RTL (parms),
1113 temp_rtx, arg_size);
1115 /* Change the location of parms variable. */
1116 SET_DECL_RTL (parms, temp_rtx);
1121 emit_insn_before (seq, prologue_insert_point);
1123 #ifdef FRAME_GROWS_DOWNWARD
1124 /* Process the string argument. */
1125 if (string_p && DECL_MODE (parms) == BLKmode)
1127 int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1128 arg_size = CEIL_ROUND (arg_size, alignment);
1130 /* Change the reference for each instructions. */
1131 sweep_string_variable (DECL_RTL (parms), arg_size);
1140 /* Sweep a string variable to the positon of sweep_frame_offset in the
1141 stack frame, that is a last position of string variables. */
1143 sweep_string_variable (rtx sweep_var, HOST_WIDE_INT var_size)
1145 HOST_WIDE_INT sweep_offset;
1147 switch (GET_CODE (sweep_var))
1150 /* Kevin F. Quinn May 2006
1151 * arrange_var_order can clearly call this function with
1152 * the code REG in sweep_var, so we need to handle the case
1153 * at least. This does nothing, as it appears there's nothing
1155 * Reached if a class variable is passed by value.
1157 if (warn_stack_protector)
1159 warning ("sweep string type REG (%d) ignored - rtl:\n",
1160 GET_CODE(sweep_var));
1161 print_rtl(stderr,sweep_var);
1167 if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
1168 && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG)
1170 sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
1173 sweep_offset = INTVAL (sweep_var);
1176 if (warn_stack_protector)
1178 warning ("sweep string type %d unexpected - rtl:\n",
1179 GET_CODE(sweep_var));
1180 print_rtl(stderr,sweep_var);
1186 /* Scan all declarations of variables and fix the offset address of
1187 the variable based on the frame pointer. */
1188 sweep_string_in_decls (DECL_INITIAL (current_function_decl),
1189 sweep_offset, var_size);
1191 /* Scan all argument variable and fix the offset address based on
1192 the frame pointer. */
1193 sweep_string_in_args (DECL_ARGUMENTS (current_function_decl),
1194 sweep_offset, var_size);
1196 /* For making room for sweep variable, scan all insns and
1197 fix the offset address of the variable that is based on frame pointer. */
1198 sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
1201 /* Clear all the USED bits in operands of all insns and declarations of
1203 reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
1204 reset_used_flags_for_insns (function_first_insn);
1206 sweep_frame_offset -= var_size;
1211 /* Move an argument to the local variable addressed by frame_offset. */
1213 move_arg_location (rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size)
1215 /* For making room for sweep variable, scan all insns and
1216 fix the offset address of the variable that is based on frame pointer. */
1217 change_arg_use_of_insns (insn, orig, &new, var_size);
1220 /* Clear all the USED bits in operands of all insns and declarations
1221 of local variables. */
1222 reset_used_flags_for_insns (insn);
1226 /* Sweep character arrays declared as local variable. */
1228 sweep_string_in_decls (tree block, HOST_WIDE_INT sweep_offset,
1229 HOST_WIDE_INT sweep_size)
1232 HOST_WIDE_INT offset;
1235 while (block && TREE_CODE(block)==BLOCK)
1237 for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
1239 /* Skip the declaration that refers an external variable and
1240 also skip an global variable. */
1241 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {
1243 if (! DECL_RTL_SET_P (types))
1246 home = DECL_RTL (types);
1248 /* Process for static local variable. */
1249 if (GET_CODE (home) == MEM
1250 && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
1253 if (GET_CODE (home) == MEM
1254 && XEXP (home, 0) == virtual_stack_vars_rtx)
1258 /* the operand related to the sweep variable. */
1259 if (sweep_offset <= offset
1260 && offset < sweep_offset + sweep_size)
1262 offset = sweep_frame_offset - sweep_size - sweep_offset;
1264 XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
1266 XEXP (home, 0)->used = 1;
1268 else if (sweep_offset <= offset
1269 && offset < sweep_frame_offset)
1271 /* the rest of variables under sweep_frame_offset,
1272 shift the location. */
1273 XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
1275 XEXP (home, 0)->used = 1;
1279 if (GET_CODE (home) == MEM
1280 && GET_CODE (XEXP (home, 0)) == MEM)
1282 /* Process for dynamically allocated array. */
1283 home = XEXP (home, 0);
1286 if (GET_CODE (home) == MEM
1287 && GET_CODE (XEXP (home, 0)) == PLUS
1288 && XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
1289 && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
1291 if (! XEXP (home, 0)->used)
1293 offset = AUTO_OFFSET(XEXP (home, 0));
1295 /* the operand related to the sweep variable. */
1296 if (sweep_offset <= offset
1297 && offset < sweep_offset + sweep_size)
1301 += sweep_frame_offset - sweep_size - sweep_offset;
1302 XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
1306 XEXP (home, 0)->used = 1;
1308 else if (sweep_offset <= offset
1309 && offset < sweep_frame_offset)
1311 /* the rest of variables under sweep_frame_offset,
1312 so shift the location. */
1314 XEXP (XEXP (home, 0), 1)
1315 = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1318 XEXP (home, 0)->used = 1;
1325 sweep_string_in_decls (BLOCK_SUBBLOCKS (block),
1326 sweep_offset, sweep_size);
1328 block = BLOCK_CHAIN (block);
1333 /* Sweep character arrays declared as argument. */
1335 sweep_string_in_args (tree parms, HOST_WIDE_INT sweep_offset,
1336 HOST_WIDE_INT sweep_size)
1339 HOST_WIDE_INT offset;
1341 for (; parms; parms = TREE_CHAIN (parms))
1342 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1344 if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1346 home = DECL_INCOMING_RTL (parms);
1348 if (XEXP (home, 0)->used)
1351 offset = AUTO_OFFSET(XEXP (home, 0));
1353 /* the operand related to the sweep variable. */
1354 if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
1356 if (sweep_offset <= offset
1357 && offset < sweep_offset + sweep_size)
1359 offset += sweep_frame_offset - sweep_size - sweep_offset;
1360 XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
1364 XEXP (home, 0)->used = 1;
1366 else if (sweep_offset <= offset
1367 && offset < sweep_frame_offset)
1369 /* the rest of variables under sweep_frame_offset,
1370 shift the location. */
1371 XEXP (XEXP (home, 0), 1)
1372 = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1375 XEXP (home, 0)->used = 1;
1383 /* Set to 1 when the instruction contains virtual registers. */
1384 static int has_virtual_reg;
1386 /* Sweep the specified character array for every insns. The array starts from
1387 the sweep_offset and its size is sweep_size. */
1389 sweep_string_use_of_insns (rtx insn, HOST_WIDE_INT sweep_offset,
1390 HOST_WIDE_INT sweep_size)
1392 for (; insn; insn = NEXT_INSN (insn))
1393 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1394 || GET_CODE (insn) == CALL_INSN)
1396 has_virtual_reg = FALSE;
1397 sweep_string_in_operand (insn, &PATTERN (insn),
1398 sweep_offset, sweep_size);
1399 sweep_string_in_operand (insn, ®_NOTES (insn),
1400 sweep_offset, sweep_size);
1405 /* Sweep the specified character array, which starts from the sweep_offset and
1406 its size is sweep_size.
1408 When a pointer is given,
1409 if it points the address higher than the array, it stays.
1410 if it points the address inside the array, it changes to point inside
1412 if it points the address lower than the array, it shifts higher address by
1415 sweep_string_in_operand (rtx insn, rtx *loc,
1416 HOST_WIDE_INT sweep_offset, HOST_WIDE_INT sweep_size)
1421 HOST_WIDE_INT offset;
1427 code = GET_CODE (x);
1446 if (x == virtual_incoming_args_rtx
1447 || x == virtual_stack_vars_rtx
1448 || x == virtual_stack_dynamic_rtx
1449 || x == virtual_outgoing_args_rtx
1450 || x == virtual_cfa_rtx)
1451 has_virtual_reg = TRUE;
1456 skip setjmp setup insn and setjmp restore insn
1458 (set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
1459 (set (virtual_stack_vars_rtx) (REG))
1461 if (GET_CODE (XEXP (x, 0)) == MEM
1462 && XEXP (x, 1) == virtual_stack_vars_rtx)
1464 if (XEXP (x, 0) == virtual_stack_vars_rtx
1465 && GET_CODE (XEXP (x, 1)) == REG)
1470 /* Handle typical case of frame register plus constant. */
1471 if (XEXP (x, 0) == virtual_stack_vars_rtx
1472 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1475 goto single_use_of_virtual_reg;
1477 offset = AUTO_OFFSET(x);
1479 /* When arguments grow downward, the virtual incoming
1480 args pointer points to the top of the argument block,
1481 so block is identified by the pointer - 1.
1482 The flag is set at the copy_rtx_and_substitute in integrate.c */
1483 if (RTX_INTEGRATED_P (x))
1486 /* the operand related to the sweep variable. */
1487 if (sweep_offset <= offset + k
1488 && offset + k < sweep_offset + sweep_size)
1490 offset += sweep_frame_offset - sweep_size - sweep_offset;
1492 XEXP (x, 0) = virtual_stack_vars_rtx;
1493 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1496 else if (sweep_offset <= offset + k
1497 && offset + k < sweep_frame_offset)
1499 /* the rest of variables under sweep_frame_offset,
1500 shift the location. */
1501 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1505 single_use_of_virtual_reg:
1506 if (has_virtual_reg) {
1507 /* excerpt from insn_invalid_p in recog.c */
1508 int icode = recog_memoized (insn);
1510 if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
1515 temp = force_operand (x, NULL_RTX);
1519 emit_insn_before (seq, insn);
1520 if (! validate_change (insn, loc, temp, 0)
1521 && !validate_replace_rtx (x, temp, insn))
1522 fatal_insn ("sweep_string_in_operand", insn);
1526 has_virtual_reg = TRUE;
1530 #ifdef FRAME_GROWS_DOWNWARD
1531 /* Alert the case of frame register plus constant given by reg. */
1532 else if (XEXP (x, 0) == virtual_stack_vars_rtx
1533 && GET_CODE (XEXP (x, 1)) == REG)
1534 fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
1538 process further subtree:
1539 Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
1544 case CALL_PLACEHOLDER:
1545 for (i = 0; i < 3; i++)
1547 rtx seq = XEXP (x, i);
1550 push_to_sequence (seq);
1551 sweep_string_use_of_insns (XEXP (x, i),
1552 sweep_offset, sweep_size);
1553 XEXP (x, i) = get_insns ();
1563 /* Scan all subexpressions. */
1564 fmt = GET_RTX_FORMAT (code);
1565 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1569 virtual_stack_vars_rtx without offset
1571 (set (reg:SI xx) (reg:SI 78))
1572 (set (reg:SI xx) (MEM (reg:SI 78)))
1574 if (XEXP (x, i) == virtual_stack_vars_rtx)
1575 fatal_insn ("sweep_string_in_operand: unknown fp usage", insn);
1576 sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
1578 else if (*fmt == 'E')
1579 for (j = 0; j < XVECLEN (x, i); j++)
1580 sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
1584 /* Change the use of an argument to the use of the duplicated variable for
1585 every insns, The variable is addressed by new rtx. */
1587 change_arg_use_of_insns (rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size)
1589 for (; insn; insn = NEXT_INSN (insn))
1590 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1591 || GET_CODE (insn) == CALL_INSN)
1596 change_arg_use_in_operand (insn, PATTERN (insn), orig, new, size);
1600 emit_insn_before (seq, insn);
1602 /* load_multiple insn from virtual_incoming_args_rtx have several
1603 load insns. If every insn change the load address of arg
1604 to frame region, those insns are moved before the PARALLEL insn
1605 and remove the PARALLEL insn. */
1606 if (GET_CODE (PATTERN (insn)) == PARALLEL
1607 && XVECLEN (PATTERN (insn), 0) == 0)
1613 /* Change the use of an argument to the use of the duplicated variable for
1614 every rtx derived from the x. */
1616 change_arg_use_in_operand (rtx insn, rtx x, rtx orig, rtx *new, HOST_WIDE_INT size)
1620 HOST_WIDE_INT offset;
1626 code = GET_CODE (x);
1646 /* Handle special case of MEM (incoming_args). */
1647 if (GET_CODE (orig) == MEM
1648 && XEXP (x, 0) == virtual_incoming_args_rtx)
1652 /* the operand related to the sweep variable. */
1653 if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
1654 offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
1656 offset = AUTO_OFFSET(XEXP (*new, 0))
1657 + (offset - AUTO_OFFSET(XEXP (orig, 0)));
1659 XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
1660 XEXP (x, 0)->used = 1;
1668 /* Handle special case of frame register plus constant. */
1669 if (GET_CODE (orig) == MEM
1670 && XEXP (x, 0) == virtual_incoming_args_rtx
1671 && GET_CODE (XEXP (x, 1)) == CONST_INT
1674 offset = AUTO_OFFSET(x);
1676 /* the operand related to the sweep variable. */
1677 if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
1678 offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
1681 offset = (AUTO_OFFSET(XEXP (*new, 0))
1682 + (offset - AUTO_OFFSET(XEXP (orig, 0))));
1684 XEXP (x, 0) = virtual_stack_vars_rtx;
1685 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1692 process further subtree:
1693 Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
1700 /* Handle special case of "set (REG or MEM) (incoming_args)".
1701 It means that the the address of the 1st argument is stored. */
1702 if (GET_CODE (orig) == MEM
1703 && XEXP (x, 1) == virtual_incoming_args_rtx)
1707 /* the operand related to the sweep variable. */
1708 if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
1709 offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
1711 offset = (AUTO_OFFSET(XEXP (*new, 0))
1712 + (offset - AUTO_OFFSET(XEXP (orig, 0))));
1714 XEXP (x, 1) = force_operand (plus_constant (virtual_stack_vars_rtx,
1716 XEXP (x, 1)->used = 1;
1723 case CALL_PLACEHOLDER:
1724 for (i = 0; i < 3; i++)
1726 rtx seq = XEXP (x, i);
1729 push_to_sequence (seq);
1730 change_arg_use_of_insns (XEXP (x, i), orig, new, size);
1731 XEXP (x, i) = get_insns ();
1738 for (j = 0; j < XVECLEN (x, 0); j++)
1740 change_arg_use_in_operand (insn, XVECEXP (x, 0, j), orig, new, size);
1742 if (recog_memoized (insn) < 0)
1744 for (i = 0, j = 0; j < XVECLEN (x, 0); j++)
1746 /* if parallel insn has a insn used virtual_incoming_args_rtx,
1747 the insn is removed from this PARALLEL insn. */
1748 if (check_used_flag (XVECEXP (x, 0, j)))
1750 emit_insn (XVECEXP (x, 0, j));
1751 XVECEXP (x, 0, j) = NULL;
1754 XVECEXP (x, 0, i++) = XVECEXP (x, 0, j);
1756 PUT_NUM_ELEM (XVEC (x, 0), i);
1764 /* Scan all subexpressions. */
1765 fmt = GET_RTX_FORMAT (code);
1766 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1769 if (XEXP (x, i) == orig)
1772 *new = gen_reg_rtx (GET_MODE (orig));
1776 change_arg_use_in_operand (insn, XEXP (x, i), orig, new, size);
1778 else if (*fmt == 'E')
1779 for (j = 0; j < XVECLEN (x, i); j++)
1781 if (XVECEXP (x, i, j) == orig)
1784 *new = gen_reg_rtx (GET_MODE (orig));
1785 XVECEXP (x, i, j) = *new;
1788 change_arg_use_in_operand (insn, XVECEXP (x, i, j), orig, new, size);
1793 /* Validate every instructions from the specified instruction.
1795 The stack protector prohibits to generate machine specific frame addressing
1796 for the first rtl generation. The prepare_stack_protection must convert
1797 machine independent frame addressing to machine specific frame addressing,
1798 so instructions for inline functions, which skip the conversion of
1799 the stack protection, validate every instructions. */
1801 validate_insns_of_varrefs (rtx insn)
1805 /* Initialize recognition, indicating that volatile is OK. */
1808 for (; insn; insn = next)
1810 next = NEXT_INSN (insn);
1811 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1812 || GET_CODE (insn) == CALL_INSN)
1814 /* excerpt from insn_invalid_p in recog.c */
1815 int icode = recog_memoized (insn);
1817 if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
1818 validate_operand_of_varrefs (insn, &PATTERN (insn));
1822 init_recog_no_volatile ();
1826 /* Validate frame addressing of the rtx and covert it to machine specific one. */
1828 validate_operand_of_varrefs (rtx insn, rtx *loc)
1839 code = GET_CODE (x);
1860 /* validate insn of frame register plus constant. */
1861 if (GET_CODE (x) == PLUS
1862 && XEXP (x, 0) == virtual_stack_vars_rtx
1863 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1867 { /* excerpt from expand_binop in optabs.c */
1868 optab binoptab = add_optab;
1869 enum machine_mode mode = GET_MODE (x);
1870 int icode = (int) binoptab->handlers[(int) mode].insn_code;
1871 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
1873 rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
1874 temp = gen_reg_rtx (mode);
1876 /* Now, if insn's predicates don't allow offset operands,
1877 put them into pseudo regs. */
1879 if (! (*insn_data[icode].operand[2].predicate) (xop1, mode1)
1880 && mode1 != VOIDmode)
1881 xop1 = copy_to_mode_reg (mode1, xop1);
1883 pat = GEN_FCN (icode) (temp, xop0, xop1);
1887 abort (); /* there must be add_optab handler. */
1892 emit_insn_before (seq, insn);
1893 if (! validate_change (insn, loc, temp, 0))
1900 case CALL_PLACEHOLDER:
1901 for (i = 0; i < 3; i++)
1903 rtx seq = XEXP (x, i);
1906 push_to_sequence (seq);
1907 validate_insns_of_varrefs (XEXP (x, i));
1908 XEXP (x, i) = get_insns ();
1918 /* Scan all subexpressions. */
1919 fmt = GET_RTX_FORMAT (code);
1920 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1922 validate_operand_of_varrefs (insn, &XEXP (x, i));
1923 else if (*fmt == 'E')
1924 for (j = 0; j < XVECLEN (x, i); j++)
1925 validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
1930 /* Return size that is not allocated for stack frame. It will be allocated
1931 to modify the home of pseudo registers called from global_alloc. */
1933 get_frame_free_size (void)
1935 if (! flag_propolice_protection)
1938 return push_allocated_offset - push_frame_offset;
1942 /* The following codes are invoked after the instantiation of pseudo registers.
1944 Reorder local variables to place a peudo register after buffers to avoid
1945 the corruption of local variables that could be used to further corrupt
1946 arbitrary memory locations. */
1947 #if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
1948 static void push_frame (HOST_WIDE_INT, HOST_WIDE_INT);
1949 static void push_frame_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
1950 static void push_frame_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
1951 static void push_frame_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
1952 static void push_frame_in_operand (rtx, rtx, HOST_WIDE_INT, HOST_WIDE_INT);
1953 static void push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT, HOST_WIDE_INT);
1954 static void push_frame_of_reg_equiv_constant (HOST_WIDE_INT, HOST_WIDE_INT);
1955 static void reset_used_flags_for_push_frame (void);
1956 static int check_out_of_frame_access (rtx, HOST_WIDE_INT);
1957 static int check_out_of_frame_access_in_operand (rtx, HOST_WIDE_INT);
1961 /* Assign stack local at the stage of register allocater. if a pseudo reg is
1962 spilled out from such an allocation, it is allocated on the stack.
1963 The protector keep the location be lower stack region than the location of
1966 assign_stack_local_for_pseudo_reg (enum machine_mode mode,
1967 HOST_WIDE_INT size, int align)
1969 #if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
1970 return assign_stack_local (mode, size, align);
1972 tree blocks = DECL_INITIAL (current_function_decl);
1974 HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
1975 int first_call_from_purge_addressof, first_call_from_global_alloc;
1977 if (! flag_propolice_protection
1980 || current_function_is_inlinable
1981 || ! search_string_from_argsandvars (CALL_FROM_PUSH_FRAME)
1982 || current_function_contains_functions)
1983 return assign_stack_local (mode, size, align);
1985 first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
1986 first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
1987 saved_cse_not_expected = cse_not_expected;
1989 starting_frame = ((STARTING_FRAME_OFFSET)
1990 ? STARTING_FRAME_OFFSET : BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1991 units_per_push = MAX (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1992 GET_MODE_SIZE (mode));
1994 if (first_call_from_purge_addressof)
1996 push_frame_offset = push_allocated_offset;
1997 if (check_out_of_frame_access (get_insns (), starting_frame))
1999 /* After the purge_addressof stage, there may be an instruction which
2000 have the pointer less than the starting_frame.
2001 if there is an access below frame, push dummy region to seperate
2002 the address of instantiated variables. */
2003 push_frame (GET_MODE_SIZE (DImode), 0);
2004 assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2008 if (first_call_from_global_alloc)
2010 push_frame_offset = push_allocated_offset = 0;
2011 if (check_out_of_frame_access (get_insns (), starting_frame))
2013 if (STARTING_FRAME_OFFSET)
2015 /* if there is an access below frame, push dummy region
2016 to seperate the address of instantiated variables. */
2017 push_frame (GET_MODE_SIZE (DImode), 0);
2018 assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2021 push_allocated_offset = starting_frame;
2025 saved_frame_offset = frame_offset;
2026 frame_offset = push_frame_offset;
2028 new = assign_stack_local (mode, size, align);
2030 push_frame_offset = frame_offset;
2031 frame_offset = saved_frame_offset;
2033 if (push_frame_offset > push_allocated_offset)
2035 push_frame (units_per_push,
2036 push_allocated_offset + STARTING_FRAME_OFFSET);
2038 assign_stack_local (BLKmode, units_per_push, -1);
2039 push_allocated_offset += units_per_push;
2042 /* At the second call from global alloc, alpha push frame and assign
2043 a local variable to the top of the stack. */
2044 if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
2045 push_frame_offset = push_allocated_offset = 0;
2052 #if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2054 /* push frame infomation for instantiating pseudo register at the top of stack.
2055 This is only for the "frame grows upward", it means FRAME_GROWS_DOWNWARD is
2058 It is called by purge_addressof function and global_alloc (or reload)
2061 push_frame (HOST_WIDE_INT var_size, HOST_WIDE_INT boundary)
2063 reset_used_flags_for_push_frame();
2065 /* Scan all declarations of variables and fix the offset address of
2066 the variable based on the frame pointer. */
2067 push_frame_in_decls (DECL_INITIAL (current_function_decl),
2068 var_size, boundary);
2070 /* Scan all argument variable and fix the offset address based on
2071 the frame pointer. */
2072 push_frame_in_args (DECL_ARGUMENTS (current_function_decl),
2073 var_size, boundary);
2075 /* Scan all operands of all insns and fix the offset address
2076 based on the frame pointer. */
2077 push_frame_of_insns (get_insns (), var_size, boundary);
2079 /* Scan all reg_equiv_memory_loc and reg_equiv_constant. */
2080 push_frame_of_reg_equiv_memory_loc (var_size, boundary);
2081 push_frame_of_reg_equiv_constant (var_size, boundary);
2083 reset_used_flags_for_push_frame();
2087 /* Reset used flag of every insns, reg_equiv_memory_loc,
2088 and reg_equiv_constant. */
2090 reset_used_flags_for_push_frame(void)
2093 extern rtx *reg_equiv_memory_loc;
2094 extern rtx *reg_equiv_constant;
2096 /* Clear all the USED bits in operands of all insns and declarations of
2098 reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
2099 reset_used_flags_for_insns (get_insns ());
2102 /* The following codes are processed if the push_frame is called from
2103 global_alloc (or reload) function. */
2104 if (reg_equiv_memory_loc == 0)
2107 for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2108 if (reg_equiv_memory_loc[i])
2110 rtx x = reg_equiv_memory_loc[i];
2112 if (GET_CODE (x) == MEM
2113 && GET_CODE (XEXP (x, 0)) == PLUS
2114 && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
2117 XEXP (x, 0)->used = 0;
2122 if (reg_equiv_constant == 0)
2125 for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2126 if (reg_equiv_constant[i])
2128 rtx x = reg_equiv_constant[i];
2130 if (GET_CODE (x) == PLUS
2131 && AUTO_BASEPTR (x) == frame_pointer_rtx)
2140 /* Push every variables declared as a local variable and make a room for
2141 instantiated register. */
2143 push_frame_in_decls (tree block, HOST_WIDE_INT push_size,
2144 HOST_WIDE_INT boundary)
2147 HOST_WIDE_INT offset;
2150 while (block && TREE_CODE(block)==BLOCK)
2152 for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
2154 /* Skip the declaration that refers an external variable and
2155 also skip an global variable. */
2156 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
2158 if (! DECL_RTL_SET_P (types))
2161 home = DECL_RTL (types);
2163 /* Process for static local variable. */
2164 if (GET_CODE (home) == MEM
2165 && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
2168 if (GET_CODE (home) == MEM
2169 && GET_CODE (XEXP (home, 0)) == REG)
2171 if (XEXP (home, 0) != frame_pointer_rtx
2175 XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2179 XEXP (home, 0)->used = 1;
2182 if (GET_CODE (home) == MEM
2183 && GET_CODE (XEXP (home, 0)) == MEM)
2185 /* Process for dynamically allocated array. */
2186 home = XEXP (home, 0);
2189 if (GET_CODE (home) == MEM
2190 && GET_CODE (XEXP (home, 0)) == PLUS
2191 && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
2193 offset = AUTO_OFFSET(XEXP (home, 0));
2195 if (! XEXP (home, 0)->used
2196 && offset >= boundary)
2198 offset += push_size;
2199 XEXP (XEXP (home, 0), 1)
2200 = gen_rtx_CONST_INT (VOIDmode, offset);
2203 XEXP (home, 0)->used = 1;
2209 push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
2210 block = BLOCK_CHAIN (block);
2215 /* Push every variables declared as an argument and make a room for
2216 instantiated register. */
2218 push_frame_in_args (tree parms, HOST_WIDE_INT push_size,
2219 HOST_WIDE_INT boundary)
2222 HOST_WIDE_INT offset;
2224 for (; parms; parms = TREE_CHAIN (parms))
2225 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
2227 if (PARM_PASSED_IN_MEMORY (parms))
2229 home = DECL_INCOMING_RTL (parms);
2230 offset = AUTO_OFFSET(XEXP (home, 0));
2232 if (XEXP (home, 0)->used || offset < boundary)
2235 /* the operand related to the sweep variable. */
2236 if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
2238 if (XEXP (home, 0) == frame_pointer_rtx)
2239 XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2242 offset += push_size;
2243 XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
2248 XEXP (home, 0)->used = 1;
2255 /* Set to 1 when the instruction has the reference to be pushed. */
2256 static int insn_pushed;
2258 /* Tables of equivalent registers with frame pointer. */
2259 static int *fp_equiv = 0;
2262 /* Push the frame region to make a room for allocated local variable. */
2264 push_frame_of_insns (rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
2267 fp_equiv = (int *) xcalloc (max_reg_num (), sizeof (int));
2269 for (; insn; insn = NEXT_INSN (insn))
2270 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2271 || GET_CODE (insn) == CALL_INSN)
2275 insn_pushed = FALSE;
2277 /* Push frame in INSN operation. */
2278 push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
2280 /* Push frame in NOTE. */
2281 push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
2283 /* Push frame in CALL EXPR_LIST. */
2284 if (GET_CODE (insn) == CALL_INSN)
2285 push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn),
2286 push_size, boundary);
2288 /* Pushed frame addressing style may not be machine specific one.
2289 so the instruction should be converted to use the machine specific
2290 frame addressing. */
2292 && (last = try_split (PATTERN (insn), insn, 1)) != insn)
2294 rtx first = NEXT_INSN (insn);
2295 rtx trial = NEXT_INSN (first);
2296 rtx pattern = PATTERN (trial);
2299 /* Update REG_EQUIV info to the first splitted insn. */
2300 if ((set = single_set (insn))
2301 && find_reg_note (insn, REG_EQUIV, SET_SRC (set))
2302 && GET_CODE (PATTERN (first)) == SET)
2305 = gen_rtx_EXPR_LIST (REG_EQUIV,
2306 SET_SRC (PATTERN (first)),
2310 /* copy the first insn of splitted insns to the original insn and
2311 delete the first insn,
2312 because the original insn is pointed from records:
2313 insn_chain, reg_equiv_init, used for global_alloc. */
2314 if (cse_not_expected)
2316 add_insn_before (insn, first);
2318 /* Copy the various flags, and other information. */
2319 memcpy (insn, first, sizeof (struct rtx_def) - sizeof (rtunion));
2320 PATTERN (insn) = PATTERN (first);
2321 INSN_CODE (insn) = INSN_CODE (first);
2322 LOG_LINKS (insn) = LOG_LINKS (first);
2323 REG_NOTES (insn) = REG_NOTES (first);
2325 /* then remove the first insn of splitted insns. */
2326 remove_insn (first);
2327 INSN_DELETED_P (first) = 1;
2330 if (GET_CODE (pattern) == SET
2331 && GET_CODE (XEXP (pattern, 0)) == REG
2332 && GET_CODE (XEXP (pattern, 1)) == PLUS
2333 && XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
2334 && GET_CODE (XEXP (XEXP (pattern, 1), 1)) == CONST_INT)
2336 rtx offset = XEXP (XEXP (pattern, 1), 1);
2337 fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
2339 delete_insn (trial);
2351 /* Push the frame region by changing the operand that points the frame. */
2353 push_frame_in_operand (rtx insn, rtx orig,
2354 HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
2359 HOST_WIDE_INT offset;
2365 code = GET_CODE (x);
2387 Skip setjmp setup insn and setjmp restore insn
2389 (set (MEM (reg:SI xx)) (frame_pointer_rtx)))
2390 (set (frame_pointer_rtx) (REG))
2392 if (GET_CODE (XEXP (x, 0)) == MEM
2393 && XEXP (x, 1) == frame_pointer_rtx)
2395 if (XEXP (x, 0) == frame_pointer_rtx
2396 && GET_CODE (XEXP (x, 1)) == REG)
2400 powerpc case: restores setjmp address
2401 (set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
2403 (set (reg) (plus frame_pointer_rtx const_int -n))
2404 (set (frame_pointer_rtx) (reg))
2406 if (GET_CODE (XEXP (x, 0)) == REG
2407 && GET_CODE (XEXP (x, 1)) == PLUS
2408 && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
2409 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2410 && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
2413 offset = AUTO_OFFSET(x);
2414 if (x->used || -offset < boundary)
2417 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
2418 x->used = 1; insn_pushed = TRUE;
2422 /* Reset fp_equiv register. */
2423 else if (GET_CODE (XEXP (x, 0)) == REG
2424 && fp_equiv[REGNO (XEXP (x, 0))])
2425 fp_equiv[REGNO (XEXP (x, 0))] = 0;
2427 /* Propagete fp_equiv register. */
2428 else if (GET_CODE (XEXP (x, 0)) == REG
2429 && GET_CODE (XEXP (x, 1)) == REG
2430 && fp_equiv[REGNO (XEXP (x, 1))])
2431 if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
2432 || reg_renumber[REGNO (XEXP (x, 0))] > 0)
2433 fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
2437 if (XEXP (x, 0) == frame_pointer_rtx
2440 XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2441 XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2447 /* Handle special case of frame register plus constant. */
2448 if (GET_CODE (XEXP (x, 1)) == CONST_INT
2449 && XEXP (x, 0) == frame_pointer_rtx)
2451 offset = AUTO_OFFSET(x);
2453 if (x->used || offset < boundary)
2456 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2457 x->used = 1; insn_pushed = TRUE;
2463 (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
2465 if (GET_CODE (XEXP (x, 1)) == CONST_INT
2466 && GET_CODE (XEXP (x, 0)) == SUBREG
2467 && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
2469 offset = AUTO_OFFSET(x);
2471 if (x->used || offset < boundary)
2474 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2475 x->used = 1; insn_pushed = TRUE;
2480 Handle powerpc case:
2481 (set (reg x) (plus fp const))
2482 (set (.....) (... (plus (reg x) (const B))))
2484 else if (GET_CODE (XEXP (x, 1)) == CONST_INT
2485 && GET_CODE (XEXP (x, 0)) == REG
2486 && fp_equiv[REGNO (XEXP (x, 0))])
2488 offset = AUTO_OFFSET(x);
2493 offset += fp_equiv[REGNO (XEXP (x, 0))];
2495 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2496 x->used = 1; insn_pushed = TRUE;
2501 Handle special case of frame register plus reg (constant).
2502 (set (reg x) (const B))
2503 (set (....) (...(plus fp (reg x))))
2505 else if (XEXP (x, 0) == frame_pointer_rtx
2506 && GET_CODE (XEXP (x, 1)) == REG
2508 && PATTERN (PREV_INSN (insn))
2509 && SET_DEST (PATTERN (PREV_INSN (insn))) == XEXP (x, 1)
2510 && GET_CODE (SET_SRC (PATTERN (PREV_INSN (insn)))) == CONST_INT)
2512 offset = INTVAL (SET_SRC (PATTERN (PREV_INSN (insn))));
2514 if (x->used || offset < boundary)
2517 SET_SRC (PATTERN (PREV_INSN (insn)))
2518 = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2520 XEXP (x, 1)->used = 1;
2525 Handle special case of frame register plus reg (used).
2526 The register already have a pushed offset, just mark this frame
2529 else if (XEXP (x, 0) == frame_pointer_rtx
2530 && XEXP (x, 1)->used)
2536 Process further subtree:
2537 Example: (plus:SI (mem/s:SI (plus:SI (FP) (const_int 8)))
2542 case CALL_PLACEHOLDER:
2543 push_frame_of_insns (XEXP (x, 0), push_size, boundary);
2544 push_frame_of_insns (XEXP (x, 1), push_size, boundary);
2545 push_frame_of_insns (XEXP (x, 2), push_size, boundary);
2552 /* Scan all subexpressions. */
2553 fmt = GET_RTX_FORMAT (code);
2554 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2557 if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
2558 fatal_insn ("push_frame_in_operand", insn);
2559 push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
2561 else if (*fmt == 'E')
2562 for (j = 0; j < XVECLEN (x, i); j++)
2563 push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary);
2567 /* Change the location pointed in reg_equiv_memory_loc. */
2569 push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT push_size,
2570 HOST_WIDE_INT boundary)
2573 extern rtx *reg_equiv_memory_loc;
2575 /* This function is processed if the push_frame is called from
2576 global_alloc (or reload) function. */
2577 if (reg_equiv_memory_loc == 0)
2580 for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2581 if (reg_equiv_memory_loc[i])
2583 rtx x = reg_equiv_memory_loc[i];
2586 if (GET_CODE (x) == MEM
2587 && GET_CODE (XEXP (x, 0)) == PLUS
2588 && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
2590 offset = AUTO_OFFSET(XEXP (x, 0));
2592 if (! XEXP (x, 0)->used
2593 && offset >= boundary)
2595 offset += push_size;
2596 XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2599 XEXP (x, 0)->used = 1;
2602 else if (GET_CODE (x) == MEM
2603 && XEXP (x, 0) == frame_pointer_rtx
2606 XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2607 XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2613 /* Change the location pointed in reg_equiv_constant. */
2615 push_frame_of_reg_equiv_constant (HOST_WIDE_INT push_size,
2616 HOST_WIDE_INT boundary)
2619 extern rtx *reg_equiv_constant;
2621 /* This function is processed if the push_frame is called from
2622 global_alloc (or reload) function. */
2623 if (reg_equiv_constant == 0)
2626 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
2627 if (reg_equiv_constant[i])
2629 rtx x = reg_equiv_constant[i];
2632 if (GET_CODE (x) == PLUS
2633 && XEXP (x, 0) == frame_pointer_rtx)
2635 offset = AUTO_OFFSET(x);
2638 && offset >= boundary)
2640 offset += push_size;
2641 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2647 else if (x == frame_pointer_rtx
2650 reg_equiv_constant[i]
2651 = plus_constant (frame_pointer_rtx, push_size);
2652 reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
2658 /* Check every instructions if insn's memory reference is out of frame. */
2660 check_out_of_frame_access (rtx insn, HOST_WIDE_INT boundary)
2662 for (; insn; insn = NEXT_INSN (insn))
2663 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2664 || GET_CODE (insn) == CALL_INSN)
2666 if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
2673 /* Check every operands if the reference is out of frame. */
2675 check_out_of_frame_access_in_operand (rtx orig, HOST_WIDE_INT boundary)
2685 code = GET_CODE (x);
2705 if (XEXP (x, 0) == frame_pointer_rtx)
2711 /* Handle special case of frame register plus constant. */
2712 if (GET_CODE (XEXP (x, 1)) == CONST_INT
2713 && XEXP (x, 0) == frame_pointer_rtx)
2715 if (0 <= AUTO_OFFSET(x)
2716 && AUTO_OFFSET(x) < boundary)
2721 Process further subtree:
2722 Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2727 case CALL_PLACEHOLDER:
2728 if (check_out_of_frame_access (XEXP (x, 0), boundary))
2730 if (check_out_of_frame_access (XEXP (x, 1), boundary))
2732 if (check_out_of_frame_access (XEXP (x, 2), boundary))
2740 /* Scan all subexpressions. */
2741 fmt = GET_RTX_FORMAT (code);
2742 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2745 if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
2748 else if (*fmt == 'E')
2749 for (j = 0; j < XVECLEN (x, i); j++)
2750 if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))