1 /* $DragonFly: src/gnu/usr.bin/cc34/cc_prep/protector.c,v 1.1 2004/06/19 10:34:17 joerg Exp $ */
2 /* RTL buffer overflow protection function for GNU C compiler
3 Copyright (C) 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file contains several memory arrangement functions to protect
23 the return address and the frame pointer of the stack
24 from a stack-smashing attack. It also
25 provides the function that protects pointer variables. */
29 #include "coretypes.h"
37 #include "insn-config.h"
38 #include "insn-flags.h"
42 #include "hard-reg-set.h"
47 #include "conditions.h"
48 #include "insn-attr.h"
51 #include "protector.h"
54 /* Round a value to the lowest integer less than it that is a multiple of
55 the required alignment. Avoid using division in case the value is
56 negative. Assume the alignment is a power of two. */
57 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
59 /* Similar, but round to the next highest integer that meets the
61 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
64 /* Nonzero if function being compiled can define string buffers that may be
65 damaged by the stack-smash attack. */
66 static int current_function_defines_vulnerable_string;
/* Per-function analysis results set by search_string_* below:
   - short_string: a char array smaller than SUSPICIOUS_BUF_SIZE exists;
   - variable_string: a variable-length buffer (or alloca) exists;
   - vsized_array: an array with non-constant bounds was seen.  */
67 static int current_function_defines_short_string;
68 static int current_function_has_variable_string;
69 static int current_function_defines_vsized_array;
70 static int current_function_is_inlinable;
/* NOTE(review): "_guard" is a reserved identifier (leading underscore at
   file scope); kept as-is since it is referenced throughout this file.  */
73 static rtx guard_area, _guard;
74 static rtx function_first_insn, prologue_insert_point;
76 /* Offset to end of swept area for gathering character arrays. */
77 static HOST_WIDE_INT sweep_frame_offset;
79 /* Offset to end of allocated area for instantiating pseudo registers. */
80 static HOST_WIDE_INT push_allocated_offset = 0;
82 /* Offset to end of assigned area for instantiating pseudo registers. */
83 static HOST_WIDE_INT push_frame_offset = 0;
85 /* Set to 1 after cse_not_expected becomes nonzero. it is used to identify
86 which stage assign_stack_local_for_pseudo_reg is called from. */
87 static int saved_cse_not_expected = 0;
/* Forward declarations for the static helpers of this pass.  */
89 static int search_string_from_argsandvars (int);
90 static int search_string_from_local_vars (tree);
91 static int search_pointer_def (tree);
92 static int search_func_pointer (tree);
93 static int check_used_flag (rtx);
94 static void reset_used_flags_for_insns (rtx);
95 static void reset_used_flags_for_decls (tree);
96 static void reset_used_flags_of_plus (rtx);
97 static void rtl_prologue (rtx);
98 static void rtl_epilogue (rtx);
99 static void arrange_var_order (tree);
100 static void copy_args_for_protection (void);
101 static void sweep_string_variable (rtx, HOST_WIDE_INT);
102 static void sweep_string_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
103 static void sweep_string_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
104 static void sweep_string_use_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
105 static void sweep_string_in_operand (rtx, rtx *, HOST_WIDE_INT, HOST_WIDE_INT);
106 static void move_arg_location (rtx, rtx, rtx, HOST_WIDE_INT);
107 static void change_arg_use_of_insns (rtx, rtx, rtx *, HOST_WIDE_INT);
108 static void change_arg_use_in_operand (rtx, rtx, rtx, rtx *, HOST_WIDE_INT);
109 static void validate_insns_of_varrefs (rtx);
110 static void validate_operand_of_varrefs (rtx, rtx *);
112 /* Specify which size of buffers should be protected from a stack smashing
113 attack. Because small buffers are not used in situations which may
114 overflow buffer, the default size sets to the size of 64 bit register. */
115 #ifndef SUSPICIOUS_BUF_SIZE
116 #define SUSPICIOUS_BUF_SIZE 8
/* AUTO_BASEPTR/AUTO_OFFSET decompose a (plus BASE (const_int OFF))
   address into its base rtx and integer offset; a bare rtx is treated
   as base with offset 0.  */
119 #define AUTO_BASEPTR(X) \
120 (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
121 #define AUTO_OFFSET(X) \
122 (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
123 #undef PARM_PASSED_IN_MEMORY
124 #define PARM_PASSED_IN_MEMORY(PARM) \
125 (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
/* NOTE(review): redefines TREE_VISITED locally on top of the tree.h
   bit "unused_0" — verify this does not clash with other passes.  */
126 #define TREE_VISITED(NODE) ((NODE)->common.unused_0)
128 /* Argument values for calling search_string_from_argsandvars. */
129 #define CALL_FROM_PREPARE_STACK_PROTECTION 0
130 #define CALL_FROM_PUSH_FRAME 1
133 /* Prepare several stack protection instruments for the current function
134 if the function has an array as a local variable, which may be vulnerable
135 from a stack smashing attack, and it is not inlinable.
137 The overall steps are as follows;
139 (2)insert guard_area on the stack,
140 (3)duplicate pointer arguments into local variables, and
141 (4)arrange the location of local variables. */
/* NOTE(review): this listing is an incomplete excerpt — the return type,
   braces and several statements are missing; code kept byte-identical.  */
143 prepare_stack_protection (int inlinable)
145 tree blocks = DECL_INITIAL (current_function_decl);
146 current_function_is_inlinable = inlinable && !flag_no_inline;
147 push_frame_offset = push_allocated_offset = 0;
148 saved_cse_not_expected = 0;
150 /* Skip the protection if the function has no block
151 or it is an inline function. */
152 if (current_function_is_inlinable)
153 validate_insns_of_varrefs (get_insns ());
154 if (! blocks || current_function_is_inlinable)
/* Analyze the whole function once; the result decides whether any
   guard/reordering work is done at all.  */
157 current_function_defines_vulnerable_string
158 = search_string_from_argsandvars (CALL_FROM_PREPARE_STACK_PROTECTION);
160 if (current_function_defines_vulnerable_string
161 || flag_stack_protection)
163 function_first_insn = get_insns ();
165 if (current_function_contains_functions)
167 if (warn_stack_protector)
168 warning ("not protecting function: it contains functions");
172 /* Initialize recognition, indicating that volatile is OK. */
175 sweep_frame_offset = 0;
177 #ifdef STACK_GROWS_DOWNWARD
178 /* frame_offset: offset to end of allocated area of stack frame.
179 It is defined in the function.c. */
181 /* the location must be before buffers. */
/* Reserve the canary slot ("guard area") as a stack local and mark it
   volatile so later passes do not delete or cache accesses to it.  */
182 guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
183 PUT_MODE (guard_area, GUARD_m);
184 MEM_VOLATILE_P (guard_area) = 1;
186 #ifndef FRAME_GROWS_DOWNWARD
187 sweep_frame_offset = frame_offset;
190 /* For making room for guard value, scan all insns and fix the offset
191 address of the variable that is based on frame pointer.
192 Scan all declarations of variables and fix the offset address
193 of the variable that is based on the frame pointer. */
194 sweep_string_variable (guard_area, UNITS_PER_GUARD);
197 /* the location of guard area moves to the beginning of stack frame. */
198 if (AUTO_OFFSET(XEXP (guard_area, 0)))
199 XEXP (XEXP (guard_area, 0), 1)
200 = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);
203 /* Insert prologue rtl instructions. */
204 rtl_prologue (function_first_insn);
206 if (! current_function_has_variable_string)
208 /* Generate argument saving instruction. */
209 copy_args_for_protection ();
211 #ifndef FRAME_GROWS_DOWNWARD
212 /* If frame grows upward, character arrays for protecting args
213 may copy to the top of the guard variable.
214 So sweep the guard variable again. */
215 sweep_frame_offset = CEIL_ROUND (frame_offset,
216 BIGGEST_ALIGNMENT / BITS_PER_UNIT);
217 sweep_string_variable (guard_area, UNITS_PER_GUARD);
220 /* Variable can't be protected from the overflow of variable length
221 buffer. But variable reordering is still effective against
222 the overflow of fixed size character arrays. */
223 else if (warn_stack_protector)
224 warning ("not protecting variables: it has a variable length buffer");
226 #ifndef FRAME_GROWS_DOWNWARD
227 if (STARTING_FRAME_OFFSET == 0)
229 /* This part may be only for alpha. */
230 push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
231 assign_stack_local (BLKmode, push_allocated_offset, -1);
232 sweep_frame_offset = frame_offset;
233 sweep_string_variable (const0_rtx, -push_allocated_offset);
234 sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
238 /* Arrange the order of local variables. */
239 arrange_var_order (blocks);
241 #ifdef STACK_GROWS_DOWNWARD
242 /* Insert epilogue rtl instructions. */
243 rtl_epilogue (get_last_insn ());
245 init_recog_no_volatile ();
247 else if (current_function_defines_short_string
248 && warn_stack_protector)
249 warning ("not protecting function: buffer is less than %d bytes long",
250 SUSPICIOUS_BUF_SIZE);
254 Search string from arguments and local variables.
255 caller: CALL_FROM_PREPARE_STACK_PROTECTION (0)
256 CALL_FROM_PUSH_FRAME (1)
/* Returns nonzero when the current function declares a character array
   worth protecting.  The result is memoized per function decl.  */
259 search_string_from_argsandvars (int caller)
264 /* Saves the latest search result as cached information. */
/* NOTE(review): "__latest_search_decl"/"__latest_search_result" use
   double-underscore names, which are reserved for the implementation.  */
265 static tree __latest_search_decl = 0;
266 static int __latest_search_result = FALSE;
268 if (__latest_search_decl == current_function_decl)
269 return __latest_search_result;
271 if (caller == CALL_FROM_PUSH_FRAME)
274 __latest_search_decl = current_function_decl;
275 __latest_search_result = TRUE;
/* Reset the per-function analysis flags before re-scanning.  */
277 current_function_defines_short_string = FALSE;
278 current_function_has_variable_string = FALSE;
279 current_function_defines_vsized_array = FALSE;
281 /* Search a string variable from local variables. */
282 blocks = DECL_INITIAL (current_function_decl);
283 string_p = search_string_from_local_vars (blocks);
/* alloca implies a dynamically sized object even without a VLA decl.  */
285 if (! current_function_defines_vsized_array && current_function_calls_alloca)
287 current_function_has_variable_string = TRUE;
294 #ifdef STACK_GROWS_DOWNWARD
295 /* Search a string variable from arguments. */
296 parms = DECL_ARGUMENTS (current_function_decl);
298 for (; parms; parms = TREE_CHAIN (parms))
299 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
301 if (PARM_PASSED_IN_MEMORY (parms))
303 string_p = search_string_def (TREE_TYPE(parms));
310 __latest_search_result = FALSE;
315 /* Search string from local variables in the specified scope. */
/* Walks BLOCK and its siblings/sub-blocks looking for a stack-resident
   VAR_DECL whose type contains a character array (per search_string_def).  */
317 search_string_from_local_vars (tree block)
322 while (block && TREE_CODE(block)==BLOCK)
324 for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
326 /* Skip the declaration that refers an external variable. */
327 /* name: types.decl.name.identifier.id */
328 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
329 && TREE_CODE (types) == VAR_DECL
330 && ! DECL_ARTIFICIAL (types)
331 && DECL_RTL_SET_P (types)
332 && GET_CODE (DECL_RTL (types)) == MEM
334 && search_string_def (TREE_TYPE (types)))
336 rtx home = DECL_RTL (types);
/* Detect indirect homes: MEM-of-MEM or MEM based on a register other
   than the (virtual/hard) frame, stack or arg pointer.  */
338 if (GET_CODE (home) == MEM
339 && (GET_CODE (XEXP (home, 0)) == MEM
340 || (GET_CODE (XEXP (home, 0)) == REG
341 && XEXP (home, 0) != virtual_stack_vars_rtx
342 && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
343 && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
344 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
345 && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
348 /* If the value is indirect by memory or by a register
349 that isn't the frame pointer then it means the object is
350 variable-sized and address through
351 that register or stack slot.
352 The protection has no way to hide pointer variables
353 behind the array, so all we can do is staying
354 the order of variables and arguments. */
356 current_function_has_variable_string = TRUE;
359 /* Found character array. */
/* Recurse into nested scopes before moving to the next sibling.  */
364 if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
369 block = BLOCK_CHAIN (block);
376 /* Search a character array from the specified type tree. */
/* Returns nonzero when TYPE contains a char/signed char/unsigned char
   array large enough to protect; also records side-channel facts in the
   current_function_defines_* flags as it scans.  */
378 search_string_def (tree type)
385 switch (TREE_CODE (type))
388 /* Check if the array is a variable-sized array. */
/* A NOP_EXPR upper bound (rather than an INTEGER_CST) marks a
   variable-length array.  */
389 if (TYPE_DOMAIN (type) == 0
390 || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
391 && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
392 current_function_defines_vsized_array = TRUE;
394 /* Check if the array is related to char array. */
395 if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
396 || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
397 || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
399 /* Check if the string is a variable string. */
400 if (TYPE_DOMAIN (type) == 0
401 || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
402 && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
405 /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE. */
406 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
407 && (TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1
408 >= SUSPICIOUS_BUF_SIZE))
411 current_function_defines_short_string = TRUE;
414 /* to protect every functions, sweep any arrays to the frame top. */
/* Non-char arrays: recurse on the element type.  */
417 return search_string_def(TREE_TYPE(type));
420 case QUAL_UNION_TYPE:
422 /* Check if each field has character arrays. */
423 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
425 /* Omit here local type decls until we know how to support them. */
426 if ((TREE_CODE (tem) == TYPE_DECL)
427 || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
430 if (search_string_def(TREE_TYPE(tem)))
446 /* Examine whether the input contains frame pointer addressing. */
/* NOTE(review): the signature line of this function is missing from
   this excerpt; per the prototype list it is contains_fp (rtx) — verify
   against the full source.  */
473 /* This case is not generated at the stack protection.
474 see plus_constant_wide and simplify_plus_minus function. */
475 if (XEXP (x, 0) == virtual_stack_vars_rtx)
/* Frame-pointer-plus-constant is the addressing form we look for.  */
479 if (XEXP (x, 0) == virtual_stack_vars_rtx
480 && GET_CODE (XEXP (x, 1)) == CONST_INT)
487 /* Scan all subexpressions. */
488 fmt = GET_RTX_FORMAT (code);
489 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
492 if (contains_fp (XEXP (x, i)))
495 else if (*fmt == 'E')
496 for (j = 0; j < XVECLEN (x, i); j++)
497 if (contains_fp (XVECEXP (x, i, j)))
504 /* Examine whether the input contains any pointer. */
/* Returns nonzero when TYPE contains a pointer member that warrants
   copying the argument into a protected local (see
   copy_args_for_protection).  */
506 search_pointer_def (tree type)
513 switch (TREE_CODE (type))
516 case QUAL_UNION_TYPE:
518 /* Check if each field has a pointer. */
519 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
521 if ((TREE_CODE (tem) == TYPE_DECL)
522 || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
525 if (search_pointer_def (TREE_TYPE(tem)))
531 return search_pointer_def (TREE_TYPE(type));
/* Read-only pointees need protection only if they hold function
   pointers.  */
536 if (TYPE_READONLY (TREE_TYPE (type)))
538 /* If this pointer contains function pointer,
539 it should be protected. */
540 return search_func_pointer (TREE_TYPE (type));
552 /* Examine whether the input contains function pointer. */
/* Recursive scan of TYPE for a pointer-to-function member.  Uses
   TREE_VISITED as a cycle guard on aggregate types and clears it again
   before returning.  */
554 search_func_pointer (tree type)
561 switch (TREE_CODE (type))
564 case QUAL_UNION_TYPE:
566 if (! TREE_VISITED (type))
568 /* Mark the type as having been visited already. */
569 TREE_VISITED (type) = 1;
571 /* Check if each field has a function pointer. */
572 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
574 if (TREE_CODE (tem) == FIELD_DECL
575 && search_func_pointer (TREE_TYPE(tem)))
577 TREE_VISITED (type) = 0;
582 TREE_VISITED (type) = 0;
587 return search_func_pointer (TREE_TYPE(type));
592 if (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
594 return search_func_pointer (TREE_TYPE(type));
604 /* Check whether the specified rtx contains PLUS rtx with used flag. */
/* Generic rtx walk over 'e' and 'E' operands; returns nonzero as soon as
   a flagged PLUS is found anywhere under X.  */
606 check_used_flag (rtx x)
609 register enum rtx_code code;
610 register const char *format_ptr;
637 format_ptr = GET_RTX_FORMAT (code);
638 for (i = 0; i < GET_RTX_LENGTH (code); i++)
640 switch (*format_ptr++)
643 if (check_used_flag (XEXP (x, i)))
648 for (j = 0; j < XVECLEN (x, i); j++)
649 if (check_used_flag (XVECEXP (x, i, j)))
659 /* Reset used flag of every insn after the specified insn. */
/* Walks the insn chain from INSN onward and clears the PLUS "used"
   markers inside each real insn's operands via reset_used_flags_of_plus.  */
661 reset_used_flags_for_insns (rtx insn)
665 const char *format_ptr;
667 for (; insn; insn = NEXT_INSN (insn))
668 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
669 || GET_CODE (insn) == CALL_INSN)
671 code = GET_CODE (insn);
673 format_ptr = GET_RTX_FORMAT (code);
675 for (i = 0; i < GET_RTX_LENGTH (code); i++)
677 switch (*format_ptr++)
680 reset_used_flags_of_plus (XEXP (insn, i));
684 for (j = 0; j < XVECLEN (insn, i); j++)
685 reset_used_flags_of_plus (XVECEXP (insn, i, j));
693 /* Reset used flag of every variable in the specified block. */
/* Clears the "used" bit on the frame-pointer-plus-constant address of
   every stack variable in BLOCK and its sub-blocks, so a later sweep
   pass can re-mark addresses it has already relocated.  */
695 reset_used_flags_for_decls (tree block)
700 while (block && TREE_CODE(block)==BLOCK)
702 types = BLOCK_VARS(block);
704 for (types= BLOCK_VARS(block); types; types = TREE_CHAIN(types))
706 /* Skip the declaration that refers an external variable and
707 also skip an global variable. */
708 if (! DECL_EXTERNAL (types))
710 if (! DECL_RTL_SET_P (types))
712 home = DECL_RTL (types);
714 if (GET_CODE (home) == MEM
715 && GET_CODE (XEXP (home, 0)) == PLUS
716 && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
/* Direct bit-field poke on the rtx "used" flag.  */
718 XEXP (home, 0)->used = 0;
723 reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
725 block = BLOCK_CHAIN (block);
730 /* Reset the used flag of every PLUS rtx derived from the specified rtx. */
/* Recursive rtx walk; shared/constant rtx kinds are skipped (they carry
   no per-use state), and CALL_PLACEHOLDER bodies are handled through
   reset_used_flags_for_insns.  */
732 reset_used_flags_of_plus (rtx x)
736 const char *format_ptr;
745 /* These types may be freely shared so we needn't do any resetting
763 /* The chain of insns is not being copied. */
770 case CALL_PLACEHOLDER:
771 reset_used_flags_for_insns (XEXP (x, 0));
772 reset_used_flags_for_insns (XEXP (x, 1));
773 reset_used_flags_for_insns (XEXP (x, 2));
780 format_ptr = GET_RTX_FORMAT (code);
781 for (i = 0; i < GET_RTX_LENGTH (code); i++)
783 switch (*format_ptr++)
786 reset_used_flags_of_plus (XEXP (x, i));
790 for (j = 0; j < XVECLEN (x, i); j++)
791 reset_used_flags_of_plus (XVECEXP (x, i, j));
798 /* Generate the prologue insns of the protector into the specified insn. */
/* Locates the NOTE_INSN_FUNCTION_BEG note (skipping past the __main call
   in main when an init section is absent) and emits the canary load
   "guard_area = __guard" at that point.  */
800 rtl_prologue (rtx insn)
802 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
803 #undef HAS_INIT_SECTION
804 #define HAS_INIT_SECTION
809 for (; insn; insn = NEXT_INSN (insn))
810 if (GET_CODE (insn) == NOTE
811 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
814 #if !defined (HAS_INIT_SECTION)
815 /* If this function is `main', skip a call to `__main'
816 to run guard instruments after global initializers, etc. */
817 if (DECL_NAME (current_function_decl)
818 && MAIN_NAME_P (DECL_NAME (current_function_decl))
819 && DECL_CONTEXT (current_function_decl) == NULL_TREE)
822 for (; insn; insn = NEXT_INSN (insn))
823 if (GET_CODE (insn) == NOTE
824 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
831 /* Mark the next insn of FUNCTION_BEG insn. */
832 prologue_insert_point = NEXT_INSN (insn);
/* Load the global canary value into this frame's guard slot.  */
836 _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard"));
837 emit_move_insn ( guard_area, _guard);
842 emit_insn_before (_val, prologue_insert_point);
846 /* Generate the epilogue insns of the protector into the specified insn. */
/* Redirects every plain RETURN jump to a common label, then emits the
   canary check there: if guard_area != __guard, call
   __stack_smash_handler(function_name, guard) instead of returning.  */
848 rtl_epilogue (rtx insn)
854 int flag_have_return = FALSE;
862 return_label = gen_label_rtx ();
864 for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
865 if (GET_CODE (insn) == JUMP_INSN
866 && GET_CODE (PATTERN (insn)) == RETURN
867 && GET_MODE (PATTERN (insn)) == VOIDmode)
/* Rewrite "return" into "goto return_label" so all exits funnel
   through the guard comparison below.  */
869 rtx pat = gen_rtx_SET (VOIDmode,
871 gen_rtx_LABEL_REF (VOIDmode,
873 PATTERN (insn) = pat;
874 flag_have_return = TRUE;
878 emit_label (return_label);
882 /* if (guard_area != _guard) */
883 compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, NULL_RTX);
885 if_false_label = gen_label_rtx (); /* { */
886 emit_jump_insn ( gen_beq(if_false_label));
888 /* generate string for the current function name */
889 funcstr = build_string (strlen(current_function_name ())+1,
890 current_function_name ());
891 TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);
892 funcname = output_constant_def (funcstr, 1);
894 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__stack_smash_handler"),
896 XEXP (funcname, 0), Pmode, guard_area, GUARD_m);
898 /* generate RTL to return from the current function */
900 emit_barrier (); /* } */
901 emit_label (if_false_label);
903 /* generate RTL to return from the current function */
904 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
905 use_return_register ();
908 if (HAVE_return && flag_have_return)
910 emit_jump_insn (gen_return ());
918 emit_insn_after (_val, insn);
922 /* For every variable whose type is character array, moves its location
923 in the stack frame to the sweep_frame_offset position. */
/* Depth-first over BLOCK: sub-blocks first so inner arrays end up
   closest to the guard, then each qualifying BLKmode stack array is
   shifted up to the swept region via sweep_string_variable.  */
925 arrange_var_order (tree block)
928 HOST_WIDE_INT offset;
930 while (block && TREE_CODE(block)==BLOCK)
932 /* arrange the location of character arrays in depth first. */
933 arrange_var_order (BLOCK_SUBBLOCKS (block));
935 for (types = BLOCK_VARS (block); types; types = TREE_CHAIN(types))
937 /* Skip the declaration that refers an external variable. */
938 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
939 && TREE_CODE (types) == VAR_DECL
940 && ! DECL_ARTIFICIAL (types)
941 /* && ! DECL_COPIED (types): gcc3.4 can sweep inlined string. */
942 && DECL_RTL_SET_P (types)
943 && GET_CODE (DECL_RTL (types)) == MEM
944 && GET_MODE (DECL_RTL (types)) == BLKmode
947 search_string_def (TREE_TYPE (types))
948 || (! current_function_defines_vulnerable_string && is_array)))
950 rtx home = DECL_RTL (types);
/* Skip variable-sized objects addressed indirectly (same test as
   in search_string_from_local_vars).  */
952 if (!(GET_CODE (home) == MEM
953 && (GET_CODE (XEXP (home, 0)) == MEM
954 || (GET_CODE (XEXP (home, 0)) == REG
955 && XEXP (home, 0) != virtual_stack_vars_rtx
956 && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
957 && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
958 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
959 && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
963 /* Found a string variable. */
964 HOST_WIDE_INT var_size =
965 ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
968 /* Confirmed it is BLKmode. */
969 int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
970 var_size = CEIL_ROUND (var_size, alignment);
972 /* Skip the variable if it is top of the region
973 specified by sweep_frame_offset. */
974 offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
975 if (offset == sweep_frame_offset - var_size)
976 sweep_frame_offset -= var_size;
978 else if (offset < sweep_frame_offset - var_size)
979 sweep_string_variable (DECL_RTL (types), var_size);
984 block = BLOCK_CHAIN (block);
989 /* To protect every pointer argument and move character arrays in the argument,
990 Copy those variables to the top of the stack frame and move the location of
991 character arrays to the position of sweep_frame_offset. */
/* For each named, memory-passed parameter whose type holds a string or a
   pointer: copy it into a fresh pseudo/stack temp (so an overflow of a
   neighbouring array cannot clobber the live value), redirect all uses,
   and update debug info via SET_DECL_RTL.  */
993 copy_args_for_protection (void)
995 tree parms = DECL_ARGUMENTS (current_function_decl);
998 parms = DECL_ARGUMENTS (current_function_decl);
999 for (; parms; parms = TREE_CHAIN (parms))
1000 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1002 if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1007 string_p = search_string_def (TREE_TYPE(parms));
1009 /* Check if it is a candidate to move. */
1010 if (string_p || search_pointer_def (TREE_TYPE (parms)))
1013 = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
1015 tree passed_type = DECL_ARG_TYPE (parms);
1016 tree nominal_type = TREE_TYPE (parms);
/* Case 1: the parameter already lives in a register — duplicate
   it into a new pseudo and rewrite the uses.  */
1020 if (GET_CODE (DECL_RTL (parms)) == REG)
1024 change_arg_use_of_insns (prologue_insert_point,
1025 DECL_RTL (parms), &safe, 0);
1028 /* Generate codes for copying the content. */
1029 rtx movinsn = emit_move_insn (safe, DECL_RTL (parms));
1031 /* Avoid register elimination in gcse.c. */
/* Direct poke of the rtx "volatil" bit to pin the copy insn.  */
1032 PATTERN (movinsn)->volatil = 1;
1034 /* Save debugger info. */
1035 SET_DECL_RTL (parms, safe);
/* Case 2: parameter referenced through an ADDRESSOF MEM.  */
1038 else if (GET_CODE (DECL_RTL (parms)) == MEM
1039 && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
1042 rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
1044 /* Generate codes for copying the content. */
1045 movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
1046 /* Avoid register elimination in gcse.c. */
1047 PATTERN (movinsn)->volatil = 1;
1049 /* Change the addressof information to the newly
1050 allocated pseudo register. */
1051 emit_move_insn (DECL_RTL (parms), safe);
1053 /* Save debugger info. */
1054 SET_DECL_RTL (parms, safe);
1057 /* See if the frontend wants to pass this by invisible
/* Case 3: pass-by-invisible-reference — protect the hidden
   pointer itself.  */
1059 else if (passed_type != nominal_type
1060 && POINTER_TYPE_P (passed_type)
1061 && TREE_TYPE (passed_type) == nominal_type)
1063 rtx safe = 0, orig = XEXP (DECL_RTL (parms), 0);
1065 change_arg_use_of_insns (prologue_insert_point,
1069 /* Generate codes for copying the content. */
1070 rtx movinsn = emit_move_insn (safe, orig);
1072 /* Avoid register elimination in gcse.c */
1073 PATTERN (movinsn)->volatil = 1;
1075 /* Save debugger info. */
1076 SET_DECL_RTL (parms, safe);
/* Case 4: ordinary memory-passed parameter — allocate a stack
   temporary and move the parameter into it.  */
1082 /* Declare temporary local variable for parms. */
1084 = assign_stack_local (DECL_MODE (parms), arg_size,
1085 DECL_MODE (parms) == BLKmode ?
1088 MEM_IN_STRUCT_P (temp_rtx)
1089 = AGGREGATE_TYPE_P (TREE_TYPE (parms));
1090 set_mem_alias_set (temp_rtx, get_alias_set (parms));
1092 /* Generate codes for copying the content. */
1093 store_expr (parms, temp_rtx, 0);
1095 /* Change the reference for each instructions. */
1096 move_arg_location (prologue_insert_point, DECL_RTL (parms),
1097 temp_rtx, arg_size);
1099 /* Change the location of parms variable. */
1100 SET_DECL_RTL (parms, temp_rtx);
1105 emit_insn_before (seq, prologue_insert_point);
1107 #ifdef FRAME_GROWS_DOWNWARD
1108 /* Process the string argument. */
1109 if (string_p && DECL_MODE (parms) == BLKmode)
1111 int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1112 arg_size = CEIL_ROUND (arg_size, alignment);
1114 /* Change the reference for each instructions. */
1115 sweep_string_variable (DECL_RTL (parms), arg_size);
1124 /* Sweep a string variable to the position of sweep_frame_offset in the
1125 stack frame, that is a last position of string variables. */
/* SWEEP_VAR may be a frame MEM, an ADDRESSOF MEM, or a bare CONST_INT
   offset; VAR_SIZE is the (aligned) byte size being relocated.  Applies
   the relocation to decls, argument homes and every insn, then clears
   the "used" markers so the next sweep starts clean.  */
1127 sweep_string_variable (rtx sweep_var, HOST_WIDE_INT var_size)
1129 HOST_WIDE_INT sweep_offset;
1131 switch (GET_CODE (sweep_var))
1134 if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
1135 && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG
1137 sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
1140 sweep_offset = INTVAL (sweep_var);
1146 /* Scan all declarations of variables and fix the offset address of
1147 the variable based on the frame pointer. */
1148 sweep_string_in_decls (DECL_INITIAL (current_function_decl),
1149 sweep_offset, var_size);
1151 /* Scan all argument variable and fix the offset address based on
1152 the frame pointer. */
1153 sweep_string_in_args (DECL_ARGUMENTS (current_function_decl),
1154 sweep_offset, var_size);
1156 /* For making room for sweep variable, scan all insns and
1157 fix the offset address of the variable that is based on frame pointer. */
1158 sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
1161 /* Clear all the USED bits in operands of all insns and declarations of
1163 reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
1164 reset_used_flags_for_insns (function_first_insn);
/* The swept region has grown downward by VAR_SIZE.  */
1166 sweep_frame_offset -= var_size;
1171 /* Move an argument to the local variable addressed by frame_offset. */
/* ORIG is the parameter's old home, NEW the temporary allocated in
   copy_args_for_protection; rewrites all uses then clears the insn
   "used" markers.  (NEW is a valid identifier in C, though it would
   clash with C++.)  */
1173 move_arg_location (rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size)
1175 /* For making room for sweep variable, scan all insns and
1176 fix the offset address of the variable that is based on frame pointer. */
1177 change_arg_use_of_insns (insn, orig, &new, var_size);
1180 /* Clear all the USED bits in operands of all insns and declarations
1181 of local variables. */
1182 reset_used_flags_for_insns (insn);
1186 /* Sweep character arrays declared as local variable. */
/* Relocates every frame-pointer-relative DECL_RTL in BLOCK (recursively):
   addresses inside [sweep_offset, sweep_offset+sweep_size) move up to the
   swept region; addresses between there and sweep_frame_offset shift down
   by sweep_size.  Each rewritten address gets its "used" bit set so it is
   not shifted twice.  */
1188 sweep_string_in_decls (tree block, HOST_WIDE_INT sweep_offset,
1189 HOST_WIDE_INT sweep_size)
1192 HOST_WIDE_INT offset;
1195 while (block && TREE_CODE(block)==BLOCK)
1197 for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
1199 /* Skip the declaration that refers an external variable and
1200 also skip an global variable. */
1201 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {
1203 if (! DECL_RTL_SET_P (types))
1206 home = DECL_RTL (types);
1208 /* Process for static local variable. */
1209 if (GET_CODE (home) == MEM
1210 && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
/* Zero-offset home: address is the frame pointer itself.  */
1213 if (GET_CODE (home) == MEM
1214 && XEXP (home, 0) == virtual_stack_vars_rtx)
1218 /* the operand related to the sweep variable. */
1219 if (sweep_offset <= offset
1220 && offset < sweep_offset + sweep_size)
1222 offset = sweep_frame_offset - sweep_size - sweep_offset;
1224 XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
1226 XEXP (home, 0)->used = 1;
1228 else if (sweep_offset <= offset
1229 && offset < sweep_frame_offset)
1231 /* the rest of variables under sweep_frame_offset,
1232 shift the location. */
1233 XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
1235 XEXP (home, 0)->used = 1;
1239 if (GET_CODE (home) == MEM
1240 && GET_CODE (XEXP (home, 0)) == MEM)
1242 /* Process for dynamically allocated array. */
1243 home = XEXP (home, 0);
1246 if (GET_CODE (home) == MEM
1247 && GET_CODE (XEXP (home, 0)) == PLUS
1248 && XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
1249 && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
/* Only rewrite addresses not already relocated this sweep.  */
1251 if (! XEXP (home, 0)->used)
1253 offset = AUTO_OFFSET(XEXP (home, 0));
1255 /* the operand related to the sweep variable. */
1256 if (sweep_offset <= offset
1257 && offset < sweep_offset + sweep_size)
1261 += sweep_frame_offset - sweep_size - sweep_offset;
1262 XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
1266 XEXP (home, 0)->used = 1;
1268 else if (sweep_offset <= offset
1269 && offset < sweep_frame_offset)
1271 /* the rest of variables under sweep_frame_offset,
1272 so shift the location. */
1274 XEXP (XEXP (home, 0), 1)
1275 = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size)
1278 XEXP (home, 0)->used = 1;
1285 sweep_string_in_decls (BLOCK_SUBBLOCKS (block),
1286 sweep_offset, sweep_size);
1288 block = BLOCK_CHAIN (block);
1293 /* Sweep character arrays declared as argument. */
/* Same relocation as sweep_string_in_decls, applied to the
   DECL_INCOMING_RTL home of each memory-passed named parameter.  */
1295 sweep_string_in_args (tree parms, HOST_WIDE_INT sweep_offset,
1296 HOST_WIDE_INT sweep_size)
1299 HOST_WIDE_INT offset;
1301 for (; parms; parms = TREE_CHAIN (parms))
1302 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1304 if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1306 home = DECL_INCOMING_RTL (parms);
/* Skip addresses already rewritten during this sweep.  */
1308 if (XEXP (home, 0)->used)
1311 offset = AUTO_OFFSET(XEXP (home, 0));
1313 /* the operand related to the sweep variable. */
1314 if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
1316 if (sweep_offset <= offset
1317 && offset < sweep_offset + sweep_size)
1319 offset += sweep_frame_offset - sweep_size - sweep_offset;
1320 XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
1324 XEXP (home, 0)->used = 1;
1326 else if (sweep_offset <= offset
1327 && offset < sweep_frame_offset)
1329 /* the rest of variables under sweep_frame_offset,
1330 shift the location. */
1331 XEXP (XEXP (home, 0), 1)
1332 = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1335 XEXP (home, 0)->used = 1;
1343 /* Set to 1 when the instruction contains virtual registers. */
1344 static int has_virtual_reg;
1346 /* Sweep the specified character array for every insns. The array starts from
1347 the sweep_offset and its size is sweep_size. */
1349 sweep_string_use_of_insns (rtx insn, HOST_WIDE_INT sweep_offset,
1350 HOST_WIDE_INT sweep_size)
1352 for (; insn; insn = NEXT_INSN (insn))
1353 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1354 || GET_CODE (insn) == CALL_INSN)
1356 has_virtual_reg = FALSE;
1357 sweep_string_in_operand (insn, &PATTERN (insn),
1358 sweep_offset, sweep_size);
1359 sweep_string_in_operand (insn, ®_NOTES (insn),
1360 sweep_offset, sweep_size);
1365 /* Sweep the specified character array, which starts from the sweep_offset and
1366 its size is sweep_size.
1368 When a pointer is given,
1369 if it points the address higher than the array, it stays.
1370 if it points the address inside the array, it changes to point inside
1372 if it points the address lower than the array, it shifts higher address by
   NOTE(review): this excerpt elides some original lines (gaps in the embedded
   numbering), so several braces/declarations are not visible here. */
1375 sweep_string_in_operand (rtx insn, rtx *loc,
1376 HOST_WIDE_INT sweep_offset, HOST_WIDE_INT sweep_size)
1381 HOST_WIDE_INT offset;
1387 code = GET_CODE (x);
/* Any direct use of a virtual register forces re-validation of the insn
   below (single_use_of_virtual_reg). */
1406 if (x == virtual_incoming_args_rtx
1407 || x == virtual_stack_vars_rtx
1408 || x == virtual_stack_dynamic_rtx
1409 || x == virtual_outgoing_args_rtx
1410 || x == virtual_cfa_rtx)
1411 has_virtual_reg = TRUE;
1416 skip setjmp setup insn and setjmp restore insn
1418 (set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
1419 (set (virtual_stack_vars_rtx) (REG))
1421 if (GET_CODE (XEXP (x, 0)) == MEM
1422 && XEXP (x, 1) == virtual_stack_vars_rtx)
1424 if (XEXP (x, 0) == virtual_stack_vars_rtx
1425 && GET_CODE (XEXP (x, 1)) == REG)
1430 /* Handle typical case of frame register plus constant. */
1431 if (XEXP (x, 0) == virtual_stack_vars_rtx
1432 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1435 goto single_use_of_virtual_reg;
1437 offset = AUTO_OFFSET(x);
1439 /* When arguments grow downward, the virtual incoming
1440 args pointer points to the top of the argument block,
1441 so block is identified by the pointer - 1.
1442 The flag is set at the copy_rtx_and_substitute in integrate.c */
1443 if (RTX_INTEGRATED_P (x))
/* Operand inside the swept buffer: relocate it to the buffer's new home
   just below sweep_frame_offset. */
1446 /* the operand related to the sweep variable. */
1447 if (sweep_offset <= offset + k
1448 && offset + k < sweep_offset + sweep_size)
1450 offset += sweep_frame_offset - sweep_size - sweep_offset;
1452 XEXP (x, 0) = virtual_stack_vars_rtx;
1453 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1456 else if (sweep_offset <= offset + k
1457 && offset + k < sweep_frame_offset)
1459 /* the rest of variables under sweep_frame_offset,
1460 shift the location. */
1461 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1465 single_use_of_virtual_reg:
1466 if (has_virtual_reg) {
1467 /* excerpt from insn_invalid_p in recog.c */
1468 int icode = recog_memoized (insn);
1470 if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
/* The rewritten address no longer matches a machine pattern: force it
   into a valid operand and substitute, emitting setup insns first. */
1475 temp = force_operand (x, NULL_RTX);
1479 emit_insn_before (seq, insn);
1480 if (! validate_change (insn, loc, temp, 0)
1481 && !validate_replace_rtx (x, temp, insn))
1482 fatal_insn ("sweep_string_in_operand", insn);
1486 has_virtual_reg = TRUE;
1490 #ifdef FRAME_GROWS_DOWNWARD
1491 /* Alert the case of frame register plus constant given by reg. */
1492 else if (XEXP (x, 0) == virtual_stack_vars_rtx
1493 && GET_CODE (XEXP (x, 1)) == REG)
1494 fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
1498 process further subtree:
1499 Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
/* A CALL_PLACEHOLDER holds up to three alternative insn sequences;
   sweep each one inside its own sequence context. */
1504 case CALL_PLACEHOLDER:
1505 for (i = 0; i < 3; i++)
1507 rtx seq = XEXP (x, i);
1510 push_to_sequence (seq);
1511 sweep_string_use_of_insns (XEXP (x, i),
1512 sweep_offset, sweep_size);
1513 XEXP (x, i) = get_insns ();
1523 /* Scan all subexpressions. */
1524 fmt = GET_RTX_FORMAT (code);
1525 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1529 virtual_stack_vars_rtx without offset
1531 (set (reg:SI xx) (reg:SI 78))
1532 (set (reg:SI xx) (MEM (reg:SI 78)))
1534 if (XEXP (x, i) == virtual_stack_vars_rtx)
1535 fatal_insn ("sweep_string_in_operand: unknown fp usage", insn)
1536 sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
1538 else if (*fmt == 'E')
1539 for (j = 0; j < XVECLEN (x, i); j++)
1540 sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
1544 /* Change the use of an argument to the use of the duplicated variable for
1545 every insns, The variable is addressed by new rtx.
   ORIG is the incoming-argument location, *NEW its frame-local duplicate,
   SIZE the number of bytes duplicated.
   NOTE(review): excerpt elides some original lines (gaps in numbering). */
1547 change_arg_use_of_insns (rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size)
1549 for (; insn; insn = NEXT_INSN (insn))
1550 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1551 || GET_CODE (insn) == CALL_INSN)
1556 change_arg_use_in_operand (insn, PATTERN (insn), orig, new, size);
1560 emit_insn_before (seq, insn);
1562 /* load_multiple insn from virtual_incoming_args_rtx have several
1563 load insns. If every insn change the load address of arg
1564 to frame region, those insns are moved before the PARALLEL insn
1565 and remove the PARALLEL insn. */
1566 if (GET_CODE (PATTERN (insn)) == PARALLEL
1567 && XVECLEN (PATTERN (insn), 0) == 0)
1573 /* Change the use of an argument to the use of the duplicated variable for
1574 every rtx derived from the x.
   Rewrites references into [ORIG, ORIG+SIZE) so they address the duplicate
   at *NEW instead; may allocate *NEW (a fresh pseudo) on first direct use.
   NOTE(review): excerpt elides some original lines (gaps in numbering). */
1576 change_arg_use_in_operand (rtx insn, rtx x, rtx orig, rtx *new, HOST_WIDE_INT size)
1580 HOST_WIDE_INT offset;
1586 code = GET_CODE (x);
1606 /* Handle special case of MEM (incoming_args). */
1607 if (GET_CODE (orig) == MEM
1608 && XEXP (x, 0) == virtual_incoming_args_rtx)
1612 /* the operand related to the sweep variable. */
1613 if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
1614 offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
/* Translate the offset from the argument block into the duplicate's
   frame-relative location. */
1616 offset = AUTO_OFFSET(XEXP (*new, 0))
1617 + (offset - AUTO_OFFSET(XEXP (orig, 0)));
1619 XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
1620 XEXP (x, 0)->used = 1;
1628 /* Handle special case of frame register plus constant. */
1629 if (GET_CODE (orig) == MEM
1630 && XEXP (x, 0) == virtual_incoming_args_rtx
1631 && GET_CODE (XEXP (x, 1)) == CONST_INT
1634 offset = AUTO_OFFSET(x);
1636 /* the operand related to the sweep variable. */
1637 if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
1638 offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
1641 offset = (AUTO_OFFSET(XEXP (*new, 0))
1642 + (offset - AUTO_OFFSET(XEXP (orig, 0))));
1644 XEXP (x, 0) = virtual_stack_vars_rtx;
1645 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1652 process further subtree:
1653 Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
1660 /* Handle special case of "set (REG or MEM) (incoming_args)".
1661 It means that the address of the 1st argument is stored. */
1662 if (GET_CODE (orig) == MEM
1663 && XEXP (x, 1) == virtual_incoming_args_rtx)
1667 /* the operand related to the sweep variable. */
1668 if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
1669 offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
1671 offset = (AUTO_OFFSET(XEXP (*new, 0))
1672 + (offset - AUTO_OFFSET(XEXP (orig, 0))));
1674 XEXP (x, 1) = force_operand (plus_constant (virtual_stack_vars_rtx,
1676 XEXP (x, 1)->used = 1;
/* Recurse into the alternative sequences of a CALL_PLACEHOLDER. */
1683 case CALL_PLACEHOLDER:
1684 for (i = 0; i < 3; i++)
1686 rtx seq = XEXP (x, i);
1689 push_to_sequence (seq);
1690 change_arg_use_of_insns (XEXP (x, i), orig, new, size);
1691 XEXP (x, i) = get_insns ();
1698 for (j = 0; j < XVECLEN (x, 0); j++)
1700 change_arg_use_in_operand (insn, XVECEXP (x, 0, j), orig, new, size);
/* If the rewritten PARALLEL no longer matches a machine pattern, pull
   the rewritten sub-insns out and compact the vector in place. */
1702 if (recog_memoized (insn) < 0)
1704 for (i = 0, j = 0; j < XVECLEN (x, 0); j++)
1706 /* if parallel insn has a insn used virtual_incoming_args_rtx,
1707 the insn is removed from this PARALLEL insn. */
1708 if (check_used_flag (XVECEXP (x, 0, j)))
1710 emit_insn (XVECEXP (x, 0, j));
1711 XVECEXP (x, 0, j) = NULL;
1714 XVECEXP (x, 0, i++) = XVECEXP (x, 0, j);
1716 PUT_NUM_ELEM (XVEC (x, 0), i);
1724 /* Scan all subexpressions. */
1725 fmt = GET_RTX_FORMAT (code);
1726 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1729 if (XEXP (x, i) == orig)
/* First direct use of ORIG: materialize the duplicate pseudo. */
1732 *new = gen_reg_rtx (GET_MODE (orig));
1736 change_arg_use_in_operand (insn, XEXP (x, i), orig, new, size);
1738 else if (*fmt == 'E')
1739 for (j = 0; j < XVECLEN (x, i); j++)
1741 if (XVECEXP (x, i, j) == orig)
1744 *new = gen_reg_rtx (GET_MODE (orig));
1745 XVECEXP (x, i, j) = *new;
1748 change_arg_use_in_operand (insn, XVECEXP (x, i, j), orig, new, size);
1753 /* Validate every instructions from the specified instruction.
1755 The stack protector prohibits to generate machine specific frame addressing
1756 for the first rtl generation. The prepare_stack_protection must convert
1757 machine independent frame addressing to machine specific frame addressing,
1758 so instructions for inline functions, which skip the conversion of
1759 the stack protection, validate every instructions.
   NOTE(review): excerpt elides some original lines (gaps in numbering). */
1761 validate_insns_of_varrefs (rtx insn)
1765 /* Initialize recognition, indicating that volatile is OK. */
/* NEXT_INSN is cached before processing because validation may insert
   insns in front of the current one. */
1768 for (; insn; insn = next)
1770 next = NEXT_INSN (insn);
1771 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1772 || GET_CODE (insn) == CALL_INSN)
1774 /* excerpt from insn_invalid_p in recog.c */
1775 int icode = recog_memoized (insn);
/* Only insns that match no pattern (and are not asm) need fixing. */
1777 if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
1778 validate_operand_of_varrefs (insn, &PATTERN (insn));
1782 init_recog_no_volatile ();
1786 /* Validate frame addressing of the rtx and convert it to machine specific one.
   NOTE(review): excerpt elides some original lines (gaps in numbering). */
1788 validate_operand_of_varrefs (rtx insn, rtx *loc)
1799 code = GET_CODE (x);
1820 /* validate insn of frame register plus constant. */
1821 if (GET_CODE (x) == PLUS
1822 && XEXP (x, 0) == virtual_stack_vars_rtx
1823 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1827 { /* excerpt from expand_binop in optabs.c */
1828 optab binoptab = add_optab;
1829 enum machine_mode mode = GET_MODE (x);
1830 int icode = (int) binoptab->handlers[(int) mode].insn_code;
1831 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
1833 rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
/* TEMP receives fp+const computed by the machine's own add pattern. */
1834 temp = gen_reg_rtx (mode);
1836 /* Now, if insn's predicates don't allow offset operands,
1837 put them into pseudo regs. */
1839 if (! (*insn_data[icode].operand[2].predicate) (xop1, mode1)
1840 && mode1 != VOIDmode)
1841 xop1 = copy_to_mode_reg (mode1, xop1);
1843 pat = GEN_FCN (icode) (temp, xop0, xop1);
1847 abort (); /* there must be add_optab handler. */
1852 emit_insn_before (seq, insn);
1853 if (! validate_change (insn, loc, temp, 0))
/* Recurse into the alternative sequences of a CALL_PLACEHOLDER. */
1860 case CALL_PLACEHOLDER:
1861 for (i = 0; i < 3; i++)
1863 rtx seq = XEXP (x, i);
1866 push_to_sequence (seq);
1867 validate_insns_of_varrefs (XEXP (x, i));
1868 XEXP (x, i) = get_insns ();
1878 /* Scan all subexpressions. */
1879 fmt = GET_RTX_FORMAT (code);
1880 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1882 validate_operand_of_varrefs (insn, &XEXP (x, i));
1883 else if (*fmt == 'E')
1884 for (j = 0; j < XVECLEN (x, i); j++)
1885 validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
1890 /* Return size that is not allocated for stack frame. It will be allocated
1891 to modify the home of pseudo registers called from global_alloc.
   Returns the slack between what push_frame reserved (push_allocated_offset)
   and what has actually been handed out (push_frame_offset); presumably 0
   when the protector is off — the elided branch is not visible here. */
1893 get_frame_free_size (void)
1895 if (! flag_propolice_protection)
1898 return push_allocated_offset - push_frame_offset;
1902 /* The following codes are invoked after the instantiation of pseudo registers.
1904 Reorder local variables to place a peudo register after buffers to avoid
1905 the corruption of local variables that could be used to further corrupt
1906 arbitrary memory locations. */
1907 #if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
1908 static void push_frame (HOST_WIDE_INT, HOST_WIDE_INT);
1909 static void push_frame_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
1910 static void push_frame_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
1911 static void push_frame_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
1912 static void push_frame_in_operand (rtx, rtx, HOST_WIDE_INT, HOST_WIDE_INT);
1913 static void push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT, HOST_WIDE_INT);
1914 static void push_frame_of_reg_equiv_constant (HOST_WIDE_INT, HOST_WIDE_INT);
1915 static void reset_used_flags_for_push_frame (void);
1916 static int check_out_of_frame_access (rtx, HOST_WIDE_INT);
1917 static int check_out_of_frame_access_in_operand (rtx, HOST_WIDE_INT);
1921 /* Assign stack local at the stage of register allocater. if a pseudo reg is
1922 spilled out from such an allocation, it is allocated on the stack.
1923 The protector keep the location be lower stack region than the location of
   NOTE(review): excerpt elides some original lines (gaps in numbering). */
1926 assign_stack_local_for_pseudo_reg (enum machine_mode mode,
1927 HOST_WIDE_INT size, int align)
/* Without a growing-upward frame there is nothing to protect specially;
   fall straight through to the generic allocator. */
1929 #if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
1930 return assign_stack_local (mode, size, align);
1932 tree blocks = DECL_INITIAL (current_function_decl);
1934 HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
1935 int first_call_from_purge_addressof, first_call_from_global_alloc;
1937 if (! flag_propolice_protection
1940 || current_function_is_inlinable
1941 || ! search_string_from_argsandvars (CALL_FROM_PUSH_FRAME)
1942 || current_function_contains_functions)
1943 return assign_stack_local (mode, size, align);
/* Detect which compilation phase is calling us: purge_addressof runs
   before CSE is finished, global_alloc after. */
1945 first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
1946 first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
1947 saved_cse_not_expected = cse_not_expected;
1949 starting_frame = ((STARTING_FRAME_OFFSET)
1950 ? STARTING_FRAME_OFFSET : BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1951 units_per_push = MAX (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1952 GET_MODE_SIZE (mode));
1954 if (first_call_from_purge_addressof)
1956 push_frame_offset = push_allocated_offset;
1957 if (check_out_of_frame_access (get_insns (), starting_frame))
1959 /* After the purge_addressof stage, there may be an instruction which
1960 have the pointer less than the starting_frame.
1961 if there is an access below frame, push dummy region to seperate
1962 the address of instantiated variables. */
1963 push_frame (GET_MODE_SIZE (DImode), 0);
1964 assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
1968 if (first_call_from_global_alloc)
1970 push_frame_offset = push_allocated_offset = 0;
1971 if (check_out_of_frame_access (get_insns (), starting_frame))
1973 if (STARTING_FRAME_OFFSET)
1975 /* if there is an access below frame, push dummy region
1976 to seperate the address of instantiated variables. */
1977 push_frame (GET_MODE_SIZE (DImode), 0);
1978 assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
1981 push_allocated_offset = starting_frame;
/* Allocate the slot inside the reserved push region by temporarily
   swapping frame_offset with our private cursor. */
1985 saved_frame_offset = frame_offset;
1986 frame_offset = push_frame_offset;
1988 new = assign_stack_local (mode, size, align);
1990 push_frame_offset = frame_offset;
1991 frame_offset = saved_frame_offset;
/* Reserved region exhausted: grow the frame by one push unit and
   account for it. */
1993 if (push_frame_offset > push_allocated_offset)
1995 push_frame (units_per_push,
1996 push_allocated_offset + STARTING_FRAME_OFFSET);
1998 assign_stack_local (BLKmode, units_per_push, -1);
1999 push_allocated_offset += units_per_push;
2002 /* At the second call from global alloc, alpha push frame and assign
2003 a local variable to the top of the stack. */
2004 if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
2005 push_frame_offset = push_allocated_offset = 0;
2012 #if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2014 /* push frame infomation for instantiating pseudo register at the top of stack.
2015 This is only for the "frame grows upward", it means FRAME_GROWS_DOWNWARD is
2018 It is called by purge_addressof function and global_alloc (or reload)
   Shifts every frame reference at or above BOUNDARY up by VAR_SIZE bytes,
   across declarations, arguments, insns, and reload equivalences.
   NOTE(review): excerpt elides some original lines (gaps in numbering). */
2021 push_frame (HOST_WIDE_INT var_size, HOST_WIDE_INT boundary)
/* Clear USED marks first so each rtx is adjusted exactly once. */
2023 reset_used_flags_for_push_frame();
2025 /* Scan all declarations of variables and fix the offset address of
2026 the variable based on the frame pointer. */
2027 push_frame_in_decls (DECL_INITIAL (current_function_decl),
2028 var_size, boundary);
2030 /* Scan all argument variable and fix the offset address based on
2031 the frame pointer. */
2032 push_frame_in_args (DECL_ARGUMENTS (current_function_decl),
2033 var_size, boundary);
2035 /* Scan all operands of all insns and fix the offset address
2036 based on the frame pointer. */
2037 push_frame_of_insns (get_insns (), var_size, boundary);
2039 /* Scan all reg_equiv_memory_loc and reg_equiv_constant. */
2040 push_frame_of_reg_equiv_memory_loc (var_size, boundary);
2041 push_frame_of_reg_equiv_constant (var_size, boundary);
/* Clear the marks again so later passes start from a clean state. */
2043 reset_used_flags_for_push_frame();
2047 /* Reset used flag of every insns, reg_equiv_memory_loc,
2048 and reg_equiv_constant.
   NOTE(review): excerpt elides some original lines (gaps in numbering). */
2050 reset_used_flags_for_push_frame(void)
/* Reload's equivalence tables live in reload1.c; declared extern here. */
2053 extern rtx *reg_equiv_memory_loc;
2054 extern rtx *reg_equiv_constant;
2056 /* Clear all the USED bits in operands of all insns and declarations of
2058 reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
2059 reset_used_flags_for_insns (get_insns ());
2062 /* The following codes are processed if the push_frame is called from
2063 global_alloc (or reload) function. */
2064 if (reg_equiv_memory_loc == 0)
2067 for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2068 if (reg_equiv_memory_loc[i])
2070 rtx x = reg_equiv_memory_loc[i];
/* Only fp-relative MEM addresses carry the mark worth clearing. */
2072 if (GET_CODE (x) == MEM
2073 && GET_CODE (XEXP (x, 0)) == PLUS
2074 && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
2077 XEXP (x, 0)->used = 0;
2082 if (reg_equiv_constant == 0)
2085 for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2086 if (reg_equiv_constant[i])
2088 rtx x = reg_equiv_constant[i];
2090 if (GET_CODE (x) == PLUS
2091 && AUTO_BASEPTR (x) == frame_pointer_rtx)
2100 /* Push every variables declared as a local variable and make a room for
2101 instantiated register.
   Walks the BLOCK tree recursively, shifting each fp-relative DECL_RTL at or
   above BOUNDARY up by PUSH_SIZE.
   NOTE(review): excerpt elides some original lines (gaps in numbering). */
2103 push_frame_in_decls (tree block, HOST_WIDE_INT push_size,
2104 HOST_WIDE_INT boundary)
2107 HOST_WIDE_INT offset;
2110 while (block && TREE_CODE(block)==BLOCK)
2112 for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
2114 /* Skip the declaration that refers an external variable and
2115 also skip an global variable. */
2116 if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
2118 if (! DECL_RTL_SET_P (types))
2121 home = DECL_RTL (types);
2123 /* Process for static local variable. */
2124 if (GET_CODE (home) == MEM
2125 && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
/* Variable living exactly at the frame pointer (no offset):
   rewrite as fp + push_size and mark as handled. */
2128 if (GET_CODE (home) == MEM
2129 && GET_CODE (XEXP (home, 0)) == REG)
2131 if (XEXP (home, 0) != frame_pointer_rtx
2135 XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2139 XEXP (home, 0)->used = 1;
2142 if (GET_CODE (home) == MEM
2143 && GET_CODE (XEXP (home, 0)) == MEM)
2145 /* Process for dynamically allocated array. */
2146 home = XEXP (home, 0);
2149 if (GET_CODE (home) == MEM
2150 && GET_CODE (XEXP (home, 0)) == PLUS
2151 && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
2153 offset = AUTO_OFFSET(XEXP (home, 0));
/* Shift only unmarked addresses above the boundary; USED prevents
   double-shifting shared rtx. */
2155 if (! XEXP (home, 0)->used
2156 && offset >= boundary)
2158 offset += push_size;
2159 XEXP (XEXP (home, 0), 1)
2160 = gen_rtx_CONST_INT (VOIDmode, offset);
2163 XEXP (home, 0)->used = 1;
2169 push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
2170 block = BLOCK_CHAIN (block);
2175 /* Push every variables declared as an argument and make a room for
2176 instantiated register.
   Adjusts DECL_INCOMING_RTL of memory-passed parameters the same way
   push_frame_in_decls adjusts locals.
   NOTE(review): excerpt elides some original lines (gaps in numbering). */
2178 push_frame_in_args (tree parms, HOST_WIDE_INT push_size,
2179 HOST_WIDE_INT boundary)
2182 HOST_WIDE_INT offset;
2184 for (; parms; parms = TREE_CHAIN (parms))
2185 if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
2187 if (PARM_PASSED_IN_MEMORY (parms))
2189 home = DECL_INCOMING_RTL (parms);
2190 offset = AUTO_OFFSET(XEXP (home, 0));
/* Already adjusted, or below the push boundary: leave untouched. */
2192 if (XEXP (home, 0)->used || offset < boundary)
2195 /* the operand related to the sweep variable. */
2196 if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
2198 if (XEXP (home, 0) == frame_pointer_rtx)
2199 XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2202 offset += push_size;
2203 XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
2208 XEXP (home, 0)->used = 1;
2215 /* Set to 1 when the instruction has the reference to be pushed. */
2216 static int insn_pushed;
2218 /* Tables of equivalent registers with frame pointer. */
2219 static int *fp_equiv = 0;
2222 /* Push the frame region to make a room for allocated local variable.
   Scans the insn chain, rewrites fp-relative operands, and re-splits any
   insn whose pattern the rewrite invalidated.
   NOTE(review): excerpt elides some original lines (gaps in numbering). */
2224 push_frame_of_insns (rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
/* fp_equiv[regno] records a known fp+const equivalence per pseudo. */
2227 fp_equiv = (int *) xcalloc (max_reg_num (), sizeof (int));
2229 for (; insn; insn = NEXT_INSN (insn))
2230 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2231 || GET_CODE (insn) == CALL_INSN)
2235 insn_pushed = FALSE;
2237 /* Push frame in INSN operation. */
2238 push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
2240 /* Push frame in NOTE. */
2241 push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
2243 /* Push frame in CALL EXPR_LIST. */
2244 if (GET_CODE (insn) == CALL_INSN)
2245 push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn),
2246 push_size, boundary);
2248 /* Pushed frame addressing style may not be machine specific one.
2249 so the instruction should be converted to use the machine specific
2250 frame addressing. */
2252 && (last = try_split (PATTERN (insn), insn, 1)) != insn)
2254 rtx first = NEXT_INSN (insn);
2255 rtx trial = NEXT_INSN (first);
2256 rtx pattern = PATTERN (trial);
2259 /* Update REG_EQUIV info to the first splitted insn. */
2260 if ((set = single_set (insn))
2261 && find_reg_note (insn, REG_EQUIV, SET_SRC (set))
2262 && GET_CODE (PATTERN (first)) == SET)
2265 = gen_rtx_EXPR_LIST (REG_EQUIV,
2266 SET_SRC (PATTERN (first)),
2270 /* copy the first insn of splitted insns to the original insn and
2271 delete the first insn,
2272 because the original insn is pointed from records:
2273 insn_chain, reg_equiv_init, used for global_alloc. */
2274 if (cse_not_expected)
2276 add_insn_before (insn, first);
2278 /* Copy the various flags, and other information. */
2279 memcpy (insn, first, sizeof (struct rtx_def) - sizeof (rtunion));
2280 PATTERN (insn) = PATTERN (first);
2281 REG_NOTES (insn) = REG_NOTES (first);
2283 /* then remove the first insn of splitted insns. */
2284 remove_insn (first);
2285 INSN_DELETED_P (first) = 1;
/* Split produced "reg = reg + const": remember the equivalence in
   fp_equiv and drop the now-redundant second insn. */
2288 if (GET_CODE (pattern) == SET
2289 && GET_CODE (XEXP (pattern, 0)) == REG
2290 && GET_CODE (XEXP (pattern, 1)) == PLUS
2291 && XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
2292 && GET_CODE (XEXP (XEXP (pattern, 1), 1)) == CONST_INT)
2294 rtx offset = XEXP (XEXP (pattern, 1), 1);
2295 fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
2297 delete_insn (trial);
2309 /* Push the frame region by changing the operand that points the frame.
   Recursively rewrites every fp+const address at or above BOUNDARY, adding
   PUSH_SIZE; sets insn_pushed when the insn needs re-splitting afterwards.
   NOTE(review): excerpt elides some original lines (gaps in numbering). */
2311 push_frame_in_operand (rtx insn, rtx orig,
2312 HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
2317 HOST_WIDE_INT offset;
2323 code = GET_CODE (x);
2345 Skip setjmp setup insn and setjmp restore insn
2347 (set (MEM (reg:SI xx)) (frame_pointer_rtx)))
2348 (set (frame_pointer_rtx) (REG))
2350 if (GET_CODE (XEXP (x, 0)) == MEM
2351 && XEXP (x, 1) == frame_pointer_rtx)
2353 if (XEXP (x, 0) == frame_pointer_rtx
2354 && GET_CODE (XEXP (x, 1)) == REG)
2358 powerpc case: restores setjmp address
2359 (set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
2361 (set (reg) (plus frame_pointer_rtx const_int -n))
2362 (set (frame_pointer_rtx) (reg))
2364 if (GET_CODE (XEXP (x, 0)) == REG
2365 && GET_CODE (XEXP (x, 1)) == PLUS
2366 && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
2367 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2368 && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
2371 offset = AUTO_OFFSET(x);
2372 if (x->used || -offset < boundary)
2375 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
2376 x->used = 1; insn_pushed = TRUE;
2380 /* Reset fp_equiv register. */
2381 else if (GET_CODE (XEXP (x, 0)) == REG
2382 && fp_equiv[REGNO (XEXP (x, 0))])
2383 fp_equiv[REGNO (XEXP (x, 0))] = 0;
2385 /* Propagete fp_equiv register. */
2386 else if (GET_CODE (XEXP (x, 0)) == REG
2387 && GET_CODE (XEXP (x, 1)) == REG
2388 && fp_equiv[REGNO (XEXP (x, 1))])
2389 if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
2390 || reg_renumber[REGNO (XEXP (x, 0))] > 0)
2391 fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
/* Bare frame pointer used as an address: shift it by push_size. */
2395 if (XEXP (x, 0) == frame_pointer_rtx
2398 XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2399 XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2405 /* Handle special case of frame register plus constant. */
2406 if (GET_CODE (XEXP (x, 1)) == CONST_INT
2407 && XEXP (x, 0) == frame_pointer_rtx)
2409 offset = AUTO_OFFSET(x);
2411 if (x->used || offset < boundary)
2414 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2415 x->used = 1; insn_pushed = TRUE;
2421 (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
2423 if (GET_CODE (XEXP (x, 1)) == CONST_INT
2424 && GET_CODE (XEXP (x, 0)) == SUBREG
2425 && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
2427 offset = AUTO_OFFSET(x);
2429 if (x->used || offset < boundary)
2432 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2433 x->used = 1; insn_pushed = TRUE;
2438 Handle powerpc case:
2439 (set (reg x) (plus fp const))
2440 (set (.....) (... (plus (reg x) (const B))))
2442 else if (GET_CODE (XEXP (x, 1)) == CONST_INT
2443 && GET_CODE (XEXP (x, 0)) == REG
2444 && fp_equiv[REGNO (XEXP (x, 0))])
2446 offset = AUTO_OFFSET(x);
/* Fold the recorded fp+const equivalence into this address. */
2451 offset += fp_equiv[REGNO (XEXP (x, 0))];
2453 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2454 x->used = 1; insn_pushed = TRUE;
2459 Handle special case of frame register plus reg (constant).
2460 (set (reg x) (const B))
2461 (set (....) (...(plus fp (reg x))))
2463 else if (XEXP (x, 0) == frame_pointer_rtx
2464 && GET_CODE (XEXP (x, 1)) == REG
2466 && PATTERN (PREV_INSN (insn))
2467 && SET_DEST (PATTERN (PREV_INSN (insn))) == XEXP (x, 1)
2468 && GET_CODE (SET_SRC (PATTERN (PREV_INSN (insn)))) == CONST_INT)
2470 offset = INTVAL (SET_SRC (PATTERN (PREV_INSN (insn))));
2472 if (x->used || offset < boundary)
/* Adjust the constant in the feeding insn rather than here. */
2475 SET_SRC (PATTERN (PREV_INSN (insn)))
2476 = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2478 XEXP (x, 1)->used = 1;
2483 Handle special case of frame register plus reg (used).
2484 The register already have a pushed offset, just mark this frame
2487 else if (XEXP (x, 0) == frame_pointer_rtx
2488 && XEXP (x, 1)->used)
2494 Process further subtree:
2495 Example: (plus:SI (mem/s:SI (plus:SI (FP) (const_int 8)))
/* CALL_PLACEHOLDER sequences are whole insn chains; use the insn-level
   walker on each of them. */
2500 case CALL_PLACEHOLDER:
2501 push_frame_of_insns (XEXP (x, 0), push_size, boundary);
2502 push_frame_of_insns (XEXP (x, 1), push_size, boundary);
2503 push_frame_of_insns (XEXP (x, 2), push_size, boundary);
2510 /* Scan all subexpressions. */
2511 fmt = GET_RTX_FORMAT (code);
2512 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
/* A bare fp reaching here with boundary 0 is an addressing form the
   pass does not understand — abort loudly rather than miscompile. */
2515 if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
2516 fatal_insn ("push_frame_in_operand", insn);
2517 push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
2519 else if (*fmt == 'E')
2520 for (j = 0; j < XVECLEN (x, i); j++)
2521 push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary);
2525 /* Change the location pointed in reg_equiv_memory_loc.
   Applies the same PUSH_SIZE/BOUNDARY shift to reload's memory-equivalence
   table so spilled pseudos land at the adjusted addresses.
   NOTE(review): excerpt elides some original lines (gaps in numbering). */
2527 push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT push_size,
2528 HOST_WIDE_INT boundary)
2531 extern rtx *reg_equiv_memory_loc;
2533 /* This function is processed if the push_frame is called from
2534 global_alloc (or reload) function. */
2535 if (reg_equiv_memory_loc == 0)
2538 for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2539 if (reg_equiv_memory_loc[i])
2541 rtx x = reg_equiv_memory_loc[i];
2544 if (GET_CODE (x) == MEM
2545 && GET_CODE (XEXP (x, 0)) == PLUS
2546 && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
2548 offset = AUTO_OFFSET(XEXP (x, 0));
/* USED guards against shifting a shared address twice. */
2550 if (! XEXP (x, 0)->used
2551 && offset >= boundary)
2553 offset += push_size;
2554 XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2557 XEXP (x, 0)->used = 1;
2560 else if (GET_CODE (x) == MEM
2561 && XEXP (x, 0) == frame_pointer_rtx
2564 XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2565 XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2571 /* Change the location pointed in reg_equiv_constant.
   Same shift as push_frame_of_reg_equiv_memory_loc, but for pseudos whose
   equivalence is an address expression (fp or fp+const) rather than a MEM.
   NOTE(review): excerpt elides some original lines (gaps in numbering). */
2573 push_frame_of_reg_equiv_constant (HOST_WIDE_INT push_size,
2574 HOST_WIDE_INT boundary)
2577 extern rtx *reg_equiv_constant;
2579 /* This function is processed if the push_frame is called from
2580 global_alloc (or reload) function. */
2581 if (reg_equiv_constant == 0)
2584 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
2585 if (reg_equiv_constant[i])
2587 rtx x = reg_equiv_constant[i];
2590 if (GET_CODE (x) == PLUS
2591 && XEXP (x, 0) == frame_pointer_rtx)
2593 offset = AUTO_OFFSET(x);
2596 && offset >= boundary)
2598 offset += push_size;
2599 XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
/* Bare fp equivalence: replace with fp + push_size. */
2605 else if (x == frame_pointer_rtx
2608 reg_equiv_constant[i]
2609 = plus_constant (frame_pointer_rtx, push_size);
2610 reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
2616 /* Check every instructions if insn's memory reference is out of frame.
   Returns nonzero (per the callers in assign_stack_local_for_pseudo_reg)
   when some insn references the frame below BOUNDARY.
   NOTE(review): excerpt elides some original lines (gaps in numbering). */
2618 check_out_of_frame_access (rtx insn, HOST_WIDE_INT boundary)
2620 for (; insn; insn = NEXT_INSN (insn))
2621 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2622 || GET_CODE (insn) == CALL_INSN)
2624 if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
2631 /* Check every operands if the reference is out of frame. */
2633 check_out_of_frame_access_in_operand (rtx orig, HOST_WIDE_INT boundary)
2643 code = GET_CODE (x);
2663 if (XEXP (x, 0) == frame_pointer_rtx)
2669 /* Handle special case of frame register plus constant. */
2670 if (GET_CODE (XEXP (x, 1)) == CONST_INT
2671 && XEXP (x, 0) == frame_pointer_rtx)
2673 if (0 <= AUTO_OFFSET(x)
2674 && AUTO_OFFSET(x) < boundary)
2679 Process further subtree:
2680 Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2685 case CALL_PLACEHOLDER:
2686 if (check_out_of_frame_access (XEXP (x, 0), boundary))
2688 if (check_out_of_frame_access (XEXP (x, 1), boundary))
2690 if (check_out_of_frame_access (XEXP (x, 2), boundary))
2698 /* Scan all subexpressions. */
2699 fmt = GET_RTX_FORMAT (code);
2700 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2703 if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
2706 else if (*fmt == 'E')
2707 for (j = 0; j < XVECLEN (x, i); j++)
2708 if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))