1/* Procedure integration for GNU CC.
2 Copyright (C) 1988, 91, 93-98, 1999 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
4
5This file is part of GNU CC.
6
7GNU CC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GNU CC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GNU CC; see the file COPYING. If not, write to
19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA. */
21
22/* $FreeBSD: src/contrib/gcc/integrate.c,v 1.1.1.3.2.1 2002/05/01 19:57:46 obrien Exp $ */
23/* $DragonFly: src/contrib/gcc/Attic/integrate.c,v 1.2 2003/06/17 04:24:01 dillon Exp $ */
24
25
26#include "config.h"
27#include "system.h"
28
29#include "rtl.h"
30#include "tree.h"
31#include "regs.h"
32#include "flags.h"
33#include "insn-config.h"
34#include "insn-flags.h"
35#include "expr.h"
36#include "output.h"
37#include "recog.h"
38#include "integrate.h"
39#include "real.h"
40#include "except.h"
41#include "function.h"
42#include "toplev.h"
43#include "intl.h"
44
45#include "obstack.h"
46#define obstack_chunk_alloc xmalloc
47#define obstack_chunk_free free
48
49extern struct obstack *function_maybepermanent_obstack;
50
51/* Round VALUE up to the next highest multiple of ALIGN, which must be
52 a power of two. */
53#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
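/* Worked example (illustrative, not from the original source): with a
   power-of-two ALIGN, CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 16.  */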
54
55/* Default max number of insns a function can have and still be inline.
56 This is overridden on RISC machines. */
57#ifndef INTEGRATE_THRESHOLD
58/* Inlining small functions might save more space than not inlining at
59 all. Assume 1 instruction for the call and 1.5 insns per argument. */
60#define INTEGRATE_THRESHOLD(DECL) \
61 (optimize_size \
62 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
63 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
64#endif
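/* Worked example (illustrative): for a three-argument function this
   threshold is 1 + (3 * 3) / 2 == 5 insns when optimizing for size,
   and 8 * (8 + 3) == 88 insns otherwise.  */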
65\f
66static rtx initialize_for_inline PROTO((tree, int, int, int, int));
67static void finish_inline PROTO((tree, rtx));
68static void adjust_copied_decl_tree PROTO((tree));
69static tree copy_decl_list PROTO((tree));
70static tree copy_decl_tree PROTO((tree));
71static void copy_decl_rtls PROTO((tree));
72static void save_constants PROTO((rtx *));
73static void note_modified_parmregs PROTO((rtx, rtx));
74static rtx copy_for_inline PROTO((rtx));
75static void integrate_parm_decls PROTO((tree, struct inline_remap *,
76 rtvec));
77static void integrate_decl_tree PROTO((tree, int,
78 struct inline_remap *));
79static void save_constants_in_decl_trees PROTO ((tree));
80static void subst_constants PROTO((rtx *, rtx,
81 struct inline_remap *));
82static void restore_constants PROTO((rtx *));
83static void set_block_origin_self PROTO((tree));
84static void set_decl_origin_self PROTO((tree));
85static void set_block_abstract_flags PROTO((tree, int));
86static void process_reg_param PROTO((struct inline_remap *, rtx,
87 rtx));
88
89
90void set_decl_abstract_flags PROTO((tree, int));
91static tree copy_and_set_decl_abstract_origin PROTO((tree));
92
93/* The maximum number of instructions accepted for inlining a
94 function. Increasing values mean more aggressive inlining.
95 This affects currently only functions explicitly marked as
96 inline (or methods defined within the class definition for C++).
97 The default value of 10000 is arbitrary but high to match the
98 previously unlimited gcc capabilities. */
99
100int inline_max_insns = 10000;
101
102\f
103/* Returns the Ith entry in the label_map contained in MAP. If the
104 Ith entry has not yet been set, return a fresh label. This function
105 performs a lazy initialization of label_map, thereby avoiding huge memory
106 explosions when the label_map gets very large. */
107
108rtx
109get_label_from_map (map, i)
110 struct inline_remap *map;
111 int i;
112{
113 rtx x = map->label_map[i];
114
115 if (x == NULL_RTX)
116 x = map->label_map[i] = gen_label_rtx();
117
118 return x;
119}
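/* A hypothetical use, to illustrate the lazy allocation: callers may
   leave map->label_map zeroed and let labels materialize on demand:

     rtx lab = get_label_from_map (map, CODE_LABEL_NUMBER (old_label));

   A second call with the same index returns the same label rtx.  */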
120
121/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
122 is safe and reasonable to integrate into other functions.
123 Nonzero means value is a warning msgid with a single %s
124 for the function's name. */
125
126const char *
127function_cannot_inline_p (fndecl)
128 register tree fndecl;
129{
130 register rtx insn;
131 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
132
133 /* For functions marked as inline increase the maximum size to
134 inline_max_insns (-finline-limit-<n>). For regular functions
135 use the limit given by INTEGRATE_THRESHOLD. */
136
137 int max_insns = (DECL_INLINE (fndecl))
138 ? (inline_max_insns
139 + 8 * list_length (DECL_ARGUMENTS (fndecl)))
140 : INTEGRATE_THRESHOLD (fndecl);
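  /* For example (illustrative): with the default inline_max_insns of
     10000, a two-argument function marked inline gets
     max_insns == 10000 + 8 * 2 == 10016.  */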
141
142 register int ninsns = 0;
143 register tree parms;
144 rtx result;
145
146 /* No inlines with varargs. */
147 if ((last && TREE_VALUE (last) != void_type_node)
148 || current_function_varargs)
149 return N_("varargs function cannot be inline");
150
151 if (current_function_calls_alloca)
152 return N_("function using alloca cannot be inline");
153
154 if (current_function_contains_functions)
155 return N_("function with nested functions cannot be inline");
156
157 if (current_function_cannot_inline)
158 return current_function_cannot_inline;
159
160 /* If it's not even close, don't even look. */
161 if (get_max_uid () > 3 * max_insns)
162 return N_("function too large to be inline");
163
164#if 0
165 /* Don't inline functions which do not specify a function prototype and
166 have BLKmode argument or take the address of a parameter. */
167 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
168 {
169 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
170 TREE_ADDRESSABLE (parms) = 1;
171 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
172 return N_("no prototype, and parameter address used; cannot be inline");
173 }
174#endif
175
176 /* We can't inline functions that return structures
177 the old-fashioned PCC way, copying into a static block. */
178 if (current_function_returns_pcc_struct)
179 return N_("inline functions not supported for this return value type");
180
181 /* We can't inline functions that return structures of varying size. */
182 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
183 return N_("function with varying-size return value cannot be inline");
184
185 /* Cannot inline a function with a varying size argument or one that
186 receives a transparent union. */
187 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
188 {
189 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
190 return N_("function with varying-size parameter cannot be inline");
191 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
192 return N_("function with transparent unit parameter cannot be inline");
193 }
194
195 if (get_max_uid () > max_insns)
196 {
197 for (ninsns = 0, insn = get_first_nonparm_insn ();
198 insn && ninsns < max_insns;
199 insn = NEXT_INSN (insn))
200 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
201 ninsns++;
202
203 if (ninsns >= max_insns)
204 return N_("function too large to be inline");
205 }
206
207 /* We will not inline a function which uses computed goto. The addresses of
208 its local labels, which may be tucked into global storage, are of course
209 not constant across instantiations, which causes unexpected behaviour. */
210 if (current_function_has_computed_jump)
211 return N_("function with computed jump cannot inline");
212
213 /* We cannot inline a nested function that jumps to a nonlocal label. */
214 if (current_function_has_nonlocal_goto)
215 return N_("function with nonlocal goto cannot be inline");
216
217 /* This is a hack, until the inliner is taught about eh regions at
218 the start of the function. */
219 for (insn = get_insns ();
220 insn
221 && ! (GET_CODE (insn) == NOTE
222 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
223 insn = NEXT_INSN (insn))
224 {
225 if (insn && GET_CODE (insn) == NOTE
226 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
227 return N_("function with complex parameters cannot be inline");
228 }
229
230 /* We can't inline functions that return a PARALLEL rtx. */
231 result = DECL_RTL (DECL_RESULT (fndecl));
232 if (result && GET_CODE (result) == PARALLEL)
233 return N_("inline functions not supported for this return value type");
234
235 return 0;
236}
237\f
238/* Variables used within save_for_inline. */
239
240/* Mapping from old pseudo-register to new pseudo-registers.
241 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
242 It is allocated in `save_for_inline' and `expand_inline_function',
243 and deallocated on exit from each of those routines. */
244static rtx *reg_map;
245
246/* Mapping from old code-labels to new code-labels.
247 The first element of this map is label_map[min_labelno].
248 It is allocated in `save_for_inline' and `expand_inline_function',
249 and deallocated on exit from each of those routines. */
250static rtx *label_map;
251
252/* Mapping from old insn uid's to copied insns.
253 It is allocated in `save_for_inline' and `expand_inline_function',
254 and deallocated on exit from each of those routines. */
255static rtx *insn_map;
256
257/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
258 Zero for a reg that isn't a parm's home.
259 Only reg numbers less than max_parm_reg are mapped here. */
260static tree *parmdecl_map;
261
262/* Keep track of first pseudo-register beyond those that are parms. */
263extern int max_parm_reg;
264extern rtx *parm_reg_stack_loc;
265
266/* When an insn is being copied by copy_for_inline,
267 this is nonzero if we have copied an ASM_OPERANDS.
268 In that case, it is the original input-operand vector. */
269static rtvec orig_asm_operands_vector;
270
271/* When an insn is being copied by copy_for_inline,
272 this is nonzero if we have copied an ASM_OPERANDS.
273 In that case, it is the copied input-operand vector. */
274static rtvec copy_asm_operands_vector;
275
276/* Likewise, this is the copied constraints vector. */
277static rtvec copy_asm_constraints_vector;
278
279/* In save_for_inline, nonzero if past the parm-initialization insns. */
280static int in_nonparm_insns;
281\f
282/* Subroutines passed to duplicate_eh_handlers to map exception labels. */
283
284static rtx
285save_for_inline_eh_labelmap (label)
286 rtx label;
287{
288 int index = CODE_LABEL_NUMBER (label);
289 return label_map[index];
290}
291
292/* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
293 needed to save FNDECL's insns and info for future inline expansion. */
294
295static rtx
296initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
297 tree fndecl;
298 int min_labelno;
299 int max_labelno;
300 int max_reg;
301 int copy;
302{
303 int function_flags, i;
304 rtvec arg_vector;
305 tree parms;
306
307 /* Compute the values of any flags we must restore when inlining this. */
308
309 function_flags
310 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
311 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
312 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
313 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
314 + (current_function_returns_pcc_struct
315 * FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
316 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
317 + (current_function_has_nonlocal_label
318 * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
319 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
320 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
321 + (current_function_uses_pic_offset_table
322 * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
323 + current_function_has_computed_jump * FUNCTION_FLAGS_HAS_COMPUTED_JUMP);
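  /* A note on the arithmetic above (illustrative, assuming each
     current_function_* flag is 0 or 1 and each FUNCTION_FLAGS_* constant
     is a distinct bit): the sum is equivalent to OR-ing together the
     applicable flags, e.g. a function that calls alloca and returns a
     pointer yields FUNCTION_FLAGS_CALLS_ALLOCA
     | FUNCTION_FLAGS_RETURNS_POINTER.  */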
324
325 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
326 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
327 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
328
329 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
330 parms;
331 parms = TREE_CHAIN (parms), i++)
332 {
333 rtx p = DECL_RTL (parms);
334 int copied_incoming = 0;
335
336 /* If we have (mem (addressof (mem ...))), use the inner MEM since
337 otherwise the copy_rtx call below will not unshare the MEM, because
338 it shares ADDRESSOF. */
339 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
340 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
341 p = XEXP (XEXP (p, 0), 0);
342
343 if (GET_CODE (p) == MEM && copy)
344 {
345 /* Copy the rtl so that modifications of the addresses
346 later in compilation won't affect this arg_vector.
347 Virtual register instantiation can screw the address
348 of the rtl. */
349 rtx new = copy_rtx (p);
350
351 /* Don't leave the old copy anywhere in this decl. */
352 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
353 || (GET_CODE (DECL_RTL (parms)) == MEM
354 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
355 && (XEXP (DECL_RTL (parms), 0)
356 == XEXP (DECL_INCOMING_RTL (parms), 0))))
357 DECL_INCOMING_RTL (parms) = new, copied_incoming = 1;
358
359 DECL_RTL (parms) = new;
360 }
361
362 RTVEC_ELT (arg_vector, i) = p;
363
364 if (GET_CODE (p) == REG)
365 parmdecl_map[REGNO (p)] = parms;
366 else if (GET_CODE (p) == CONCAT)
367 {
368 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
369 rtx pimag = gen_imagpart (GET_MODE (preal), p);
370
371 if (GET_CODE (preal) == REG)
372 parmdecl_map[REGNO (preal)] = parms;
373 if (GET_CODE (pimag) == REG)
374 parmdecl_map[REGNO (pimag)] = parms;
375 }
376
377 /* This flag is cleared later
378 if the function ever modifies the value of the parm. */
379 TREE_READONLY (parms) = 1;
380
381 /* Copy DECL_INCOMING_RTL if not done already. This can
382 happen if DECL_RTL is a reg. */
383 if (copy && ! copied_incoming)
384 {
385 p = DECL_INCOMING_RTL (parms);
386
387 /* If we have (mem (addressof (mem ...))), use the inner MEM since
388 otherwise the copy_rtx call below will not unshare the MEM, because
389 it shares ADDRESSOF. */
390 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
391 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
392 p = XEXP (XEXP (p, 0), 0);
393
394 if (GET_CODE (p) == MEM)
395 DECL_INCOMING_RTL (parms) = copy_rtx (p);
396 }
397 }
398
399 /* Assume we start out in the insns that set up the parameters. */
400 in_nonparm_insns = 0;
401
402 /* The list of DECL_SAVED_INSNS starts off with a header which
403 contains the following information:
404
405 the first insn of the function (not including the insns that copy
406 parameters into registers),
407 the first parameter insn of the function,
408 the first label used by that function,
409 the last label used by that function,
410 the highest register number used for parameters,
411 the total number of registers used,
412 the size of the incoming stack area for parameters,
413 the number of bytes popped on return,
414 the stack slot list,
415 the labels that are forced to exist,
416 some flags that are used to restore compiler globals,
417 the value of current_function_outgoing_args_size,
418 the original argument vector,
419 the original DECL_INITIAL,
420 and pointers to the table of pseudo regs, pointer flags, and alignment. */
421
422 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
423 max_parm_reg, max_reg,
424 current_function_args_size,
425 current_function_pops_args,
426 stack_slot_list, forced_labels, function_flags,
427 current_function_outgoing_args_size,
428 arg_vector, (rtx) DECL_INITIAL (fndecl),
429 (rtvec) regno_reg_rtx, regno_pointer_flag,
430 regno_pointer_align,
431 (rtvec) parm_reg_stack_loc);
432}
433
434/* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
435 things that must be done to make FNDECL expandable as an inline function.
436 HEAD contains the chain of insns to which FNDECL will expand. */
437
438static void
439finish_inline (fndecl, head)
440 tree fndecl;
441 rtx head;
442{
443 FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
444 FIRST_PARM_INSN (head) = get_insns ();
445 DECL_SAVED_INSNS (fndecl) = head;
446 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
447}
448
449/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
450 they all point to the new (copied) rtxs. */
451
452static void
453adjust_copied_decl_tree (block)
454 register tree block;
455{
456 register tree subblock;
457 register rtx original_end;
458
459 original_end = BLOCK_END_NOTE (block);
460 if (original_end)
461 {
462 BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
463 NOTE_SOURCE_FILE (original_end) = 0;
464 }
465
466 /* Process all subblocks. */
467 for (subblock = BLOCK_SUBBLOCKS (block);
468 subblock;
469 subblock = TREE_CHAIN (subblock))
470 adjust_copied_decl_tree (subblock);
471}
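/* Illustration of the stash read back above: when save_for_inline_copying
   copies a NOTE_INSN_BLOCK_END note, it stores the address of the copy in
   the NOTE_SOURCE_FILE field of the original note; this function moves
   that pointer into BLOCK_END_NOTE (block) and then clears the stash.  */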
472
473/* Make the insns and PARM_DECLs of the current function permanent
474 and record other information in DECL_SAVED_INSNS to allow inlining
475 of this function in subsequent calls.
476
477 This function is called when we are going to immediately compile
478 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
479 modified by the compilation process, so we copy all of them to
480 new storage and consider the new insns to be the insn chain to be
481 compiled. Our caller (rest_of_compilation) saves the original
482 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
483
484/* ??? The nonlocal_label list should be adjusted also. However, since
485 a function that contains a nested function never gets inlined currently,
486 the nonlocal_label list will always be empty, so we don't worry about
487 it for now. */
488
489void
490save_for_inline_copying (fndecl)
491 tree fndecl;
492{
493 rtx first_insn, last_insn, insn;
494 rtx head, copy;
495 int max_labelno, min_labelno, i, len;
496 int max_reg;
497 int max_uid;
498 rtx first_nonparm_insn;
499 char *new, *new1;
500 rtx *new_parm_reg_stack_loc;
501 rtx *new2;
502
503 /* Make and emit a return-label if we have not already done so.
504 Do this before recording the bounds on label numbers. */
505
506 if (return_label == 0)
507 {
508 return_label = gen_label_rtx ();
509 emit_label (return_label);
510 }
511
512 /* Get some bounds on the labels and registers used. */
513
514 max_labelno = max_label_num ();
515 min_labelno = get_first_label_num ();
516 max_reg = max_reg_num ();
517
518 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
519 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
520 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
521 for the parms, prior to elimination of virtual registers.
522 These values are needed for substituting parms properly. */
523
524 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
525
526 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
527
528 if (current_function_uses_const_pool)
529 {
530 /* Replace any constant pool references with the actual constant. We
531 will put the constants back in the copy made below. */
532 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
533 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
534 {
535 save_constants (&PATTERN (insn));
536 if (REG_NOTES (insn))
537 save_constants (&REG_NOTES (insn));
538 }
539
540 /* Also scan all decls, and replace any constant pool references with the
541 actual constant. */
542 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
543
544 /* Clear out the constant pool so that we can recreate it with the
545 copied constants below. */
546 init_const_rtx_hash_table ();
547 clear_const_double_mem ();
548 }
549
550 max_uid = INSN_UID (head);
551
552 /* We have now allocated all that needs to be allocated permanently
553 on the rtx obstack. Set our high-water mark, so that we
554 can free the rest of this when the time comes. */
555
556 preserve_data ();
557
558 /* Copy the chain insns of this function.
559 Install the copied chain as the insns of this function,
560 for continued compilation;
561 the original chain is recorded as the DECL_SAVED_INSNS
562 for inlining future calls. */
563
564 /* If there are insns that copy parms from the stack into pseudo registers,
565 those insns are not copied. `expand_inline_function' must
566 emit the correct code to handle such things. */
567
568 insn = get_insns ();
569 if (GET_CODE (insn) != NOTE)
570 abort ();
571 first_insn = rtx_alloc (NOTE);
572 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
573 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
574 INSN_UID (first_insn) = INSN_UID (insn);
575 PREV_INSN (first_insn) = NULL;
576 NEXT_INSN (first_insn) = NULL;
577 last_insn = first_insn;
578
579 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
580 Make these new rtx's now, and install them in regno_reg_rtx, so they
581 will be the official pseudo-reg rtx's for the rest of compilation. */
582
583 reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));
584
585 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
586 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
587 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
588 regno_reg_rtx[i], len);
589
590 regno_reg_rtx = reg_map;
591
592 /* Put copies of all the virtual register rtx into the new regno_reg_rtx. */
593 init_virtual_regs ();
594
595 /* Likewise each label rtx must have a unique rtx as its copy. */
596
597 /* We used to use alloca here, but the size of what it would try to
598 allocate would occasionally cause it to exceed the stack limit and
599 cause unpredictable core dumps. Some examples were > 2Mb in size. */
600 label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
601
602 for (i = min_labelno; i < max_labelno; i++)
603 label_map[i] = gen_label_rtx ();
604
605 /* Likewise for parm_reg_stack_loc. */
606 new_parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
607 for (i = 0; i < max_parm_reg; i++)
608 new_parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
609
610 parm_reg_stack_loc = new_parm_reg_stack_loc;
611
612 /* Record the mapping of old insns to copied insns. */
613
614 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
615 bzero ((char *) insn_map, max_uid * sizeof (rtx));
616
617 /* Get the insn which signals the end of parameter setup code. */
618 first_nonparm_insn = get_first_nonparm_insn ();
619
620 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
621 (the former occurs when a variable has its address taken)
622 since these may be shared and can be changed by virtual
623 register instantiation. DECL_RTL values for our arguments
624 have already been copied by initialize_for_inline. */
625 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
626 if (GET_CODE (regno_reg_rtx[i]) == MEM)
627 XEXP (regno_reg_rtx[i], 0)
628 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
629
630 /* Copy the parm_reg_stack_loc array, and substitute for all of the rtx
631 contained in it. */
632 new2 = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
633 bcopy ((char *) parm_reg_stack_loc, (char *) new2,
634 max_parm_reg * sizeof (rtx));
635 parm_reg_stack_loc = new2;
636 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; ++i)
637 if (parm_reg_stack_loc[i])
638 parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
639
640 /* Copy the tree of subblocks of the function, and the decls in them.
641 We will use the copy for compiling this function, then restore the original
642 subblocks and decls for use when inlining this function.
643
644 Several parts of the compiler modify BLOCK trees. In particular,
645 instantiate_virtual_regs will instantiate any virtual regs
646 mentioned in the DECL_RTLs of the decls, and loop
647 unrolling will replicate any BLOCK trees inside an unrolled loop.
648
649 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
650 which we will use for inlining. The rtl might even contain pseudoregs
651 whose space has been freed. */
652
653 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
654 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
655
656 /* Now copy each DECL_RTL which is a MEM,
657 so that it is safe to modify its address. */
658 copy_decl_rtls (DECL_INITIAL (fndecl));
659
660 /* The fndecl node acts as its own progenitor, so mark it as such. */
661 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
662
663 /* Now copy the chain of insns. Do this twice: first copy the insn
664 itself and its body, then copy the REG_NOTES. This is because
665 a REG_NOTE may have a forward pointer to another insn. */
666
667 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
668 {
669 orig_asm_operands_vector = 0;
670
671 if (insn == first_nonparm_insn)
672 in_nonparm_insns = 1;
673
674 switch (GET_CODE (insn))
675 {
676 case NOTE:
677 /* No need to keep these. */
678 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
679 continue;
680
681 copy = rtx_alloc (NOTE);
682 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
683 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
684 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
685 else
686 {
687 NOTE_SOURCE_FILE (insn) = (char *) copy;
688 NOTE_SOURCE_FILE (copy) = 0;
689 }
690 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
691 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
692 {
693 int new_region = CODE_LABEL_NUMBER
694 (label_map[NOTE_BLOCK_NUMBER (copy)]);
695
696 /* We have to duplicate the handlers for the original. */
697 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
698 duplicate_eh_handlers (NOTE_BLOCK_NUMBER (copy), new_region,
699 save_for_inline_eh_labelmap);
700
701 /* We have to forward these both to match the new exception
702 region. */
703 NOTE_BLOCK_NUMBER (copy) = new_region;
704
705 }
706 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
707 break;
708
709 case INSN:
710 case JUMP_INSN:
711 case CALL_INSN:
712 copy = rtx_alloc (GET_CODE (insn));
713
714 if (GET_CODE (insn) == CALL_INSN)
715 CALL_INSN_FUNCTION_USAGE (copy)
716 = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
717
718 PATTERN (copy) = copy_for_inline (PATTERN (insn));
719 INSN_CODE (copy) = -1;
720 LOG_LINKS (copy) = NULL_RTX;
721 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
722 break;
723
724 case CODE_LABEL:
725 copy = label_map[CODE_LABEL_NUMBER (insn)];
726 LABEL_NAME (copy) = LABEL_NAME (insn);
727 break;
728
729 case BARRIER:
730 copy = rtx_alloc (BARRIER);
731 break;
732
733 default:
734 abort ();
735 }
736 INSN_UID (copy) = INSN_UID (insn);
737 insn_map[INSN_UID (insn)] = copy;
738 NEXT_INSN (last_insn) = copy;
739 PREV_INSN (copy) = last_insn;
740 last_insn = copy;
741 }
742
743 adjust_copied_decl_tree (DECL_INITIAL (fndecl));
744
745 /* Now copy the REG_NOTES. */
746 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
747 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
748 && insn_map[INSN_UID(insn)])
749 REG_NOTES (insn_map[INSN_UID (insn)])
750 = copy_for_inline (REG_NOTES (insn));
751
752 NEXT_INSN (last_insn) = NULL;
753
754 finish_inline (fndecl, head);
755
756 /* Make new versions of the register tables. */
757 new = (char *) savealloc (regno_pointer_flag_length);
758 bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
759 new1 = (char *) savealloc (regno_pointer_flag_length);
760 bcopy (regno_pointer_align, new1, regno_pointer_flag_length);
761
762 regno_pointer_flag = new;
763 regno_pointer_align = new1;
764
765 set_new_first_and_last_insn (first_insn, last_insn);
766
767 if (label_map)
768 free (label_map);
769}
770
771/* Copy NODE (as with copy_node). NODE must be a DECL. Set the
772 DECL_ABSTRACT_ORIGIN for the new one accordingly. */
773
774static tree
775copy_and_set_decl_abstract_origin (node)
776 tree node;
777{
778 tree copy = copy_node (node);
779 if (DECL_ABSTRACT_ORIGIN (copy) != NULL_TREE)
780 /* That means that NODE already had a DECL_ABSTRACT_ORIGIN. (This
781 situation occurs if we inline a function which itself made
782 calls to inline functions.) Since DECL_ABSTRACT_ORIGIN is the
783 most distant ancestor, we don't have to do anything here. */
784 ;
785 else
786 /* The most distant ancestor must be NODE. */
787 DECL_ABSTRACT_ORIGIN (copy) = node;
788
789 return copy;
790}
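/* Illustration: if NODE was itself produced by an earlier inlining, the
   copy simply inherits NODE's DECL_ABSTRACT_ORIGIN; only a first-generation
   copy points back at NODE.  Either way, DECL_ABSTRACT_ORIGIN names the
   most distant ancestor.  */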
791
792/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
793 For example, this can copy a list made of TREE_LIST nodes. While copying,
794 set DECL_ABSTRACT_ORIGIN appropriately. */
795
796static tree
797copy_decl_list (list)
798 tree list;
799{
800 tree head;
801 register tree prev, next;
802
803 if (list == 0)
804 return 0;
805
806 head = prev = copy_and_set_decl_abstract_origin (list);
807 next = TREE_CHAIN (list);
808 while (next)
809 {
810 register tree copy;
811
812 copy = copy_and_set_decl_abstract_origin (next);
813 TREE_CHAIN (prev) = copy;
814 prev = copy;
815 next = TREE_CHAIN (next);
816 }
817 return head;
818}
819
820/* Make a copy of the entire tree of blocks BLOCK, and return it. */
821
822static tree
823copy_decl_tree (block)
824 tree block;
825{
826 tree t, vars, subblocks;
827
828 vars = copy_decl_list (BLOCK_VARS (block));
829 subblocks = 0;
830
831 /* Process all subblocks. */
832 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
833 {
834 tree copy = copy_decl_tree (t);
835 TREE_CHAIN (copy) = subblocks;
836 subblocks = copy;
837 }
838
839 t = copy_node (block);
840 BLOCK_VARS (t) = vars;
841 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
842 /* If the BLOCK being cloned is already marked as having been instantiated
843 from something else, then leave that `origin' marking alone. Otherwise,
844 mark the clone as having originated from the BLOCK we are cloning. */
845 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
846 BLOCK_ABSTRACT_ORIGIN (t) = block;
847 return t;
848}
849
850/* Copy DECL_RTLs in all decls in the given BLOCK node. */
851
852static void
853copy_decl_rtls (block)
854 tree block;
855{
856 tree t;
857
858 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
859 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
860 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
861
862 /* Process all subblocks. */
863 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
864 copy_decl_rtls (t);
865}
866
867/* Make the insns and PARM_DECLs of the current function permanent
868 and record other information in DECL_SAVED_INSNS to allow inlining
869 of this function in subsequent calls.
870
871 This routine need not copy any insns because we are not going
872 to immediately compile the insns in the insn chain. There
873 are two cases when we would compile the insns for FNDECL:
874 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
875 be output at the end of other compilation, because somebody took
876 its address. In the first case, the insns of FNDECL are copied
877 as it is expanded inline, so FNDECL's saved insns are not
878 modified. In the second case, FNDECL is used for the last time,
879 so modifying the rtl is not a problem.
880
881 We don't have to worry about FNDECL being inline expanded by
882 other functions which are written at the end of compilation
883 because flag_no_inline is turned on when we begin writing
884 functions at the end of compilation. */
885
886void
887save_for_inline_nocopy (fndecl)
888 tree fndecl;
889{
890 rtx insn;
891 rtx head;
892 rtx first_nonparm_insn;
893
894 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
895 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
896 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
897 for the parms, prior to elimination of virtual registers.
898 These values are needed for substituting parms properly. */
899
900 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
901
902 /* Make and emit a return-label if we have not already done so. */
903
904 if (return_label == 0)
905 {
906 return_label = gen_label_rtx ();
907 emit_label (return_label);
908 }
909
910 head = initialize_for_inline (fndecl, get_first_label_num (),
911 max_label_num (), max_reg_num (), 0);
912
913 /* If there are insns that copy parms from the stack into pseudo registers,
914 those insns are not copied. `expand_inline_function' must
915 emit the correct code to handle such things. */
916
917 insn = get_insns ();
918 if (GET_CODE (insn) != NOTE)
919 abort ();
920
921 /* Get the insn which signals the end of parameter setup code. */
922 first_nonparm_insn = get_first_nonparm_insn ();
923
924 /* Now just scan the chain of insns to see what happens to our
925 PARM_DECLs. If a PARM_DECL is used but never modified, we
926 can substitute its rtl directly when expanding inline (and
927 perform constant folding when its incoming value is constant).
928 Otherwise, we have to copy its value into a new register and track
929 the new register's life. */
930
931 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
932 {
933 if (insn == first_nonparm_insn)
934 in_nonparm_insns = 1;
935
936 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
937 {
938 if (current_function_uses_const_pool)
939 {
940 /* Replace any constant pool references with the actual constant.
941 We will put the constant back if we need to write the
942 function out after all. */
943 save_constants (&PATTERN (insn));
944 if (REG_NOTES (insn))
945 save_constants (&REG_NOTES (insn));
946 }
947
948 /* Record what interesting things happen to our parameters. */
949 note_stores (PATTERN (insn), note_modified_parmregs);
950 }
951 }
952
953 /* Also scan all decls, and replace any constant pool references with the
954 actual constant. */
955 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
956
957 /* We have now allocated all that needs to be allocated permanently
958 on the rtx obstack. Set our high-water mark, so that we
959 can free the rest of this when the time comes. */
960
961 preserve_data ();
962
963 finish_inline (fndecl, head);
964}
965\f
966/* Given PX, a pointer into an insn, search for references to the constant
967 pool. Replace each with a CONST that has the mode of the original
968 constant, contains the constant, and has RTX_INTEGRATED_P set.
969 Similarly, constant pool addresses not enclosed in a MEM are replaced
970 with an ADDRESS and CONST rtx which also gives the constant, its
971 mode, the mode of the address, and has RTX_INTEGRATED_P set. */
972
973static void
974save_constants (px)
975 rtx *px;
976{
977 rtx x;
978 int i, j;
979
980 again:
981 x = *px;
982
983 /* If this is a CONST_DOUBLE, don't try to fix things up in
984 CONST_DOUBLE_MEM, because this is an infinite recursion. */
985 if (GET_CODE (x) == CONST_DOUBLE)
986 return;
987 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
988 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
989 {
990 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
991 rtx new = gen_rtx_CONST (const_mode, get_pool_constant (XEXP (x, 0)));
992 RTX_INTEGRATED_P (new) = 1;
993
994 /* If the MEM was in a different mode than the constant (perhaps we
995 were only looking at the low-order part), surround it with a
996 SUBREG so we can save both modes. */
997
998 if (GET_MODE (x) != const_mode)
999 {
1000 new = gen_rtx_SUBREG (GET_MODE (x), new, 0);
1001 RTX_INTEGRATED_P (new) = 1;
1002 }
1003
1004 *px = new;
1005 save_constants (&XEXP (*px, 0));
1006 }
1007 else if (GET_CODE (x) == SYMBOL_REF
1008 && CONSTANT_POOL_ADDRESS_P (x))
1009 {
1010 *px = gen_rtx_ADDRESS (GET_MODE (x),
1011 gen_rtx_CONST (get_pool_mode (x),
1012 get_pool_constant (x)));
1013 save_constants (&XEXP (*px, 0));
1014 RTX_INTEGRATED_P (*px) = 1;
1015 }
1016
1017 else
1018 {
1019 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
1020 int len = GET_RTX_LENGTH (GET_CODE (x));
1021
1022 for (i = len-1; i >= 0; i--)
1023 {
1024 switch (fmt[i])
1025 {
1026 case 'E':
1027 for (j = 0; j < XVECLEN (x, i); j++)
1028 save_constants (&XVECEXP (x, i, j));
1029 break;
1030
1031 case 'e':
1032 if (XEXP (x, i) == 0)
1033 continue;
1034 if (i == 0)
1035 {
1036 /* Hack tail-recursion here. */
1037 px = &XEXP (x, 0);
1038 goto again;
1039 }
1040 save_constants (&XEXP (x, i));
1041 break;
1042 }
1043 }
1044 }
1045}
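/* An illustrative before/after for save_constants (hypothetical rtl):

     (mem:SF (symbol_ref/u:SI <pool entry>))  -->  (const:SF <constant>)
     (symbol_ref/u:SI <pool entry>)           -->  (address:SI (const:SF <constant>))

   with RTX_INTEGRATED_P set on the replacement, and a SUBREG wrapped
   around the CONST when the MEM's mode differs from the constant's.  */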
1046\f
1047/* Note whether a parameter is modified or not. */
1048
1049static void
1050note_modified_parmregs (reg, x)
1051 rtx reg;
1052 rtx x ATTRIBUTE_UNUSED;
1053{
1054 if (GET_CODE (reg) == REG && in_nonparm_insns
1055 && REGNO (reg) < max_parm_reg
1056 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1057 && parmdecl_map[REGNO (reg)] != 0)
1058 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
1059}
1060
1061/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
1062 according to `reg_map' and `label_map'. The original rtl insns
1063 will be saved for inlining; this is used to make a copy
1064 which is used to finish compiling the inline function itself.
1065
1066 If we find a "saved" constant pool entry, one which was replaced with
1067 the value of the constant, convert it back to a constant pool entry.
1068 Since the pool wasn't touched, this should simply restore the old
1069 address.
1070
1071 All other kinds of rtx are copied except those that can never be
1072 changed during compilation. */
1073
1074static rtx
1075copy_for_inline (orig)
1076 rtx orig;
1077{
1078 register rtx x = orig;
1079 register rtx new;
1080 register int i;
1081 register enum rtx_code code;
1082 register char *format_ptr;
1083
1084 if (x == 0)
1085 return x;
1086
1087 code = GET_CODE (x);
1088
1089 /* These types may be freely shared. */
1090
1091 switch (code)
1092 {
1093 case QUEUED:
1094 case CONST_INT:
1095 case PC:
1096 case CC0:
1097 return x;
1098
1099 case SYMBOL_REF:
1100 if (! SYMBOL_REF_NEED_ADJUST (x))
1101 return x;
1102 return rethrow_symbol_map (x, save_for_inline_eh_labelmap);
1103
1104 case CONST_DOUBLE:
1105 /* We have to make a new CONST_DOUBLE to ensure that we account for
1106 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
1107 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1108 {
1109 REAL_VALUE_TYPE d;
1110
1111 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1112 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
1113 }
1114 else
1115 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
1116 VOIDmode);
1117
1118 case CONST:
1119 /* Get constant pool entry for constant in the pool. */
1120 if (RTX_INTEGRATED_P (x))
1121 return validize_mem (force_const_mem (GET_MODE (x),
1122 copy_for_inline (XEXP (x, 0))));
1123 break;
1124
1125 case SUBREG:
1126 /* Get constant pool entry, but access it in a different mode. */
1127 if (RTX_INTEGRATED_P (x))
1128 {
1129 new = force_const_mem (GET_MODE (SUBREG_REG (x)),
1130 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
1131
1132 PUT_MODE (new, GET_MODE (x));
1133 return validize_mem (new);
1134 }
1135 break;
1136
1137 case ADDRESS:
1138 /* If this is not the special constant-pool form, it is an error.
1139 Otherwise get the constant pool address. */
1140 if (! RTX_INTEGRATED_P (x))
1141 abort ();
1142
1143 new = force_const_mem (GET_MODE (XEXP (x, 0)),
1144 copy_for_inline (XEXP (XEXP (x, 0), 0)));
1145 new = XEXP (new, 0);
1146
1147#ifdef POINTERS_EXTEND_UNSIGNED
1148 if (GET_MODE (new) != GET_MODE (x))
1149 new = convert_memory_address (GET_MODE (x), new);
1150#endif
1151
1152 return new;
1153
1154 case ASM_OPERANDS:
1155 /* If a single asm insn contains multiple output operands
1156 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1157 We must make sure that the copied insn continues to share it. */
1158 if (orig_asm_operands_vector == XVEC (orig, 3))
1159 {
1160 x = rtx_alloc (ASM_OPERANDS);
1161 x->volatil = orig->volatil;
1162 XSTR (x, 0) = XSTR (orig, 0);
1163 XSTR (x, 1) = XSTR (orig, 1);
1164 XINT (x, 2) = XINT (orig, 2);
1165 XVEC (x, 3) = copy_asm_operands_vector;
1166 XVEC (x, 4) = copy_asm_constraints_vector;
1167 XSTR (x, 5) = XSTR (orig, 5);
1168 XINT (x, 6) = XINT (orig, 6);
1169 return x;
1170 }
1171 break;
1172
1173 case MEM:
1174 /* A MEM is usually allowed to be shared if its address is constant
1175 or is a constant plus one of the special registers.
1176
1177 We do not allow sharing of addresses that are either a special
1178 register or the sum of a constant and a special register because
1179 it is possible for unshare_all_rtl to copy the address, into memory
1180 that won't be saved. Although the MEM can safely be shared, and
1181 won't be copied there, the address itself cannot be shared, and may
1182 need to be copied.
1183
1184 There are also two exceptions with constants: The first is if the
1185 constant is a LABEL_REF or the sum of the LABEL_REF
1186 and an integer. This case can happen if we have an inline
1187 function that supplies a constant operand to the call of another
1188 inline function that uses it in a switch statement. In this case,
1189 we will be replacing the LABEL_REF, so we have to replace this MEM
1190 as well.
1191
1192 The second case is if we have a (const (plus (address ..) ...)).
1193 In that case we need to put back the address of the constant pool
1194 entry. */
1195
1196 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
1197 && GET_CODE (XEXP (x, 0)) != LABEL_REF
1198 && ! (GET_CODE (XEXP (x, 0)) == CONST
1199 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
1200 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1201 == LABEL_REF)
1202 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1203 == ADDRESS)))))
1204 return x;
1205 break;
1206
1207 case LABEL_REF:
1208 /* If this is a non-local label, just make a new LABEL_REF.
1209 Otherwise, use the new label as well. */
1210 x = gen_rtx_LABEL_REF (GET_MODE (orig),
1211 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1212 : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
1213 LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
1214 LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
1215 return x;
1216
1217 case REG:
1218 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
1219 return reg_map [REGNO (x)];
1220 else
1221 return x;
1222
1223 case SET:
1224 /* If a parm that gets modified lives in a pseudo-reg,
1225 clear its TREE_READONLY to prevent certain optimizations. */
1226 {
1227 rtx dest = SET_DEST (x);
1228
1229 while (GET_CODE (dest) == STRICT_LOW_PART
1230 || GET_CODE (dest) == ZERO_EXTRACT
1231 || GET_CODE (dest) == SUBREG)
1232 dest = XEXP (dest, 0);
1233
1234 if (GET_CODE (dest) == REG
1235 && REGNO (dest) < max_parm_reg
1236 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
1237 && parmdecl_map[REGNO (dest)] != 0
1238 /* The insn to load an arg pseudo from a stack slot
1239 does not count as modifying it. */
1240 && in_nonparm_insns)
1241 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
1242 }
1243 break;
1244
1245#if 0 /* This is a good idea, but here is the wrong place for it. */
1246 /* Arrange that CONST_INTs always appear as the second operand
1247 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1248 always appear as the first. */
1249 case PLUS:
1250 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1251 || (XEXP (x, 1) == frame_pointer_rtx
1252 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1253 && XEXP (x, 1) == arg_pointer_rtx)))
1254 {
1255 rtx t = XEXP (x, 0);
1256 XEXP (x, 0) = XEXP (x, 1);
1257 XEXP (x, 1) = t;
1258 }
1259 break;
1260#endif
1261 default:
1262 break;
1263 }
1264
1265 /* Replace this rtx with a copy of itself. */
1266
1267 x = rtx_alloc (code);
1268 bcopy ((char *) orig, (char *) x,
1269 (sizeof (*x) - sizeof (x->fld)
1270 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
1271
1272 /* Now scan the subexpressions recursively.
1273 We can store any replaced subexpressions directly into X
1274 since we know X is not shared! Any vectors in X
1275 must be copied if X was copied. */
1276
1277 format_ptr = GET_RTX_FORMAT (code);
1278
1279 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1280 {
1281 switch (*format_ptr++)
1282 {
1283 case 'e':
1284 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1285 break;
1286
1287 case 'u':
1288 /* Change any references to old-insns to point to the
1289 corresponding copied insns. */
1290 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1291 break;
1292
1293 case 'E':
1294 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1295 {
1296 register int j;
1297
1298 XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
1299 for (j = 0; j < XVECLEN (x, i); j++)
1300 XVECEXP (x, i, j)
1301 = copy_for_inline (XVECEXP (x, i, j));
1302 }
1303 break;
1304 }
1305 }
1306
1307 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1308 {
1309 orig_asm_operands_vector = XVEC (orig, 3);
1310 copy_asm_operands_vector = XVEC (x, 3);
1311 copy_asm_constraints_vector = XVEC (x, 4);
1312 }
1313
1314 return x;
1315}
1316
1317/* Unfortunately, we need a global copy of const_equiv map for communication
1318 with a function called from note_stores. Be *very* careful that this
1319 is used properly in the presence of recursion. */
1320
1321varray_type global_const_equiv_varray;
1322\f
1323#define FIXED_BASE_PLUS_P(X) \
1324 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1325 && GET_CODE (XEXP (X, 0)) == REG \
1326 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1327 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
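/* For example (illustrative), FIXED_BASE_PLUS_P matches rtl such as
   (plus:SI (reg:SI VIRTUAL_STACK_VARS_REGNUM) (const_int 8)), i.e. a
   constant offset from one of the fixed virtual registers.  */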
1328
1329/* Called to set up a mapping for the case where a parameter is in a
1330 register. If it is read-only and our argument is a constant, set up the
1331 constant equivalence.
1332
1333 If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
1334 if it is a register.
1335
1336 Also, don't allow hard registers here; they might not be valid when
1337 substituted into insns. */
1338static void
1339process_reg_param (map, loc, copy)
1340 struct inline_remap *map;
1341 rtx loc, copy;
1342{
1343 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1344 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1345 && ! REG_USERVAR_P (copy))
1346 || (GET_CODE (copy) == REG
1347 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1348 {
1349 rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
1350 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1351 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1352 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
1353 copy = temp;
1354 }
1355 map->reg_map[REGNO (loc)] = copy;
1356}
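/* Illustration: if LOC is a user-variable pseudo and COPY is a constant
   or a hard register, the code above first moves COPY into a fresh
   pseudo (recording a constant equivalence when possible), so that only
   suitable pseudo registers end up in map->reg_map.  */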
1357
1358/* Used by duplicate_eh_handlers to map labels for the exception table. */
1359static struct inline_remap *eif_eh_map;
1360
1361static rtx
1362expand_inline_function_eh_labelmap (label)
1363 rtx label;
1364{
1365 int index = CODE_LABEL_NUMBER (label);
1366 return get_label_from_map (eif_eh_map, index);
1367}
1368
1369/* Integrate the procedure defined by FNDECL. Note that this function
1370 may wind up calling itself. Since the static variables are not
1371 reentrant, we do not assign them until after the possibility
1372 of recursion is eliminated.
1373
1374 If IGNORE is nonzero, do not produce a value.
1375 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1376
1377 Value is:
1378 (rtx)-1 if we could not substitute the function
1379 0 if we substituted it and it does not produce a value
1380 else an rtx for where the value is stored. */
1381
1382rtx
1383expand_inline_function (fndecl, parms, target, ignore, type,
1384 structure_value_addr)
1385 tree fndecl, parms;
1386 rtx target;
1387 int ignore;
1388 tree type;
1389 rtx structure_value_addr;
1390{
1391 tree formal, actual, block;
1392 rtx header = DECL_SAVED_INSNS (fndecl);
1393 rtx insns = FIRST_FUNCTION_INSN (header);
1394 rtx parm_insns = FIRST_PARM_INSN (header);
1395 tree *arg_trees;
1396 rtx *arg_vals;
1397 rtx insn;
1398 int max_regno;
1399 register int i;
1400 int min_labelno = FIRST_LABELNO (header);
1401 int max_labelno = LAST_LABELNO (header);
1402 int nargs;
1403 rtx local_return_label = 0;
1404 rtx loc;
1405 rtx stack_save = 0;
1406 rtx temp;
1407 struct inline_remap *map = 0;
1408#ifdef HAVE_cc0
1409 rtx cc0_insn = 0;
1410#endif
1411 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1412 rtx static_chain_value = 0;
1413
1414 /* The pointer used to track the true location of the memory used
1415 for MAP->LABEL_MAP. */
1416 rtx *real_label_map = 0;
1417
1418 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1419 max_regno = MAX_REGNUM (header) + 3;
1420 if (max_regno < FIRST_PSEUDO_REGISTER)
1421 abort ();
1422
1423 nargs = list_length (DECL_ARGUMENTS (fndecl));
1424
1425 /* Check that the parms' types match and that sufficient arguments were
1426 passed. Since the appropriate conversions or default promotions have
1427 already been applied, the machine modes should match exactly. */
1428
1429 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
1430 formal;
1431 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
1432 {
1433 tree arg;
1434 enum machine_mode mode;
1435
1436 if (actual == 0)
1437 return (rtx) (HOST_WIDE_INT) -1;
1438
1439 arg = TREE_VALUE (actual);
1440 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1441
1442 if (mode != TYPE_MODE (TREE_TYPE (arg))
1443 /* If they are block mode, the types should match exactly.
1444 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1445 which could happen if the parameter has incomplete type. */
1446 || (mode == BLKmode
1447 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
1448 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
1449 return (rtx) (HOST_WIDE_INT) -1;
1450 }
1451
1452 /* Extra arguments are valid, but will be ignored below, so we must
1453 evaluate them here for side-effects. */
1454 for (; actual; actual = TREE_CHAIN (actual))
1455 expand_expr (TREE_VALUE (actual), const0_rtx,
1456 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
1457
1458 /* Make a binding contour to keep inline cleanups called at
1459 outer function-scope level from looking like they are shadowing
1460 parameter declarations. */
1461 pushlevel (0);
1462
1463 /* Expand the function arguments. Do this first so that any
1464 new registers get created before we allocate the maps. */
1465
1466 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1467 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1468
1469 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1470 formal;
1471 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1472 {
1473 /* Actual parameter, converted to the type of the argument within the
1474 function. */
1475 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1476 /* Mode of the variable used within the function. */
1477 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1478 int invisiref = 0;
1479
1480 arg_trees[i] = arg;
1481 loc = RTVEC_ELT (arg_vector, i);
1482
1483 /* If this is an object passed by invisible reference, we copy the
1484 object into a stack slot and save its address. If this will go
1485 into memory, we do nothing now. Otherwise, we just expand the
1486 argument. */
1487 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1488 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1489 {
1490 rtx stack_slot
1491 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
1492 int_size_in_bytes (TREE_TYPE (arg)), 1);
1493 MEM_SET_IN_STRUCT_P (stack_slot,
1494 AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1495
1496 store_expr (arg, stack_slot, 0);
1497
1498 arg_vals[i] = XEXP (stack_slot, 0);
1499 invisiref = 1;
1500 }
1501 else if (GET_CODE (loc) != MEM)
1502 {
1503 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
1504 /* The mode of LOC and ARG can differ if LOC was a variable
1505 that had its mode promoted via PROMOTED_MODE. */
1506 arg_vals[i] = convert_modes (GET_MODE (loc),
1507 TYPE_MODE (TREE_TYPE (arg)),
1508 expand_expr (arg, NULL_RTX, mode,
1509 EXPAND_SUM),
1510 TREE_UNSIGNED (TREE_TYPE (formal)));
1511 else
1512 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1513 }
1514 else
1515 arg_vals[i] = 0;
1516
1517 if (arg_vals[i] != 0
1518 && (! TREE_READONLY (formal)
1519 /* If the parameter is not read-only, copy our argument through
1520 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1521 TARGET in any way. In the inline function, they will likely
1522 be two different pseudos, and `safe_from_p' will make all
1523 sorts of smart assumptions about their not conflicting.
1524 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1525 wrong, so put ARG_VALS[I] into a fresh register.
1526 Don't worry about invisible references, since their stack
1527 temps will never overlap the target. */
1528 || (target != 0
1529 && ! invisiref
1530 && (GET_CODE (arg_vals[i]) == REG
1531 || GET_CODE (arg_vals[i]) == SUBREG
1532 || GET_CODE (arg_vals[i]) == MEM)
1533 && reg_overlap_mentioned_p (arg_vals[i], target))
1534 /* ??? We must always copy a SUBREG into a REG, because it might
1535 get substituted into an address, and not all ports correctly
1536 handle SUBREGs in addresses. */
1537 || (GET_CODE (arg_vals[i]) == SUBREG)))
1538 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
1539
1540 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
1541 && POINTER_TYPE_P (TREE_TYPE (formal)))
1542 mark_reg_pointer (arg_vals[i],
1543 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
1544 / BITS_PER_UNIT));
1545 }
1546
1547 /* Allocate the structures we use to remap things. */
1548
1549 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1550 map->fndecl = fndecl;
1551
1552 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1553 bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
1554
1555 /* We used to use alloca here, but the size of what it would try to
1556 allocate would occasionally cause it to exceed the stack limit and
1557 cause unpredictable core dumps. */
1558 real_label_map
1559 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
1560 map->label_map = real_label_map;
1561
1562 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1563 bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
1564 map->min_insnno = 0;
1565 map->max_insnno = INSN_UID (header);
1566
1567 map->integrating = 1;
1568
1569 /* const_equiv_varray maps pseudos in our routine to constants, so
1570 it needs to be large enough for all our pseudos. This is the
1571 number we are currently using plus the number in the called
1572 routine, plus 15 for each arg, five to compute the virtual frame
1573 pointer, and five for the return value. This should be enough
1574 for most cases. We do not reference entries outside the range of
1575 the map.
1576
1577 ??? These numbers are quite arbitrary and were obtained by
1578 experimentation. At some point, we should try to allocate the
1579 table after all the parameters are set up so we can more accurately
1580 estimate the number of pseudos we will need. */
1581
1582 VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
1583 (max_reg_num ()
1584 + (max_regno - FIRST_PSEUDO_REGISTER)
1585 + 15 * nargs
1586 + 10),
1587 "expand_inline_function");
1588 map->const_age = 0;
1589
1590 /* Record the current insn in case we have to set up pointers to frame
1591 and argument memory blocks. If there are no insns yet, add a dummy
1592 insn that can be used as an insertion point. */
1593 map->insns_at_start = get_last_insn ();
1594 if (map->insns_at_start == 0)
1595 map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
1596
1597 map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
1598 map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);
1599
1600 /* Update the outgoing argument size to allow for those in the inlined
1601 function. */
1602 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1603 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1604
1605 /* If the inline function needs to make PIC references, that means
1606 that this function's PIC offset table must be used. */
1607 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1608 current_function_uses_pic_offset_table = 1;
1609
1610 /* If this function needs a context, set it up. */
1611 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1612 static_chain_value = lookup_static_chain (fndecl);
1613
1614 /* If the inline function has these flags set, that means that the
1615 corresponding global flags should be set for this function. */
1616 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_SETJMP)
1617 current_function_calls_setjmp = 1;
1618
1619 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_LONGJMP)
1620 current_function_calls_longjmp = 1;
1621
1622 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
1623 current_function_has_nonlocal_label = 1;
1624
1625 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_CONST_POOL)
1626 current_function_uses_const_pool = 1;
1627
1628 if (GET_CODE (parm_insns) == NOTE
1629 && NOTE_LINE_NUMBER (parm_insns) > 0)
1630 {
1631 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1632 NOTE_LINE_NUMBER (parm_insns));
1633 if (note)
1634 RTX_INTEGRATED_P (note) = 1;
1635 }
1636
1637 /* Process each argument. For each, set up things so that the function's
1638 reference to the argument will refer to the argument being passed.
1639 We only replace REG with REG here. Any simplifications are done
1640 via const_equiv_map.
1641
1642 We make two passes: In the first, we deal with parameters that will
1643 be placed into registers, since we need to ensure that the allocated
1644 register number fits in const_equiv_map. Then we store all non-register
1645 parameters into their memory location. */
1646
1647 /* Don't try to free temp stack slots here, because we may put one of the
1648 parameters into a temp stack slot. */
1649
1650 for (i = 0; i < nargs; i++)
1651 {
1652 rtx copy = arg_vals[i];
1653
1654 loc = RTVEC_ELT (arg_vector, i);
1655
1656      /* There are four cases, each handled separately.  */
1657 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1658 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1659 {
1660 /* This must be an object passed by invisible reference (it could
1661 also be a variable-sized object, but we forbid inlining functions
1662 with variable-sized arguments). COPY is the address of the
1663 actual value (this computation will cause it to be copied). We
1664 map that address for the register, noting the actual address as
1665 an equivalent in case it can be substituted into the insns. */
1666
1667 if (GET_CODE (copy) != REG)
1668 {
1669 temp = copy_addr_to_reg (copy);
1670 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1671 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
1672 copy = temp;
1673 }
1674 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1675 }
1676 else if (GET_CODE (loc) == MEM)
1677 {
1678 /* This is the case of a parameter that lives in memory.
1679 It will live in the block we allocate in the called routine's
1680 frame that simulates the incoming argument area. Do nothing
1681 now; we will call store_expr later. */
1682 ;
1683 }
1684 else if (GET_CODE (loc) == REG)
1685 process_reg_param (map, loc, copy);
1686 else if (GET_CODE (loc) == CONCAT)
1687 {
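	  /* The parameter is a complex value; map its real and imaginary
	     parts separately.  */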
1688 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1689 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1690 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1691 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1692
1693 process_reg_param (map, locreal, copyreal);
1694 process_reg_param (map, locimag, copyimag);
1695 }
1696 else
1697 abort ();
1698 }
1699
1700 /* Now do the parameters that will be placed in memory. */
1701
1702 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1703 formal; formal = TREE_CHAIN (formal), i++)
1704 {
1705 loc = RTVEC_ELT (arg_vector, i);
1706
1707 if (GET_CODE (loc) == MEM
1708 /* Exclude case handled above. */
1709 && ! (GET_CODE (XEXP (loc, 0)) == REG
1710 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1711 {
1712 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1713 DECL_SOURCE_LINE (formal));
1714 if (note)
1715 RTX_INTEGRATED_P (note) = 1;
1716
1717 /* Compute the address in the area we reserved and store the
1718 value there. */
1719 temp = copy_rtx_and_substitute (loc, map);
1720 subst_constants (&temp, NULL_RTX, map);
1721 apply_change_group ();
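	  /* Substitution may have produced an invalid address; if so,
	     re-legitimize it before storing.  */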
1722 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1723 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1724 store_expr (arg_trees[i], temp, 0);
1725 }
1726 }
1727
1728 /* Deal with the places that the function puts its result.
1729 We are driven by what is placed into DECL_RESULT.
1730
1731     Initially, we assume that we don't have any special handling for
1732     REG_FUNCTION_VALUE_P.  */
1733
1734 map->inline_target = 0;
1735 loc = DECL_RTL (DECL_RESULT (fndecl));
1736
1737 if (TYPE_MODE (type) == VOIDmode)
1738 /* There is no return value to worry about. */
1739 ;
1740 else if (GET_CODE (loc) == MEM)
1741 {
1742 if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
1743 {
1744 temp = copy_rtx_and_substitute (loc, map);
1745 subst_constants (&temp, NULL_RTX, map);
1746 apply_change_group ();
1747 target = temp;
1748 }
1749 else
1750 {
1751 if (! structure_value_addr
1752 || ! aggregate_value_p (DECL_RESULT (fndecl)))
1753 abort ();
1754
1755 /* Pass the function the address in which to return a structure
1756 value. Note that a constructor can cause someone to call us
1757 with STRUCTURE_VALUE_ADDR, but the initialization takes place
1758 via the first parameter, rather than the struct return address.
1759
1760 We have two cases: If the address is a simple register
1761 indirect, use the mapping mechanism to point that register to
1762 our structure return address. Otherwise, store the structure
1763 return value into the place that it will be referenced from. */
1764
1765 if (GET_CODE (XEXP (loc, 0)) == REG)
1766 {
1767 temp = force_operand (structure_value_addr, NULL_RTX);
1768 temp = force_reg (Pmode, temp);
1769 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1770
1771 if (CONSTANT_P (structure_value_addr)
1772 || GET_CODE (structure_value_addr) == ADDRESSOF
1773 || (GET_CODE (structure_value_addr) == PLUS
1774 && (XEXP (structure_value_addr, 0)
1775 == virtual_stack_vars_rtx)
1776 && (GET_CODE (XEXP (structure_value_addr, 1))
1777 == CONST_INT)))
1778 {
1779 SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
1780 CONST_AGE_PARM);
1781 }
1782 }
1783 else
1784 {
1785 temp = copy_rtx_and_substitute (loc, map);
1786 subst_constants (&temp, NULL_RTX, map);
1787 apply_change_group ();
1788 emit_move_insn (temp, structure_value_addr);
1789 }
1790 }
1791 }
1792 else if (ignore)
1793 /* We will ignore the result value, so don't look at its structure.
1794 Note that preparations for an aggregate return value
1795 do need to be made (above) even if it will be ignored. */
1796 ;
1797 else if (GET_CODE (loc) == REG)
1798 {
1799 /* The function returns an object in a register and we use the return
1800 value. Set up our target for remapping. */
1801
1802      /* Machine mode the function was declared to return.  */
1803 enum machine_mode departing_mode = TYPE_MODE (type);
1804 /* (Possibly wider) machine mode it actually computes
1805 (for the sake of callers that fail to declare it right).
1806 We have to use the mode of the result's RTL, rather than
1807 its type, since expand_function_start may have promoted it. */
1808 enum machine_mode arriving_mode
1809 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1810 rtx reg_to_map;
1811
1812 /* Don't use MEMs as direct targets because on some machines
1813 substituting a MEM for a REG makes invalid insns.
1814 Let the combiner substitute the MEM if that is valid. */
1815 if (target == 0 || GET_CODE (target) != REG
1816 || GET_MODE (target) != departing_mode)
1817 {
1818 /* Don't make BLKmode registers. If this looks like
1819 a BLKmode object being returned in a register, get
1820 the mode from that, otherwise abort. */
1821 if (departing_mode == BLKmode)
1822 {
1823 if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
1824 {
1825 departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1826 arriving_mode = departing_mode;
1827 }
1828 else
1829 abort();
1830 }
1831
1832 target = gen_reg_rtx (departing_mode);
1833 }
1834
1835 /* If function's value was promoted before return,
1836 avoid machine mode mismatch when we substitute INLINE_TARGET.
1837 But TARGET is what we will return to the caller. */
1838 if (arriving_mode != departing_mode)
1839 {
1840 /* Avoid creating a paradoxical subreg wider than
1841 BITS_PER_WORD, since that is illegal. */
1842 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1843 {
1844 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1845 GET_MODE_BITSIZE (arriving_mode)))
1846		/* Maybe this could be handled by using convert_move ()?  */
1847 abort ();
1848 reg_to_map = gen_reg_rtx (arriving_mode);
1849 target = gen_lowpart (departing_mode, reg_to_map);
1850 }
1851 else
1852 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1853 }
1854 else
1855 reg_to_map = target;
1856
1857 /* Usually, the result value is the machine's return register.
1858 Sometimes it may be a pseudo. Handle both cases. */
1859 if (REG_FUNCTION_VALUE_P (loc))
1860 map->inline_target = reg_to_map;
1861 else
1862 map->reg_map[REGNO (loc)] = reg_to_map;
1863 }
1864 else
1865 abort ();
1866
1867 /* Make a fresh binding contour that we can easily remove. Do this after
1868 expanding our arguments so cleanups are properly scoped. */
1869 pushlevel (0);
1870 expand_start_bindings (0);
1871
1872 /* Initialize label_map. get_label_from_map will actually make
1873 the labels. */
1874 bzero ((char *) &map->label_map [min_labelno],
1875 (max_labelno - min_labelno) * sizeof (rtx));
1876
1877 /* Perform postincrements before actually calling the function. */
1878 emit_queue ();
1879
1880 /* Clean up stack so that variables might have smaller offsets. */
1881 do_pending_stack_adjust ();
1882
1883 /* Save a copy of the location of const_equiv_varray for
1884 mark_stores, called via note_stores. */
1885 global_const_equiv_varray = map->const_equiv_varray;
1886
1887 /* If the called function does an alloca, save and restore the
1888 stack pointer around the call. This saves stack space, but
1889 also is required if this inline is being done between two
1890 pushes. */
1891 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1892 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1893
1894 /* Now copy the insns one by one. Do this in two passes, first the insns and
1895 then their REG_NOTES, just like save_for_inline. */
1896
1897 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1898
1899 for (insn = insns; insn; insn = NEXT_INSN (insn))
1900 {
1901 rtx copy, pattern, set;
1902
1903 map->orig_asm_operands_vector = 0;
1904
1905 switch (GET_CODE (insn))
1906 {
1907 case INSN:
1908 pattern = PATTERN (insn);
1909 set = single_set (insn);
1910 copy = 0;
1911 if (GET_CODE (pattern) == USE
1912 && GET_CODE (XEXP (pattern, 0)) == REG
1913 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1914 /* The (USE (REG n)) at return from the function should
1915 be ignored since we are changing (REG n) into
1916 inline_target. */
1917 break;
1918
1919 /* If the inline fn needs eh context, make sure that
1920 the current fn has one. */
1921 if (GET_CODE (pattern) == USE
1922 && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
1923 get_eh_context ();
1924
1925 /* Ignore setting a function value that we don't want to use. */
1926 if (map->inline_target == 0
1927 && set != 0
1928 && GET_CODE (SET_DEST (set)) == REG
1929 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1930 {
1931 if (volatile_refs_p (SET_SRC (set)))
1932 {
1933 rtx new_set;
1934
1935 /* If we must not delete the source,
1936 load it into a new temporary. */
1937 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1938
1939 new_set = single_set (copy);
1940 if (new_set == 0)
1941 abort ();
1942
1943 SET_DEST (new_set)
1944 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1945 }
1946 /* If the source and destination are the same and it
1947 has a note on it, keep the insn. */
1948 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1949 && REG_NOTES (insn) != 0)
1950 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1951 else
1952 break;
1953 }
1954
1955 /* If this is setting the static chain rtx, omit it. */
1956 else if (static_chain_value != 0
1957 && set != 0
1958 && GET_CODE (SET_DEST (set)) == REG
1959 && rtx_equal_p (SET_DEST (set),
1960 static_chain_incoming_rtx))
1961 break;
1962
1963 /* If this is setting the static chain pseudo, set it from
1964 the value we want to give it instead. */
1965 else if (static_chain_value != 0
1966 && set != 0
1967 && rtx_equal_p (SET_SRC (set),
1968 static_chain_incoming_rtx))
1969 {
1970 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1971
1972 copy = emit_move_insn (newdest, static_chain_value);
1973 static_chain_value = 0;
1974 }
1975 else
1976 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1977 /* REG_NOTES will be copied later. */
1978
1979#ifdef HAVE_cc0
1980 /* If this insn is setting CC0, it may need to look at
1981 the insn that uses CC0 to see what type of insn it is.
1982 In that case, the call to recog via validate_change will
1983 fail. So don't substitute constants here. Instead,
1984 do it when we emit the following insn.
1985
1986 For example, see the pyr.md file. That machine has signed and
1987 unsigned compares. The compare patterns must check the
1988	     following branch insn to see what kind of compare to
1989 emit.
1990
1991 If the previous insn set CC0, substitute constants on it as
1992 well. */
1993 if (sets_cc0_p (PATTERN (copy)) != 0)
1994 cc0_insn = copy;
1995 else
1996 {
1997 if (cc0_insn)
1998 try_constants (cc0_insn, map);
1999 cc0_insn = 0;
2000 try_constants (copy, map);
2001 }
2002#else
2003 try_constants (copy, map);
2004#endif
2005 break;
2006
2007 case JUMP_INSN:
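	  /* A return from the inlined function becomes a jump to a local
	     label that is emitted after the copied body.  */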
2008 if (GET_CODE (PATTERN (insn)) == RETURN
2009 || (GET_CODE (PATTERN (insn)) == PARALLEL
2010 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
2011 {
2012 if (local_return_label == 0)
2013 local_return_label = gen_label_rtx ();
2014 pattern = gen_jump (local_return_label);
2015 }
2016 else
2017 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
2018
2019 copy = emit_jump_insn (pattern);
2020
2021#ifdef HAVE_cc0
2022 if (cc0_insn)
2023 try_constants (cc0_insn, map);
2024 cc0_insn = 0;
2025#endif
2026 try_constants (copy, map);
2027
2028	  /* If this used to be a conditional jump insn whose branch
2029	     direction is now known, we must do something special.  */
2030 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
2031 {
2032#ifdef HAVE_cc0
2033 /* The previous insn set cc0 for us. So delete it. */
2034 delete_insn (PREV_INSN (copy));
2035#endif
2036
2037 /* If this is now a no-op, delete it. */
2038 if (map->last_pc_value == pc_rtx)
2039 {
2040 delete_insn (copy);
2041 copy = 0;
2042 }
2043 else
2044		/* Otherwise, this is an unconditional jump so we must put a
2045 BARRIER after it. We could do some dead code elimination
2046 here, but jump.c will do it just as well. */
2047 emit_barrier ();
2048 }
2049 break;
2050
2051 case CALL_INSN:
2052 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
2053 copy = emit_call_insn (pattern);
2054
2055 /* Because the USAGE information potentially contains objects other
2056 than hard registers, we need to copy it. */
2057 CALL_INSN_FUNCTION_USAGE (copy)
2058 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
2059
2060#ifdef HAVE_cc0
2061 if (cc0_insn)
2062 try_constants (cc0_insn, map);
2063 cc0_insn = 0;
2064#endif
2065 try_constants (copy, map);
2066
2067 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
2068 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2069 VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
2070 break;
2071
2072 case CODE_LABEL:
2073 copy = emit_label (get_label_from_map (map,
2074 CODE_LABEL_NUMBER (insn)));
2075 LABEL_NAME (copy) = LABEL_NAME (insn);
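	  /* Equivalences recorded in straight-line code may not hold on
	     paths that jump to this label, so age them out; only parameter
	     equivalences survive past here.  */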
2076 map->const_age++;
2077 break;
2078
2079 case BARRIER:
2080 copy = emit_barrier ();
2081 break;
2082
2083 case NOTE:
2084 /* It is important to discard function-end and function-beg notes,
2085 so we have only one of each in the current function.
2086 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
2087 deleted these in the copy used for continuing compilation,
2088 not the copy used for inlining). */
2089 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
2090 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
2091 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
2092 {
2093 copy = emit_note (NOTE_SOURCE_FILE (insn),
2094 NOTE_LINE_NUMBER (insn));
2095 if (copy
2096 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
2097 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
2098 {
2099 rtx label
2100 = get_label_from_map (map, NOTE_BLOCK_NUMBER (copy));
2101
2102		  /* We have to duplicate the handlers for the original.  */
2103 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
2104 {
2105 /* We need to duplicate the handlers for the EH region
2106		     and we need to indicate where the label map is.  */
2107 eif_eh_map = map;
2108 duplicate_eh_handlers (NOTE_BLOCK_NUMBER (copy),
2109 CODE_LABEL_NUMBER (label),
2110 expand_inline_function_eh_labelmap);
2111 }
2112
2113 /* We have to forward these both to match the new exception
2114 region. */
2115 NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
2116 }
2117 }
2118 else
2119 copy = 0;
2120 break;
2121
2122 default:
2123 abort ();
2124 break;
2125 }
2126
2127 if (copy)
2128 RTX_INTEGRATED_P (copy) = 1;
2129
2130 map->insn_map[INSN_UID (insn)] = copy;
2131 }
2132
2133 /* Now copy the REG_NOTES. Increment const_age, so that only constants
2134 from parameters can be substituted in. These are the only ones that
2135 are valid across the entire function. */
2136 map->const_age++;
2137 for (insn = insns; insn; insn = NEXT_INSN (insn))
2138 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2139 && map->insn_map[INSN_UID (insn)]
2140 && REG_NOTES (insn))
2141 {
2142 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
2143 /* We must also do subst_constants, in case one of our parameters
2144 has const type and constant value. */
2145 subst_constants (&tem, NULL_RTX, map);
2146 apply_change_group ();
2147 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
2148 }
2149
2150 if (local_return_label)
2151 emit_label (local_return_label);
2152
2153 /* Restore the stack pointer if we saved it above. */
2154 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
2155 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
2156
2157 /* Make copies of the decls of the symbols in the inline function, so that
2158 the copies of the variables get declared in the current function. Set
2159   up things so that lookup_static_chain knows to interpret registers
2160 in SAVE_EXPRs for TYPE_SIZEs as local. */
2161
2162 inline_function_decl = fndecl;
2163 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
2164 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
2165 inline_function_decl = 0;
2166
2167 /* End the scope containing the copied formal parameter variables
2168 and copied LABEL_DECLs. */
2169
2170 expand_end_bindings (getdecls (), 1, 1);
2171 block = poplevel (1, 1, 0);
2172 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
2173 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
2174 poplevel (0, 0, 0);
2175
2176 /* Must mark the line number note after inlined functions as a repeat, so
2177 that the test coverage code can avoid counting the call twice. This
2178 just tells the code to ignore the immediately following line note, since
2179 there already exists a copy of this note before the expanded inline call.
2180 This line number note is still needed for debugging though, so we can't
2181 delete it. */
2182 if (flag_test_coverage)
2183 emit_note (0, NOTE_REPEATED_LINE_NUMBER);
2184
2185 emit_line_note (input_filename, lineno);
2186
2187 /* If the function returns a BLKmode object in a register, copy it
2188 out of the temp register into a BLKmode memory object. */
2189 if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
2190 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
2191 target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));
2192
2193 if (structure_value_addr)
2194 {
2195 target = gen_rtx_MEM (TYPE_MODE (type),
2196 memory_address (TYPE_MODE (type),
2197 structure_value_addr));
2198 MEM_SET_IN_STRUCT_P (target, 1);
2199 }
2200
2201 /* Make sure we free the things we explicitly allocated with xmalloc. */
2202 if (real_label_map)
2203 free (real_label_map);
2204 if (map)
2205 VARRAY_FREE (map->const_equiv_varray);
2206
2207 return target;
2208}
2209\f
2210/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
2211 push all of those decls and give each one the corresponding home. */
2212
2213static void
2214integrate_parm_decls (args, map, arg_vector)
2215 tree args;
2216 struct inline_remap *map;
2217 rtvec arg_vector;
2218{
2219 register tree tail;
2220 register int i;
2221
2222 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
2223 {
2224 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
2225 TREE_TYPE (tail));
2226 rtx new_decl_rtl
2227 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
2228
2229 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
2230 /* We really should be setting DECL_INCOMING_RTL to something reasonable
2231 here, but that's going to require some more work. */
2232 /* DECL_INCOMING_RTL (decl) = ?; */
2233 /* These args would always appear unused, if not for this. */
2234 TREE_USED (decl) = 1;
2235 /* Prevent warning for shadowing with these. */
2236 DECL_ABSTRACT_ORIGIN (decl) = DECL_ORIGIN (tail);
2237 pushdecl (decl);
2238 /* Fully instantiate the address with the equivalent form so that the
2239 debugging information contains the actual register, instead of the
2240 virtual register. Do this by not passing an insn to
2241 subst_constants. */
2242 subst_constants (&new_decl_rtl, NULL_RTX, map);
2243 apply_change_group ();
2244 DECL_RTL (decl) = new_decl_rtl;
2245 }
2246}
2247
2248/* Given a BLOCK node LET, push decls and levels so as to construct in the
2249 current function a tree of contexts isomorphic to the one that is given.
2250
2251 LEVEL indicates how far down into the BLOCK tree is the node we are
2252 currently traversing. It is always zero except for recursive calls.
2253
2254 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
2255 registers used in the DECL_RTL field should be remapped. If it is zero,
2256 no mapping is necessary. */
2257
2258static void
2259integrate_decl_tree (let, level, map)
2260 tree let;
2261 int level;
2262 struct inline_remap *map;
2263{
2264 tree t, node;
2265
2266 if (level > 0)
2267 pushlevel (0);
2268
2269 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2270 {
2271 tree d;
2272
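      /* Copy the decl on the saveable obstack so that it outlives the
	 current function's temporary storage.  */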
2273 push_obstacks_nochange ();
2274 saveable_allocation ();
2275 d = copy_and_set_decl_abstract_origin (t);
2276 pop_obstacks ();
2277
2278 if (DECL_RTL (t) != 0)
2279 {
2280 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2281 /* Fully instantiate the address with the equivalent form so that the
2282 debugging information contains the actual register, instead of the
2283 virtual register. Do this by not passing an insn to
2284 subst_constants. */
2285 subst_constants (&DECL_RTL (d), NULL_RTX, map);
2286 apply_change_group ();
2287 }
2288 /* These args would always appear unused, if not for this. */
2289 TREE_USED (d) = 1;
2290
2291 if (DECL_LANG_SPECIFIC (d))
2292 copy_lang_decl (d);
2293
2294 pushdecl (d);
2295 }
2296
2297 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2298 integrate_decl_tree (t, level + 1, map);
2299
2300 if (level > 0)
2301 {
2302 node = poplevel (1, 0, 0);
2303 if (node)
2304 {
2305 TREE_USED (node) = TREE_USED (let);
2306 BLOCK_ABSTRACT_ORIGIN (node) = let;
2307 }
2308 }
2309}
2310
2311/* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
2312 through save_constants. */
2313
2314static void
2315save_constants_in_decl_trees (let)
2316 tree let;
2317{
2318 tree t;
2319
2320 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2321 if (DECL_RTL (t) != 0)
2322 save_constants (&DECL_RTL (t));
2323
2324 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2325 save_constants_in_decl_trees (t);
2326}
2327\f
2328/* Create a new copy of an rtx.
2329 Recursively copies the operands of the rtx,
2330 except for those few rtx codes that are sharable.
2331
2332   We always return an rtx that is similar to the incoming rtx, with the
2333 exception of possibly changing a REG to a SUBREG or vice versa. No
2334 rtl is ever emitted.
2335
2336 Handle constants that need to be placed in the constant pool by
2337 calling `force_const_mem'. */
2338
2339rtx
2340copy_rtx_and_substitute (orig, map)
2341 register rtx orig;
2342 struct inline_remap *map;
2343{
2344 register rtx copy, temp;
2345 register int i, j;
2346 register RTX_CODE code;
2347 register enum machine_mode mode;
2348 register char *format_ptr;
2349 int regno;
2350
2351 if (orig == 0)
2352 return 0;
2353
2354 code = GET_CODE (orig);
2355 mode = GET_MODE (orig);
2356
2357 switch (code)
2358 {
2359 case REG:
2360 /* If the stack pointer register shows up, it must be part of
2361 stack-adjustments (*not* because we eliminated the frame pointer!).
2362 Small hard registers are returned as-is. Pseudo-registers
2363 go through their `reg_map'. */
2364 regno = REGNO (orig);
2365 if (regno <= LAST_VIRTUAL_REGISTER)
2366 {
2367 /* Some hard registers are also mapped,
2368 but others are not translated. */
2369 if (map->reg_map[regno] != 0)
2370 return map->reg_map[regno];
2371
2372 /* If this is the virtual frame pointer, make space in current
2373 function's stack frame for the stack frame of the inline function.
2374
2375 Copy the address of this area into a pseudo. Map
2376 virtual_stack_vars_rtx to this pseudo and set up a constant
2377 equivalence for it to be the address. This will substitute the
2378 address into insns where it can be substituted and use the new
2379 pseudo where it can't. */
2380 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2381 {
2382 rtx loc, seq;
2383 int size = DECL_FRAME_SIZE (map->fndecl);
2384
2385#ifdef FRAME_GROWS_DOWNWARD
2386 /* In this case, virtual_stack_vars_rtx points to one byte
2387 higher than the top of the frame area. So make sure we
2388 allocate a big enough chunk to keep the frame pointer
2389 aligned like a real one. */
2390 size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2391#endif
2392 start_sequence ();
2393 loc = assign_stack_temp (BLKmode, size, 1);
2394 loc = XEXP (loc, 0);
2395#ifdef FRAME_GROWS_DOWNWARD
2396 /* In this case, virtual_stack_vars_rtx points to one byte
2397 higher than the top of the frame area. So compute the offset
2398 to one byte higher than our substitute frame. */
2399 loc = plus_constant (loc, size);
2400#endif
2401 map->reg_map[regno] = temp
2402 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2403
2404#ifdef STACK_BOUNDARY
2405 mark_reg_pointer (map->reg_map[regno],
2406 STACK_BOUNDARY / BITS_PER_UNIT);
2407#endif
2408
2409 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
2410
2411 seq = gen_sequence ();
2412 end_sequence ();
2413 emit_insn_after (seq, map->insns_at_start);
2414 return temp;
2415 }
2416 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2417 {
2418 /* Do the same for a block to contain any arguments referenced
2419 in memory. */
2420 rtx loc, seq;
2421 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2422
2423 start_sequence ();
2424 loc = assign_stack_temp (BLKmode, size, 1);
2425 loc = XEXP (loc, 0);
2426 /* When arguments grow downward, the virtual incoming
2427 args pointer points to the top of the argument block,
2428 so the remapped location better do the same. */
2429#ifdef ARGS_GROW_DOWNWARD
2430 loc = plus_constant (loc, size);
2431#endif
2432 map->reg_map[regno] = temp
2433 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2434
2435#ifdef STACK_BOUNDARY
2436 mark_reg_pointer (map->reg_map[regno],
2437 STACK_BOUNDARY / BITS_PER_UNIT);
2438#endif
2439
2440 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
2441
2442 seq = gen_sequence ();
2443 end_sequence ();
2444 emit_insn_after (seq, map->insns_at_start);
2445 return temp;
2446 }
2447 else if (REG_FUNCTION_VALUE_P (orig))
2448 {
2449 /* This is a reference to the function return value. If
2450 the function doesn't have a return value, error. If the
2451 mode doesn't agree, and it ain't BLKmode, make a SUBREG. */
2452 if (map->inline_target == 0)
2453 /* Must be unrolling loops or replicating code if we
2454 reach here, so return the register unchanged. */
2455 return orig;
2456 else if (GET_MODE (map->inline_target) != BLKmode
2457 && mode != GET_MODE (map->inline_target))
2458 return gen_lowpart (mode, map->inline_target);
2459 else
2460 return map->inline_target;
2461 }
2462 return orig;
2463 }
2464 if (map->reg_map[regno] == NULL)
2465 {
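	  /* First reference to this pseudo: make a fresh register in the
	     current function and carry over the attribute bits.  */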
2466 map->reg_map[regno] = gen_reg_rtx (mode);
2467 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2468 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2469 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2470 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2471
2472 if (map->regno_pointer_flag[regno])
2473 mark_reg_pointer (map->reg_map[regno],
2474 map->regno_pointer_align[regno]);
2475 }
2476 return map->reg_map[regno];
2477
2478 case SUBREG:
2479 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2480 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2481 if (GET_CODE (copy) == SUBREG)
2482 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
2483 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2484 else if (GET_CODE (copy) == CONCAT)
2485 {
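	  /* The remapped value is a complex CONCAT; pick out whichever part
	     the original SUBREG referred to.  */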
2486 rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);
2487
2488 if (GET_MODE (retval) == GET_MODE (orig))
2489 return retval;
2490 else
2491 return gen_rtx_SUBREG (GET_MODE (orig), retval,
2492 (SUBREG_WORD (orig) %
2493 (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
2494 / (unsigned) UNITS_PER_WORD)));
2495 }
2496 else
2497 return gen_rtx_SUBREG (GET_MODE (orig), copy,
2498 SUBREG_WORD (orig));
2499
2500 case ADDRESSOF:
2501 copy = gen_rtx_ADDRESSOF (mode,
2502 copy_rtx_and_substitute (XEXP (orig, 0), map), 0);
2503 SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
2504 regno = ADDRESSOF_REGNO (orig);
2505 if (map->reg_map[regno])
2506 regno = REGNO (map->reg_map[regno]);
2507 else if (regno > LAST_VIRTUAL_REGISTER)
2508 {
2509 temp = XEXP (orig, 0);
2510 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2511 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2512 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2513 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2514 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2515
2516 if (map->regno_pointer_flag[regno])
2517 mark_reg_pointer (map->reg_map[regno],
2518 map->regno_pointer_align[regno]);
2519 regno = REGNO (map->reg_map[regno]);
2520 }
2521 ADDRESSOF_REGNO (copy) = regno;
2522 return copy;
2523
2524 case USE:
2525 case CLOBBER:
2526 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2527 to (use foo) if the original insn didn't have a subreg.
2528 Removing the subreg distorts the VAX movstrhi pattern
2529 by changing the mode of an operand. */
2530 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2531 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2532 copy = SUBREG_REG (copy);
2533 return gen_rtx_fmt_e (code, VOIDmode, copy);
2534
2535 case CODE_LABEL:
2536 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2537 = LABEL_PRESERVE_P (orig);
2538 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2539
2540 case LABEL_REF:
2541 copy = gen_rtx_LABEL_REF (mode,
2542 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2543 : get_label_from_map (map,
2544 CODE_LABEL_NUMBER (XEXP (orig, 0))));
2545 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2546
2547 /* The fact that this label was previously nonlocal does not mean
2548 it still is, so we must check if it is within the range of
2549 this function's labels. */
2550 LABEL_REF_NONLOCAL_P (copy)
2551 = (LABEL_REF_NONLOCAL_P (orig)
2552 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2553 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2554
2555 /* If we have made a nonlocal label local, it means that this
2556 inlined call will be referring to our nonlocal goto handler.
2557 So make sure we create one for this block; we normally would
2558 not since this is not otherwise considered a "call". */
2559 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2560 function_call_count++;
2561
2562 return copy;
2563
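    /* These codes are shared rather than copied (see the comment at the
       head of this function), so return them unchanged.  */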
2564 case PC:
2565 case CC0:
2566 case CONST_INT:
2567 return orig;
2568
2569 case SYMBOL_REF:
2570 /* Symbols which represent the address of a label stored in the constant
2571 pool must be modified to point to a constant pool entry for the
2572 remapped label. Otherwise, symbols are returned unchanged. */
2573 if (CONSTANT_POOL_ADDRESS_P (orig))
2574 {
2575 rtx constant = get_pool_constant (orig);
2576 if (GET_CODE (constant) == LABEL_REF)
2577 return XEXP (force_const_mem (GET_MODE (orig),
2578 copy_rtx_and_substitute (constant,
2579 map)),
2580 0);
2581 }
2582 else
2583 if (SYMBOL_REF_NEED_ADJUST (orig))
2584 {
2585 eif_eh_map = map;
2586 return rethrow_symbol_map (orig,
2587 expand_inline_function_eh_labelmap);
2588 }
2589
2590 return orig;
2591
2592 case CONST_DOUBLE:
2593      /* We have to make a new copy of this CONST_DOUBLE because we don't want
2594 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2595 duplicate of a CONST_DOUBLE we have already seen. */
2596 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2597 {
2598 REAL_VALUE_TYPE d;
2599
2600 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2601 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2602 }
2603 else
2604 return immed_double_const (CONST_DOUBLE_LOW (orig),
2605 CONST_DOUBLE_HIGH (orig), VOIDmode);
2606
2607 case CONST:
2608      /* Make a new constant pool entry for a constant
2609 that was in the pool of the inline function. */
2610 if (RTX_INTEGRATED_P (orig))
2611 {
2612 /* If this was an address of a constant pool entry that itself
2613 had to be placed in the constant pool, it might not be a
2614 valid address. So the recursive call below might turn it
2615 into a register. In that case, it isn't a constant any
2616 more, so return it. This has the potential of changing a
2617	     MEM into a REG, but we'll assume that it is safe.  */
2618 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2619 if (! CONSTANT_P (temp))
2620 return temp;
2621 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2622 }
2623 break;
2624
2625 case ADDRESS:
2626 /* If from constant pool address, make new constant pool entry and
2627 return its address. */
2628 if (! RTX_INTEGRATED_P (orig))
2629 abort ();
2630
2631 temp
2632 = force_const_mem (GET_MODE (XEXP (orig, 0)),
2633 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2634 map));
2635
2636#if 0
2637 /* Legitimizing the address here is incorrect.
2638
2639 The only ADDRESS rtx's that can reach here are ones created by
2640 save_constants. Hence the operand of the ADDRESS is always valid
2641 in this position of the instruction, since the original rtx without
2642 the ADDRESS was valid.
2643
2644 The reason we don't legitimize the address here is that on the
2645 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2646 This code forces the operand of the address to a register, which
2647 fails because we can not take the HIGH part of a register.
2648
2649 Also, change_address may create new registers. These registers
2650 will not have valid reg_map entries. This can cause try_constants()
2651      to fail because it assumes that all registers in the rtx have valid
2652 reg_map entries, and it may end up replacing one of these new
2653 registers with junk. */
2654
2655 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2656 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2657#endif
2658
2659 temp = XEXP (temp, 0);
2660
2661#ifdef POINTERS_EXTEND_UNSIGNED
2662 if (GET_MODE (temp) != GET_MODE (orig))
2663 temp = convert_memory_address (GET_MODE (orig), temp);
2664#endif
2665
2666 return temp;
2667
2668 case ASM_OPERANDS:
2669 /* If a single asm insn contains multiple output operands
2670 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2671 We must make sure that the copied insn continues to share it. */
2672 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2673 {
2674 copy = rtx_alloc (ASM_OPERANDS);
2675 copy->volatil = orig->volatil;
2676 XSTR (copy, 0) = XSTR (orig, 0);
2677 XSTR (copy, 1) = XSTR (orig, 1);
2678 XINT (copy, 2) = XINT (orig, 2);
2679 XVEC (copy, 3) = map->copy_asm_operands_vector;
2680 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2681 XSTR (copy, 5) = XSTR (orig, 5);
2682 XINT (copy, 6) = XINT (orig, 6);
2683 return copy;
2684 }
2685 break;
2686
2687 case CALL:
2688 /* This is given special treatment because the first
2689 operand of a CALL is a (MEM ...) which may get
2690 forced into a register for cse. This is undesirable
2691 if function-address cse isn't wanted or if we won't do cse. */
2692#ifndef NO_FUNCTION_CSE
2693 if (! (optimize && ! flag_no_function_cse))
2694#endif
2695 return gen_rtx_CALL (GET_MODE (orig),
2696 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2697 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2698 copy_rtx_and_substitute (XEXP (orig, 1), map));
2699 break;
2700
2701#if 0
2702 /* Must be ifdefed out for loop unrolling to work. */
2703 case RETURN:
2704 abort ();
2705#endif
2706
2707 case SET:
2708 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2709 Adjust the setting by the offset of the area we made.
2710 If the nonlocal goto is into the current function,
2711 this will result in unnecessarily bad code, but should work. */
2712 if (SET_DEST (orig) == virtual_stack_vars_rtx
2713 || SET_DEST (orig) == virtual_incoming_args_rtx)
2714 {
2715 /* In case a translation hasn't occurred already, make one now. */
2716 rtx equiv_reg;
2717 rtx equiv_loc;
2718 HOST_WIDE_INT loc_offset;
2719
2720 copy_rtx_and_substitute (SET_DEST (orig), map);
2721 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2722 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray, REGNO (equiv_reg)).rtx;
2723 loc_offset
2724 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2725 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2726 force_operand
2727 (plus_constant
2728 (copy_rtx_and_substitute (SET_SRC (orig), map),
2729 - loc_offset),
2730 NULL_RTX));
2731 }
2732 break;
2733
2734 case MEM:
2735 copy = rtx_alloc (MEM);
2736 PUT_MODE (copy, mode);
2737 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2738 MEM_COPY_ATTRIBUTES (copy, orig);
2739 MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);
2740
2741 /* If doing function inlining, this MEM might not be const in the
2742 function that it is being inlined into, and thus may not be
2743 unchanging after function inlining. Constant pool references are
2744 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2745 for them. */
2746 if (! map->integrating)
2747 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2748
2749 return copy;
2750
2751 default:
2752 break;
2753 }
2754
2755 copy = rtx_alloc (code);
2756 PUT_MODE (copy, mode);
2757 copy->in_struct = orig->in_struct;
2758 copy->volatil = orig->volatil;
2759 copy->unchanging = orig->unchanging;
2760
2761 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2762
2763 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2764 {
2765 switch (*format_ptr++)
2766 {
2767 case '0':
2768 XEXP (copy, i) = XEXP (orig, i);
2769 break;
2770
2771 case 'e':
2772 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2773 break;
2774
2775 case 'u':
2776 /* Change any references to old-insns to point to the
2777 corresponding copied insns. */
2778 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2779 break;
2780
2781 case 'E':
2782 XVEC (copy, i) = XVEC (orig, i);
2783 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2784 {
2785 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2786 for (j = 0; j < XVECLEN (copy, i); j++)
2787 XVECEXP (copy, i, j)
2788 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2789 }
2790 break;
2791
2792 case 'w':
2793 XWINT (copy, i) = XWINT (orig, i);
2794 break;
2795
2796 case 'i':
2797 XINT (copy, i) = XINT (orig, i);
2798 break;
2799
2800 case 's':
2801 XSTR (copy, i) = XSTR (orig, i);
2802 break;
2803
2804 default:
2805 abort ();
2806 }
2807 }
2808
2809 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2810 {
2811 map->orig_asm_operands_vector = XVEC (orig, 3);
2812 map->copy_asm_operands_vector = XVEC (copy, 3);
2813 map->copy_asm_constraints_vector = XVEC (copy, 4);
2814 }
2815
2816 return copy;
2817}
2818\f
2819/* Substitute known constant values into INSN, if that is valid. */
2820
2821void
2822try_constants (insn, map)
2823 rtx insn;
2824 struct inline_remap *map;
2825{
2826 int i;
2827
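  /* Discard equivalences recorded for a previous insn; subst_constants
     will refill equiv_sets for this one.  */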
2828 map->num_sets = 0;
2829 subst_constants (&PATTERN (insn), insn, map);
2830
2831 /* Apply the changes if they are valid; otherwise discard them. */
2832 apply_change_group ();
2833
2834 /* Show we don't know the value of anything stored or clobbered. */
2835 note_stores (PATTERN (insn), mark_stores);
2836 map->last_pc_value = 0;
2837#ifdef HAVE_cc0
2838 map->last_cc0_value = 0;
2839#endif
2840
2841 /* Set up any constant equivalences made in this insn. */
2842 for (i = 0; i < map->num_sets; i++)
2843 {
2844 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2845 {
2846 int regno = REGNO (map->equiv_sets[i].dest);
2847
2848 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2849 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2850	      /* The following clause is a hack to make the case work where
2851		 GNU C++ reassigns a variable to make cse work right.  */
2852 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2853 regno).rtx,
2854 map->equiv_sets[i].equiv))
2855 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2856 map->equiv_sets[i].equiv, map->const_age);
2857 }
2858 else if (map->equiv_sets[i].dest == pc_rtx)
2859 map->last_pc_value = map->equiv_sets[i].equiv;
2860#ifdef HAVE_cc0
2861 else if (map->equiv_sets[i].dest == cc0_rtx)
2862 map->last_cc0_value = map->equiv_sets[i].equiv;
2863#endif
2864 }
2865}
2866\f
2867/* Substitute known constants for pseudo regs in the contents of LOC,
2868 which are part of INSN.
2869 If INSN is zero, the substitution should always be done (this is used to
2870 update DECL_RTL).
2871 These changes are taken out by try_constants if the result is not valid.
2872
2873 Note that we are more concerned with determining when the result of a SET
2874 is a constant, for further propagation, than actually inserting constants
2875 into insns; cse will do the latter task better.
2876
2877   This function is also used to adjust the addresses of items previously addressed
2878 via the virtual stack variable or virtual incoming arguments registers. */
2879
2880static void
2881subst_constants (loc, insn, map)
2882 rtx *loc;
2883 rtx insn;
2884 struct inline_remap *map;
2885{
2886 rtx x = *loc;
2887 register int i;
2888 register enum rtx_code code;
2889 register char *format_ptr;
2890 int num_changes = num_validated_changes ();
2891 rtx new = 0;
2892 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2893
2894 code = GET_CODE (x);
2895
2896 switch (code)
2897 {
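    /* None of these can contain a pseudo register, so there is nothing
       to substitute.  */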
2898 case PC:
2899 case CONST_INT:
2900 case CONST_DOUBLE:
2901 case SYMBOL_REF:
2902 case CONST:
2903 case LABEL_REF:
2904 case ADDRESS:
2905 return;
2906
2907#ifdef HAVE_cc0
2908 case CC0:
2909 validate_change (insn, loc, map->last_cc0_value, 1);
2910 return;
2911#endif
2912
2913 case USE:
2914 case CLOBBER:
2915 /* The only thing we can do with a USE or CLOBBER is possibly do
2916 some substitutions in a MEM within it. */
2917 if (GET_CODE (XEXP (x, 0)) == MEM)
2918 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2919 return;
2920
2921 case REG:
2922 /* Substitute for parms and known constants. Don't replace
2923 hard regs used as user variables with constants. */
2924 {
2925 int regno = REGNO (x);
2926 struct const_equiv_data *p;
2927
2928 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2929 && regno < VARRAY_SIZE (map->const_equiv_varray)
2930 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2931 p->rtx != 0)
2932 && p->age >= map->const_age)
2933 validate_change (insn, loc, p->rtx, 1);
2934 return;
2935 }
2936
2937 case SUBREG:
2938 /* SUBREG applied to something other than a reg
2939 should be treated as ordinary, since that must
2940 be a special hack and we don't know how to treat it specially.
2941 Consider for example mulsidi3 in m68k.md.
2942 Ordinary SUBREG of a REG needs this special treatment. */
2943 if (GET_CODE (SUBREG_REG (x)) == REG)
2944 {
2945 rtx inner = SUBREG_REG (x);
2946 rtx new = 0;
2947
2948 /* We can't call subst_constants on &SUBREG_REG (x) because any
2949	     constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
2950 see what is inside, try to form the new SUBREG and see if that is
2951 valid. We handle two cases: extracting a full word in an
2952 integral mode and extracting the low part. */
2953 subst_constants (&inner, NULL_RTX, map);
2954
2955 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2956 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2957 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2958 new = operand_subword (inner, SUBREG_WORD (x), 0,
2959 GET_MODE (SUBREG_REG (x)));
2960
2961 cancel_changes (num_changes);
2962 if (new == 0 && subreg_lowpart_p (x))
2963 new = gen_lowpart_common (GET_MODE (x), inner);
2964
2965 if (new)
2966 validate_change (insn, loc, new, 1);
2967
2968 return;
2969 }
2970 break;
2971
2972 case MEM:
2973 subst_constants (&XEXP (x, 0), insn, map);
2974
2975 /* If a memory address got spoiled, change it back. */
2976 if (insn != 0 && num_validated_changes () != num_changes
2977 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2978 cancel_changes (num_changes);
2979 return;
2980
2981 case SET:
2982 {
2983 /* Substitute constants in our source, and in any arguments to a
2984	 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2985 itself. */
2986 rtx *dest_loc = &SET_DEST (x);
2987 rtx dest = *dest_loc;
2988 rtx src, tem;
2989
2990 subst_constants (&SET_SRC (x), insn, map);
2991 src = SET_SRC (x);
2992
2993 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2994 || GET_CODE (*dest_loc) == SUBREG
2995 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2996 {
2997 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2998 {
2999 subst_constants (&XEXP (*dest_loc, 1), insn, map);
3000 subst_constants (&XEXP (*dest_loc, 2), insn, map);
3001 }
3002 dest_loc = &XEXP (*dest_loc, 0);
3003 }
3004
3005	    /* Do substitution in the address of a destination in memory.  */
3006 if (GET_CODE (*dest_loc) == MEM)
3007 subst_constants (&XEXP (*dest_loc, 0), insn, map);
3008
3009	    /* Check for the case where DEST is a SUBREG, both it and the
3010	       underlying register are no larger than one word, and the SUBREG
3011	       is the wider of the two.  In that case, we are really setting the
3012	       underlying register to the source converted to the mode of DEST, so indicate that.  */
3013 if (GET_CODE (dest) == SUBREG
3014 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
3015 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
3016 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
3017 <= GET_MODE_SIZE (GET_MODE (dest)))
3018 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
3019 src)))
3020 src = tem, dest = SUBREG_REG (dest);
3021
3022	    /* If storing a recognizable value, save it for later recording.  */
3023 if ((map->num_sets < MAX_RECOG_OPERANDS)
3024 && (CONSTANT_P (src)
3025 || (GET_CODE (src) == REG
3026 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
3027 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
3028 || (GET_CODE (src) == PLUS
3029 && GET_CODE (XEXP (src, 0)) == REG
3030 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
3031 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
3032 && CONSTANT_P (XEXP (src, 1)))
3033 || GET_CODE (src) == COMPARE
3034#ifdef HAVE_cc0
3035 || dest == cc0_rtx
3036#endif
3037 || (dest == pc_rtx
3038 && (src == pc_rtx || GET_CODE (src) == RETURN
3039 || GET_CODE (src) == LABEL_REF))))
3040 {
3041	    /* Normally, this copy won't do anything.  But if SRC is a COMPARE,
3042 it will cause us to save the COMPARE with any constants
3043 substituted, which is what we want for later. */
3044 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
3045 map->equiv_sets[map->num_sets++].dest = dest;
3046 }
3047 }
3048 return;
3049
3050 default:
3051 break;
3052 }
3053
3054 format_ptr = GET_RTX_FORMAT (code);
3055
3056 /* If the first operand is an expression, save its mode for later. */
3057 if (*format_ptr == 'e')
3058 op0_mode = GET_MODE (XEXP (x, 0));
3059
3060 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3061 {
3062 switch (*format_ptr++)
3063 {
3064 case '0':
3065 break;
3066
3067 case 'e':
3068 if (XEXP (x, i))
3069 subst_constants (&XEXP (x, i), insn, map);
3070 break;
3071
3072 case 'u':
3073 case 'i':
3074 case 's':
3075 case 'w':
3076 break;
3077
3078 case 'E':
3079 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
3080 {
3081 int j;
3082 for (j = 0; j < XVECLEN (x, i); j++)
3083 subst_constants (&XVECEXP (x, i, j), insn, map);
3084 }
3085 break;
3086
3087 default:
3088 abort ();
3089 }
3090 }
3091
3092 /* If this is a commutative operation, move a constant to the second
3093 operand unless the second operand is already a CONST_INT. */
3094 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
3095 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3096 {
3097 rtx tem = XEXP (x, 0);
3098 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3099 validate_change (insn, &XEXP (x, 1), tem, 1);
3100 }
3101
3102 /* Simplify the expression in case we put in some constants. */
3103 switch (GET_RTX_CLASS (code))
3104 {
3105 case '1':
3106 if (op0_mode == MAX_MACHINE_MODE)
3107 abort ();
3108 new = simplify_unary_operation (code, GET_MODE (x),
3109 XEXP (x, 0), op0_mode);
3110 break;
3111
3112 case '<':
3113 {
3114 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
3115 if (op_mode == VOIDmode)
3116 op_mode = GET_MODE (XEXP (x, 1));
3117 new = simplify_relational_operation (code, op_mode,
3118 XEXP (x, 0), XEXP (x, 1));
3119#ifdef FLOAT_STORE_FLAG_VALUE
3120 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3121 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3122 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
3123 GET_MODE (x)));
3124#endif
3125 break;
3126 }
3127
3128 case '2':
3129 case 'c':
3130 new = simplify_binary_operation (code, GET_MODE (x),
3131 XEXP (x, 0), XEXP (x, 1));
3132 break;
3133
3134 case 'b':
3135 case '3':
3136 if (op0_mode == MAX_MACHINE_MODE)
3137 abort ();
3138 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
3139 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
3140 break;
3141 }
3142
3143 if (new)
3144 validate_change (insn, loc, new, 1);
3145}
3146
3147/* Show that registers modified no longer contain known constants.  We are
3148 called from note_stores with parts of the new insn. */
3149
3150void
3151mark_stores (dest, x)
3152 rtx dest;
3153 rtx x ATTRIBUTE_UNUSED;
3154{
3155 int regno = -1;
3156 enum machine_mode mode;
3157
3158 /* DEST is always the innermost thing set, except in the case of
3159 SUBREGs of hard registers. */
3160
3161 if (GET_CODE (dest) == REG)
3162 regno = REGNO (dest), mode = GET_MODE (dest);
3163 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
3164 {
3165 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
3166 mode = GET_MODE (SUBREG_REG (dest));
3167 }
3168
3169 if (regno >= 0)
3170 {
3171 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
3172 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
3173 int i;
3174
3175 /* Ignore virtual stack var or virtual arg register since those
3176 are handled separately. */
3177 if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
3178 && regno != VIRTUAL_STACK_VARS_REGNUM)
3179 for (i = regno; i <= last_reg; i++)
3180 if (i < VARRAY_SIZE (global_const_equiv_varray))
3181 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
3182 }
3183}
3184\f
3185/* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
3186 pointed to by PX, they represent constants in the constant pool.
3187 Replace these with a new memory reference obtained from force_const_mem.
3188 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
3189 address of a constant pool entry. Replace them with the address of
3190 a new constant pool entry obtained from force_const_mem. */
3191
3192static void
3193restore_constants (px)
3194 rtx *px;
3195{
3196 rtx x = *px;
3197 int i, j;
3198 char *fmt;
3199
3200 if (x == 0)
3201 return;
3202
3203 if (GET_CODE (x) == CONST_DOUBLE)
3204 {
3205 /* We have to make a new CONST_DOUBLE to ensure that we account for
3206 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
3207 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3208 {
3209 REAL_VALUE_TYPE d;
3210
3211 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
3212 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
3213 }
3214 else
3215 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
3216 VOIDmode);
3217 }
3218
3219 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
3220 {
3221 restore_constants (&XEXP (x, 0));
3222 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
3223 }
3224 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
3225 {
3226 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
3227 rtx new = XEXP (SUBREG_REG (x), 0);
3228
3229 restore_constants (&new);
3230 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
3231 PUT_MODE (new, GET_MODE (x));
3232 *px = validize_mem (new);
3233 }
3234 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
3235 {
3236 rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
3237 XEXP (XEXP (x, 0), 0)),
3238 0);
3239
3240#ifdef POINTERS_EXTEND_UNSIGNED
3241 if (GET_MODE (new) != GET_MODE (x))
3242 new = convert_memory_address (GET_MODE (x), new);
3243#endif
3244
3245 *px = new;
3246 }
3247 else
3248 {
3249 fmt = GET_RTX_FORMAT (GET_CODE (x));
3250 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
3251 {
3252 switch (*fmt++)
3253 {
3254 case 'E':
3255 for (j = 0; j < XVECLEN (x, i); j++)
3256 restore_constants (&XVECEXP (x, i, j));
3257 break;
3258
3259 case 'e':
3260 restore_constants (&XEXP (x, i));
3261 break;
3262 }
3263 }
3264 }
3265}
3266\f
3267/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
3268 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
3269 that it points to the node itself, thus indicating that the node is its
3270 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
3271 the given node is NULL, recursively descend the decl/block tree which
3272 it is the root of, and for each other ..._DECL or BLOCK node contained
3273 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
3274 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
3275 values to point to themselves. */
3276
3277static void
3278set_block_origin_self (stmt)
3279 register tree stmt;
3280{
3281 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
3282 {
3283 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
3284
3285 {
3286 register tree local_decl;
3287
3288 for (local_decl = BLOCK_VARS (stmt);
3289 local_decl != NULL_TREE;
3290 local_decl = TREE_CHAIN (local_decl))
3291 set_decl_origin_self (local_decl); /* Potential recursion. */
3292 }
3293
3294 {
3295 register tree subblock;
3296
3297 for (subblock = BLOCK_SUBBLOCKS (stmt);
3298 subblock != NULL_TREE;
3299 subblock = BLOCK_CHAIN (subblock))
3300 set_block_origin_self (subblock); /* Recurse. */
3301 }
3302 }
3303}
3304
3305/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
3306 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
3307   node so that it points to the node itself, thus indicating that the
3308 node represents its own (abstract) origin. Additionally, if the
3309 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
3310   the decl/block tree of which the given node is the root, and for
3311 each other ..._DECL or BLOCK node contained therein whose
3312 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
3313 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
3314 point to themselves. */
3315
3316static void
3317set_decl_origin_self (decl)
3318 register tree decl;
3319{
3320 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
3321 {
3322 DECL_ABSTRACT_ORIGIN (decl) = decl;
3323 if (TREE_CODE (decl) == FUNCTION_DECL)
3324 {
3325 register tree arg;
3326
3327 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3328 DECL_ABSTRACT_ORIGIN (arg) = arg;
3329 if (DECL_INITIAL (decl) != NULL_TREE
3330 && DECL_INITIAL (decl) != error_mark_node)
3331 set_block_origin_self (DECL_INITIAL (decl));
3332 }
3333 }
3334}
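
/* Note that set_block_origin_self and set_decl_origin_self are mutually
   recursive: the former hands each BLOCK's local ..._DECLs to the latter,
   and the latter hands a FUNCTION_DECL's outermost BLOCK (its
   DECL_INITIAL) back to the former, so a single call marks an entire
   decl/block tree as being its own abstract origin.  */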
\f
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
        DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
          && DECL_INITIAL (decl) != error_mark_node)
        set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
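
/* A sketch of the intended usage of set_decl_abstract_flags (illustration
   only, not part of this file): callers typically mark a function decl
   abstract while emitting debug info for its abstract instance, then
   clear the flags again so concrete instances can still be output.  */
#if 0
/* Mark FNDECL and everything it contains abstract for the duration.  */
set_decl_abstract_flags (fndecl, 1);
/* ... emit debugging information for the abstract instance here ...  */
set_decl_abstract_flags (fndecl, 0);
#endif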
\f
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  rtx head;
  rtx last;

  /* Things we allocate from here on are part of this function, not
     permanent.  */
  temporary_allocation ();

  head = DECL_SAVED_INSNS (fndecl);
  current_function_decl = fndecl;

  /* This call is only used to initialize global variables.  */
  init_function_start (fndecl, "lossage", 1);

  /* Redo parameter determinations in case the FUNCTION_...
     macros took machine-specific actions that need to be redone.  */
  assign_parms (fndecl, 1);

  /* Set stack frame size.  */
  assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);

  /* The first is a bit of a lie (the array may be larger), but it doesn't
     matter much, and it isn't worth saving the actual bound.  */
  reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
  regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
  regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
  regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
  max_parm_reg = MAX_PARMREG (head);
  parm_reg_stack_loc = (rtx *) PARMREG_STACK_LOC (head);

  stack_slot_list = STACK_SLOT_LIST (head);
  forced_labels = FORCED_LABELS (head);

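  /* Restore the current_function_* globals from the flag bits that were
     packed into FUNCTION_FLAGS when this function's insns were saved.  */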
  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_COMPUTED_JUMP)
    current_function_has_computed_jump = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
    current_function_calls_alloca = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
    current_function_calls_setjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
    current_function_calls_longjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
    current_function_returns_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
    current_function_returns_pcc_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    current_function_needs_context = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
    current_function_has_nonlocal_label = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
    current_function_returns_pointer = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
    current_function_uses_const_pool = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
  current_function_pops_args = POPS_ARGS (head);

  /* This is the only thing that the expand_function_end call, which used
     to be here, actually does; that call can cause problems.  */
  immediate_size_expand--;

  /* Find last insn and rebuild the constant pool.  */
  for (last = FIRST_PARM_INSN (head);
       NEXT_INSN (last); last = NEXT_INSN (last))
    {
      if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
        {
          restore_constants (&PATTERN (last));
          restore_constants (&REG_NOTES (last));
        }
    }

  set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
  set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */

  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* We can't inline this anymore.  */
  DECL_INLINE (fndecl) = 0;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  current_function_decl = 0;
}
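
/* A sketch of how a front end or driver might use output_inline_function
   at the end of compilation (illustration only, not part of this file;
   the list name `deferred_inlines' is hypothetical):  */
#if 0
{
  tree fndecl;

  /* Emit each deferred inline that was actually referenced and has
     not already been written out.  */
  for (fndecl = deferred_inlines; fndecl; fndecl = TREE_CHAIN (fndecl))
    if (! TREE_ASM_WRITTEN (fndecl) && TREE_USED (fndecl))
      output_inline_function (fndecl);
}
#endif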