/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* $FreeBSD: src/contrib/gcc/stmt.c,v 1.1.1.4.2.2 2002/05/01 19:57:46 obrien Exp $ */
/* $DragonFly: src/contrib/gcc/Attic/stmt.c,v 1.2 2003/06/17 04:24:01 dillon Exp $ */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
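
/* For instance, for a source fragment such as

	if (cond)
	  foo ();

   a front end makes roughly this sequence of calls (a sketch only;
   the exact arguments, e.g. the exit-flag, are simplified here):

	expand_start_cond (cond, 0);	-- after parsing the condition
	  ... expand the then-clause ...
	expand_end_cond ();		-- after parsing the then-clause  */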

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
  int			balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
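
/* As an illustration (a hypothetical switch, not from a test case):

	switch (i)
	  {
	  case 4:        ...	-- one node with LOW == HIGH == 4
	  case 7 ... 9:  ...	-- a GNU range label: LOW == 7, HIGH == 9
	  }

   each label contributes one case_node, with LOW and HIGH holding the
   INTEGER_CST bounds and CODE_LABEL the label to jump to on a match.  */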

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
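
/* A sketch of the resulting shape (hypothetical nesting, with no
   other constructs open): while expanding

	while (...)		-- entry on loop_stack
	  { int v[n];		-- entry on block_stack (and, since the
				   block has a stack level, also on
				   stack_block_stack)
	    switch (...)	-- entry on case_stack
	      ...

   all three entries are also chained, innermost first, on
   `nesting_stack', and `nesting_depth' is 3.  */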

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)						\
do { struct nesting *target = STACK;				\
     struct nesting *this;					\
     do { this = nesting_stack;					\
	  if (loop_stack == this)				\
	    loop_stack = loop_stack->next;			\
	  if (cond_stack == this)				\
	    cond_stack = cond_stack->next;			\
	  if (block_stack == this)				\
	    block_stack = block_stack->next;			\
	  if (stack_block_stack == this)			\
	    stack_block_stack = stack_block_stack->next;	\
	  if (case_stack == this)				\
	    case_stack = case_stack->next;			\
	  nesting_depth = nesting_stack->depth - 1;		\
	  nesting_stack = this->all;				\
	  obstack_free (&stmt_obstack, this); }			\
     while (this != target); } while (0)
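
/* For example, an `expand_end_WHATEVER' function that closes a loop
   construct would pop it with (a sketch of the intended use)

	POPSTACK (loop_stack);

   which unwinds nesting_stack down to and including the innermost
   loop while keeping every per-construct stack consistent.  */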
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
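
/* The classic case needing a fixup (a hypothetical example): a goto
   forward past a block whose frame space must be deallocated,

	    goto out;		-- stack level of the target unknown yet
	    { int v[n];		-- block that resets the stack pointer
	      ...
	    }
	out: ...

   expand_goto runs before `out' is defined, so it records a goto_fixup;
   when the inner block is exited, fixup_gotos notes the stack level
   (and any cleanups) to apply, and once the label is seen the restoring
   code is inserted just before the jump.  */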

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};


/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;


static int n_occurrences		PROTO((int, const char *));
static void expand_goto_internal	PROTO((tree, rtx, rtx));
static int expand_fixup			PROTO((tree, rtx, rtx));
static rtx expand_nl_handler_label	PROTO((rtx, rtx));
static void expand_nl_goto_receiver	PROTO((void));
static void expand_nl_goto_receivers	PROTO((struct nesting *));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void check_seenlabel		PROTO((void));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));
\f
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
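
/* This is what GNU C's computed goto compiles through (illustrative):

	void *p = &&lab;	-- label-as-value extension
	goto *p;		-- front end calls expand_computed_goto

   with EXP the tree for `p'; the result is an indirect jump insn.  */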
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
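
/* Nonlocal labels arise from GNU C nested functions (a sketch of the
   source-level construct involved):

	void f ()
	{
	  __label__ lab;
	  void g () { goto lab; }	-- nonlocal goto out of g
	  ...
	lab: ;
	}

   f's front end calls declare_nonlocal_label for `lab', reserving a
   handler slot; the goto in g then takes the nonlocal path in
   expand_goto below.  */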

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp, handler_slot;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->nonlocal_goto_handler_slots;
      for (link = p->nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));
	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;

	start_sequence ();
	pushlevel (0);
	start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now! */
	end_sequence ();
	emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}


\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
1024 implicit C++ object destructions (which must be executed upon
1025 leaving the block) appear (to the debugger) to be taking place
1026 in an area of the generated code where the object(s) being
1027 destructed are still "in scope". */
1028
1029 cleanup_insns = get_insns ();
1030 poplevel (1, 0, 0);
1031
1032 end_sequence ();
1033 emit_insns_after (cleanup_insns, f->before_jump);
1034
1035
1036 f->before_jump = 0;
1037 }
1038 }
1039
1040 /* For any still-undefined labels, do the cleanups for this block now.
1041 We must do this now since items in the cleanup list may go out
1042 of scope when the block ends. */
1043 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1044 if (f->before_jump != 0
1045 && PREV_INSN (f->target_rtl) == 0
1046 /* Label has still not appeared. If we are exiting a block with
1047 a stack level to restore, that started before the fixup,
1048 mark this stack level as needing restoration
1049 when the fixup is later finalized. */
1050 && thisblock != 0
1051 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1052 means the label is undefined. That's erroneous, but possible. */
1053 && (thisblock->data.block.block_start_count
1054 <= f->block_start_count))
1055 {
1056 tree lists = f->cleanup_list_list;
1057 rtx cleanup_insns;
1058
1059 for (; lists; lists = TREE_CHAIN (lists))
1060 /* If the following elt. corresponds to our containing block
1061 then the elt. must be for this block. */
1062 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1063 {
1064 start_sequence ();
1065 pushlevel (0);
1066 set_block (f->context);
1067 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1068 do_pending_stack_adjust ();
1069 cleanup_insns = get_insns ();
1070 poplevel (1, 0, 0);
1071 end_sequence ();
1072 if (cleanup_insns != 0)
1073 f->before_jump
1074 = emit_insns_after (cleanup_insns, f->before_jump);
1075
1076 f->cleanup_list_list = TREE_CHAIN (lists);
1077 }
1078
1079 if (stack_level)
1080 f->stack_level = stack_level;
1081 }
1082}
1083\f
1084/* Return the number of times character C occurs in string S. */
1085static int
1086n_occurrences (c, s)
1087 int c;
1088 const char *s;
1089{
1090 int n = 0;
1091 while (*s)
1092 n += (*s++ == c);
1093 return n;
1094}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
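
/* This handles only the simple, operand-less form (illustrative):

	asm ("nop");

   The operand-taking form, asm ("..." : outputs : inputs : clobbers),
   goes through expand_asm_operands below.  */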

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
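
/* For example, an illustrative GNU C extended asm (x86 mnemonics
   assumed):

	asm volatile ("addl %2,%0" : "=r" (dst) : "0" (src1), "r" (src2));

   arrives here with STRING the template, OUTPUTS a one-element list
   whose TREE_PURPOSE is "=r" and TREE_VALUE the tree for `dst', INPUTS
   carrying "0"/`src1' and "r"/`src2', and VOL set by `volatile'.  */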

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return;
	}

      tmp = outputs;
      while (tmp)
	{
	  char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return;
	    }
	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));

      /* Allow the `=' or `+' to not be at the beginning of the string,
	 since it wasn't explicitly documented that way, and there is a
	 large body of code that puts it last.  Swap the character to
	 the front, so as not to uglify any place else.  */
      switch (c_len)
	{
	default:
	  if ((p = strchr (constraint, '=')) != NULL)
	    break;
	  if ((p = strchr (constraint, '+')) != NULL)
	    break;
	case 0:
	  error ("output operand constraint lacks `='");
	  return;
	}

      if (p != constraint)
	{
	  j = *p;
	  bcopy (constraint, constraint+1, p-constraint);
	  *constraint = j;

	  warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
	}

      is_inout = constraint[0] == '+';
      /* Replace '+' with '='.  */
      constraint[0] = '=';
      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
	{
	  error ("output operand constraint %d contains `+'", i);
	  return;
	}

      for (j = 1; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':
	  case '=':
	    error ("operand constraint contains '+' or '=' at illegal position.");
	    return;

	  case '%':
	    if (i + 1 == ninputs + noutputs)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case '?':  case '!':  case '*':  case '&':
	  case 'E':  case 'F':  case 'G':  case 'H':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	    /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	       excepting those that expand_call created.  So match memory
	       and hope.  */
	    allows_mem = 1;
	    break;

	  case 'g':  case 'X':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;

	  case 'p':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	  if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
	    {
	      real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
	      output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
	      if (is_inout)
		emit_move_insn (output_rtx[i], real_output_rtx[i]);
	    }
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 1);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
			       TREE_STRING_POINTER (string), "", 0, argvec,
			       constraints, filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      /* ??? Can this happen, and does the error message make any sense? */
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      orig_constraint = constraint;

      /* Make sure constraint has neither `=', `+', nor '&'.  */

      for (j = 0; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':  case '=':  case '&':
	    if (constraint == orig_constraint)
	      {
		error ("input operand constraint contains `%c'", constraint[j]);
		return;
	      }
	    break;

	  case '%':
	    if (constraint == orig_constraint
		&& i + 1 == ninputs - ninout)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	  case '?':  case '!':  case '*':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (constraint[j] >= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* Try and find the real constraint for this dup.  */
	    if ((j == 0 && c_len == 1)
		|| (j == 1 && c_len == 2 && constraint[0] == '%'))
	      {
		tree o = outputs;
		for (j = constraint[j] - '0'; j > 0; --j)
		  o = TREE_CHAIN (o);

		c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
		constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
		j = 0;
		break;
	      }

	    /* ... fall through ... */

	  case 'p':  case 'r':
	  default:
	    allows_reg = 1;
	    break;

	  case 'g':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;
	  }

      if (! allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (tail));

      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg)
	    op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
	  else if (!allows_mem)
	    warning ("asm operand %d probably doesn't match constraints", i);
	  else if (CONSTANT_P (op))
	    op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				  op);
	  else if (GET_CODE (op) == REG
		   || GET_CODE (op) == SUBREG
		   || GET_CODE (op) == CONCAT)
	    {
	      tree type = TREE_TYPE (TREE_VALUE (tail));
	      rtx memloc = assign_temp (type, 1, 1, 1);

	      emit_move_insn (memloc, op);
	      op = memloc;
	    }
	  else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
	    /* We won't recognize volatile memory as available as a
	       memory_operand at this point.  Ignore it.  */
	    ;
	  else if (queued_subexp_p (op))
	    ;
	  else
	    /* ??? Leave this only until we have experience with what
	       happens in combine and elsewhere when constraints are
	       not satisfied.  */
	    warning ("asm operand %d probably doesn't match constraints", i);
	}
      XVECEXP (body, 3, i) = op;

      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			     orig_constraint);
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
	= {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
	= output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
	= gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
    }
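
  /* That is, an in-out operand written as (illustrative)

	asm ("incl %0" : "+r" (x));

     has by now been split into output 0 with constraint "=r" plus an
     extra input whose constraint is the matching-number string "0"
     from MATCH above, tying the input to output 0.  */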

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS (VOIDmode,
						 TREE_STRING_POINTER (string),
						 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
						 i, argvec, constraints,
						 filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM (BLKmode,
						    gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
	}

      insn = emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  free_temp_slots ();
}
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  last_expr_value = expand_expr (exp,
				 (expr_stmts_for_value
				  ? NULL_RTX : const0_rtx),
				 VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
	;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
				   expand_expr (TYPE_SIZE (last_expr_type),
						NULL_RTX, VOIDmode, 0),
				   BLKmode, 0,
				   TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
				   lab);
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */
1716
1717int
1718warn_if_unused_value (exp)
1719 tree exp;
1720{
1721 if (TREE_USED (exp))
1722 return 0;
1723
1724 switch (TREE_CODE (exp))
1725 {
1726 case PREINCREMENT_EXPR:
1727 case POSTINCREMENT_EXPR:
1728 case PREDECREMENT_EXPR:
1729 case POSTDECREMENT_EXPR:
1730 case MODIFY_EXPR:
1731 case INIT_EXPR:
1732 case TARGET_EXPR:
1733 case CALL_EXPR:
1734 case METHOD_CALL_EXPR:
1735 case RTL_EXPR:
1736 case TRY_CATCH_EXPR:
1737 case WITH_CLEANUP_EXPR:
1738 case EXIT_EXPR:
1739 /* We don't warn about COND_EXPR because it may be a useful
1740 construct if either arm contains a side effect. */
1741 case COND_EXPR:
1742 return 0;
1743
1744 case BIND_EXPR:
1745 /* For a binding, warn if no side effect within it. */
1746 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1747
1748 case SAVE_EXPR:
1749 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1750
1751 case TRUTH_ORIF_EXPR:
1752 case TRUTH_ANDIF_EXPR:
1753 /* In && or ||, warn if 2nd operand has no side effect. */
1754 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1755
1756 case COMPOUND_EXPR:
1757 if (TREE_NO_UNUSED_WARNING (exp))
1758 return 0;
1759 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1760 return 1;
1761 /* Let people do `(foo (), 0)' without a warning. */
1762 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1763 return 0;
1764 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1765
1766 case NOP_EXPR:
1767 case CONVERT_EXPR:
1768 case NON_LVALUE_EXPR:
1769 /* Don't warn about values cast to void. */
1770 if (TREE_TYPE (exp) == void_type_node)
1771 return 0;
1772 /* Don't warn about conversions not explicit in the user's program. */
1773 if (TREE_NO_UNUSED_WARNING (exp))
1774 return 0;
1775 /* Assignment to a cast usually results in a cast of a modify.
1776 Don't complain about that. There can be an arbitrary number of
1777 casts before the modify, so we must loop until we find the first
1778 non-cast expression and then test to see if that is a modify. */
1779 {
1780 tree tem = TREE_OPERAND (exp, 0);
1781
1782 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1783 tem = TREE_OPERAND (tem, 0);
1784
1785 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1786 || TREE_CODE (tem) == CALL_EXPR)
1787 return 0;
1788 }
1789 goto warn;
1790
1791 case INDIRECT_REF:
1792 /* Don't warn about automatic dereferencing of references, since
1793 the user cannot control it. */
1794 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1795 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1796 /* ... fall through ... */
1797
1798 default:
1799 /* Referencing a volatile value is a side effect, so don't warn. */
1800 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1801 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1802 && TREE_THIS_VOLATILE (exp))
1803 return 0;
1804 warn:
1805 warning_with_file_and_line (emit_filename, emit_lineno,
1806 "value computed is not used");
1807 return 1;
1808 }
1809}
1810
1811/* Clear out the memory of the last expression evaluated. */
1812
1813void
1814clear_last_expr ()
1815{
1816 last_expr_type = 0;
1817}
1818
1819/* Begin a statement which will return a value.
1820 Return the RTL_EXPR for this statement expr.
1821 The caller must save that value and pass it to expand_end_stmt_expr. */
1822
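/* Illustrative sketch (hypothetical front-end usage, not code from this
   file): to expand `({ foo (); 5; })' for its value, a caller would do
   roughly

	t = expand_start_stmt_expr ();
	... expand_expr_stmt on the call to foo, value discarded ...
	... expand_expr_stmt on `5', recorded as the last value ...
	t = expand_end_stmt_expr (t);

   after which T is an RTL_EXPR carrying the insns and the value of `5'.  */
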
1823tree
1824expand_start_stmt_expr ()
1825{
1826 int momentary;
1827 tree t;
1828
1829 /* Make the RTL_EXPR node temporary, not momentary,
1830 so that rtl_expr_chain doesn't become garbage. */
1831 momentary = suspend_momentary ();
1832 t = make_node (RTL_EXPR);
1833 resume_momentary (momentary);
1834 do_pending_stack_adjust ();
1835 start_sequence_for_rtl_expr (t);
1836 NO_DEFER_POP;
1837 expr_stmts_for_value++;
1838 return t;
1839}
1840
1841/* Restore the previous state at the end of a statement that returns a value.
1842 Returns a tree node representing the statement's value and the
1843 insns to compute the value.
1844
1845 The nodes of that expression have been freed by now, so we cannot use them.
1846 But we don't want to do that anyway; the expression has already been
1847   evaluated and now we just want to use the value. So generate an RTL_EXPR
1848 with the proper type and RTL value.
1849
1850 If the last substatement was not an expression,
1851 return something with type `void'. */
1852
1853tree
1854expand_end_stmt_expr (t)
1855 tree t;
1856{
1857 OK_DEFER_POP;
1858
1859 if (last_expr_type == 0)
1860 {
1861 last_expr_type = void_type_node;
1862 last_expr_value = const0_rtx;
1863 }
1864 else if (last_expr_value == 0)
1865 /* There are some cases where this can happen, such as when the
1866     statement is of void type. */
1867 last_expr_value = const0_rtx;
1868 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1869 /* Remove any possible QUEUED. */
1870 last_expr_value = protect_from_queue (last_expr_value, 0);
1871
1872 emit_queue ();
1873
1874 TREE_TYPE (t) = last_expr_type;
1875 RTL_EXPR_RTL (t) = last_expr_value;
1876 RTL_EXPR_SEQUENCE (t) = get_insns ();
1877
1878 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1879
1880 end_sequence ();
1881
1882 /* Don't consider deleting this expr or containing exprs at tree level. */
1883 TREE_SIDE_EFFECTS (t) = 1;
1884 /* Propagate volatility of the actual RTL expr. */
1885 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1886
1887 last_expr_type = 0;
1888 expr_stmts_for_value--;
1889
1890 return t;
1891}
1892\f
1893/* Generate RTL for the start of an if-then. COND is the expression
1894 whose truth should be tested.
1895
1896 If EXITFLAG is nonzero, this conditional is visible to
1897 `exit_something'. */
1898
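/* As an illustrative sketch (assumed usage, not code in this file), a
   front end expanding `if (a) s1; else if (b) s2; else s3;' would make
   roughly these calls:

	expand_start_cond (a, 0);
	  ... expand s1 ...
	expand_start_elseif (b);
	  ... expand s2 ...
	expand_start_else ();
	  ... expand s3 ...
	expand_end_cond ();  */
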
1899void
1900expand_start_cond (cond, exitflag)
1901 tree cond;
1902 int exitflag;
1903{
1904 struct nesting *thiscond = ALLOC_NESTING ();
1905
1906 /* Make an entry on cond_stack for the cond we are entering. */
1907
1908 thiscond->next = cond_stack;
1909 thiscond->all = nesting_stack;
1910 thiscond->depth = ++nesting_depth;
1911 thiscond->data.cond.next_label = gen_label_rtx ();
1912 /* Before we encounter an `else', we don't need a separate exit label
1913 unless there are supposed to be exit statements
1914 to exit this conditional. */
1915 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1916 thiscond->data.cond.endif_label = thiscond->exit_label;
1917 cond_stack = thiscond;
1918 nesting_stack = thiscond;
1919
1920 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1921}
1922
1923/* Generate RTL between the then-clause and the elseif-clause
1924 of an if-then-elseif-.... */
1925
1926void
1927expand_start_elseif (cond)
1928 tree cond;
1929{
1930 if (cond_stack->data.cond.endif_label == 0)
1931 cond_stack->data.cond.endif_label = gen_label_rtx ();
1932 emit_jump (cond_stack->data.cond.endif_label);
1933 emit_label (cond_stack->data.cond.next_label);
1934 cond_stack->data.cond.next_label = gen_label_rtx ();
1935 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1936}
1937
1938/* Generate RTL between the then-clause and the else-clause
1939 of an if-then-else. */
1940
1941void
1942expand_start_else ()
1943{
1944 if (cond_stack->data.cond.endif_label == 0)
1945 cond_stack->data.cond.endif_label = gen_label_rtx ();
1946
1947 emit_jump (cond_stack->data.cond.endif_label);
1948 emit_label (cond_stack->data.cond.next_label);
1949 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1950}
1951
1952/* After calling expand_start_else, turn this "else" into an "else if"
1953 by providing another condition. */
1954
1955void
1956expand_elseif (cond)
1957 tree cond;
1958{
1959 cond_stack->data.cond.next_label = gen_label_rtx ();
1960 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1961}
1962
1963/* Generate RTL for the end of an if-then.
1964 Pop the record for it off of cond_stack. */
1965
1966void
1967expand_end_cond ()
1968{
1969 struct nesting *thiscond = cond_stack;
1970
1971 do_pending_stack_adjust ();
1972 if (thiscond->data.cond.next_label)
1973 emit_label (thiscond->data.cond.next_label);
1974 if (thiscond->data.cond.endif_label)
1975 emit_label (thiscond->data.cond.endif_label);
1976
1977 POPSTACK (cond_stack);
1978 last_expr_type = 0;
1979}
1982\f
1983/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1984 loop should be exited by `exit_something'. This is a loop for which
1985 `expand_continue' will jump to the top of the loop.
1986
1987 Make an entry on loop_stack to record the labels associated with
1988 this loop. */
1989
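/* Illustrative sketch (assumed usage): a front end expanding
   `while (cond) body;' would do roughly

	expand_start_loop (1);
	expand_exit_loop_if_false (0, cond);
	  ... expand body ...
	expand_end_loop ();

   where passing 0 to expand_exit_loop_if_false means the innermost
   loop on loop_stack.  */
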
1990struct nesting *
1991expand_start_loop (exit_flag)
1992 int exit_flag;
1993{
1994 register struct nesting *thisloop = ALLOC_NESTING ();
1995
1996 /* Make an entry on loop_stack for the loop we are entering. */
1997
1998 thisloop->next = loop_stack;
1999 thisloop->all = nesting_stack;
2000 thisloop->depth = ++nesting_depth;
2001 thisloop->data.loop.start_label = gen_label_rtx ();
2002 thisloop->data.loop.end_label = gen_label_rtx ();
2003 thisloop->data.loop.alt_end_label = 0;
2004 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2005 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2006 loop_stack = thisloop;
2007 nesting_stack = thisloop;
2008
2009 do_pending_stack_adjust ();
2010 emit_queue ();
2011 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2012 emit_label (thisloop->data.loop.start_label);
2013
2014 return thisloop;
2015}
2016
2017/* Like expand_start_loop but for a loop where the continuation point
2018 (for expand_continue_loop) will be specified explicitly. */
2019
2020struct nesting *
2021expand_start_loop_continue_elsewhere (exit_flag)
2022 int exit_flag;
2023{
2024 struct nesting *thisloop = expand_start_loop (exit_flag);
2025 loop_stack->data.loop.continue_label = gen_label_rtx ();
2026 return thisloop;
2027}
2028
2029/* Specify the continuation point for a loop started with
2030 expand_start_loop_continue_elsewhere.
2031 Use this at the point in the code to which a continue statement
2032 should jump. */
2033
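/* E.g. (illustrative), for a loop whose increment must run at each
   `continue', such as a C `for' statement, the expected shape is

	expand_start_loop_continue_elsewhere (1);
	  ... expand the body ...
	expand_loop_continue_here ();
	  ... expand the increment expression ...
	expand_end_loop ();  */
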
2034void
2035expand_loop_continue_here ()
2036{
2037 do_pending_stack_adjust ();
2038 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2039 emit_label (loop_stack->data.loop.continue_label);
2040}
2041
2042/* Finish a loop. Generate a jump back to the top and the loop-exit label.
2043 Pop the block off of loop_stack. */
2044
2045void
2046expand_end_loop ()
2047{
2048 rtx start_label = loop_stack->data.loop.start_label;
2049 rtx insn = get_last_insn ();
2050 int needs_end_jump = 1;
2051
2052 /* Mark the continue-point at the top of the loop if none elsewhere. */
2053 if (start_label == loop_stack->data.loop.continue_label)
2054 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2055
2056 do_pending_stack_adjust ();
2057
2058 /* If optimizing, perhaps reorder the loop.
2059 First, try to use a condjump near the end.
2060 expand_exit_loop_if_false ends loops with unconditional jumps,
2061 like this:
2062
2063 if (test) goto label;
2064 optional: cleanup
2065 goto loop_stack->data.loop.end_label
2066 barrier
2067 label:
2068
2069 If we find such a pattern, we can end the loop earlier. */
2070
2071 if (optimize
2072 && GET_CODE (insn) == CODE_LABEL
2073 && LABEL_NAME (insn) == NULL
2074 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2075 {
2076 rtx label = insn;
2077 rtx jump = PREV_INSN (PREV_INSN (label));
2078
2079 if (GET_CODE (jump) == JUMP_INSN
2080 && GET_CODE (PATTERN (jump)) == SET
2081 && SET_DEST (PATTERN (jump)) == pc_rtx
2082 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2083 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2084 == loop_stack->data.loop.end_label))
2085 {
2086 rtx prev;
2087
2088 /* The test might be complex and reference LABEL multiple times,
2089 like the loop in loop_iterations to set vtop. To handle this,
2090 we move LABEL. */
2091 insn = PREV_INSN (label);
2092 reorder_insns (label, label, start_label);
2093
2094 for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
2095 {
2096 /* We ignore line number notes, but if we see any other note,
2097 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2098 NOTE_INSN_LOOP_*, we disable this optimization. */
2099 if (GET_CODE (prev) == NOTE)
2100 {
2101 if (NOTE_LINE_NUMBER (prev) < 0)
2102 break;
2103 continue;
2104 }
2105 if (GET_CODE (prev) == CODE_LABEL)
2106 break;
2107 if (GET_CODE (prev) == JUMP_INSN)
2108 {
2109 if (GET_CODE (PATTERN (prev)) == SET
2110 && SET_DEST (PATTERN (prev)) == pc_rtx
2111 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2112 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2113 == LABEL_REF)
2114 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2115 {
2116 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2117 = start_label;
2118 emit_note_after (NOTE_INSN_LOOP_END, prev);
2119 needs_end_jump = 0;
2120 }
2121 break;
2122 }
2123 }
2124 }
2125 }
2126
2127 /* If the loop starts with a loop exit, roll that to the end where
2128 it will optimize together with the jump back.
2129
2130 We look for the conditional branch to the exit, except that once
2131 we find such a branch, we don't look past 30 instructions.
2132
2133 In more detail, if the loop presently looks like this (in pseudo-C):
2134
2135 start_label:
2136 if (test) goto end_label;
2137 body;
2138 goto start_label;
2139 end_label:
2140
2141 transform it to look like:
2142
2143 goto start_label;
2144 newstart_label:
2145 body;
2146 start_label:
2147 if (test) goto end_label;
2148 goto newstart_label;
2149 end_label:
2150
2151 Here, the `test' may actually consist of some reasonably complex
2152 code, terminating in a test. */
2153
2154 if (optimize
2155 && needs_end_jump
2156 &&
2157 ! (GET_CODE (insn) == JUMP_INSN
2158 && GET_CODE (PATTERN (insn)) == SET
2159 && SET_DEST (PATTERN (insn)) == pc_rtx
2160 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2161 {
2162 int eh_regions = 0;
2163 int num_insns = 0;
2164 rtx last_test_insn = NULL_RTX;
2165
2166 /* Scan insns from the top of the loop looking for a qualified
2167 conditional exit. */
2168 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2169 insn = NEXT_INSN (insn))
2170 {
2171 if (GET_CODE (insn) == NOTE)
2172 {
2173 if (optimize < 2
2174 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2175 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2176 /* The code that actually moves the exit test will
2177 carefully leave BLOCK notes in their original
2178 location. That means, however, that we can't debug
2179 the exit test itself. So, we refuse to move code
2180 containing BLOCK notes at low optimization levels. */
2181 break;
2182
2183 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2184 ++eh_regions;
2185 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2186 {
2187 --eh_regions;
2188 if (eh_regions < 0)
2189 /* We've come to the end of an EH region, but
2190 never saw the beginning of that region. That
2191 means that an EH region begins before the top
2192 of the loop, and ends in the middle of it. The
2193 existence of such a situation violates a basic
2194 assumption in this code, since that would imply
2195 that even when EH_REGIONS is zero, we might
2196 move code out of an exception region. */
2197 abort ();
2198 }
2199
2200 /* We must not walk into a nested loop. */
2201 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2202 break;
2203
2204 /* We already know this INSN is a NOTE, so there's no
2205 point in looking at it to see if it's a JUMP. */
2206 continue;
2207 }
2208
2209 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2210 num_insns++;
2211
2212 if (last_test_insn && num_insns > 30)
2213 break;
2214
2215 if (eh_regions > 0)
2216 /* We don't want to move a partial EH region. Consider:
2217
2218 while ( ( { try {
2219 if (cond ()) 0;
2220 else {
2221 bar();
2222 1;
2223 }
2224 } catch (...) {
2225 1;
2226 } )) {
2227 body;
2228 }
2229
2230 This isn't legal C++, but here's what it's supposed to
2231 mean: if cond() is true, stop looping. Otherwise,
2232 call bar, and keep looping. In addition, if cond
2233 throws an exception, catch it and keep looping. Such
2234	     constructs are certainly legal in LISP.
2235
2236 We should not move the `if (cond()) 0' test since then
2237 the EH-region for the try-block would be broken up.
2238	     (In this case we would move the EH_BEG note for the `try'
2239 and `if cond()' but not the call to bar() or the
2240 EH_END note.)
2241
2242 So we don't look for tests within an EH region. */
2243 continue;
2244
2245 if (GET_CODE (insn) == JUMP_INSN
2246 && GET_CODE (PATTERN (insn)) == SET
2247 && SET_DEST (PATTERN (insn)) == pc_rtx)
2248 {
2249 /* This is indeed a jump. */
2250 rtx dest1 = NULL_RTX;
2251 rtx dest2 = NULL_RTX;
2252 rtx potential_last_test;
2253 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2254 {
2255 /* A conditional jump. */
2256 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2257 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2258 potential_last_test = insn;
2259 }
2260 else
2261 {
2262 /* An unconditional jump. */
2263 dest1 = SET_SRC (PATTERN (insn));
2264 /* Include the BARRIER after the JUMP. */
2265 potential_last_test = NEXT_INSN (insn);
2266 }
2267
2268 do {
2269 if (dest1 && GET_CODE (dest1) == LABEL_REF
2270 && ((XEXP (dest1, 0)
2271 == loop_stack->data.loop.alt_end_label)
2272 || (XEXP (dest1, 0)
2273 == loop_stack->data.loop.end_label)))
2274 {
2275 last_test_insn = potential_last_test;
2276 break;
2277 }
2278
2279 /* If this was a conditional jump, there may be
2280 another label at which we should look. */
2281 dest1 = dest2;
2282 dest2 = NULL_RTX;
2283 } while (dest1);
2284 }
2285 }
2286
2287 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2288 {
2289 /* We found one. Move everything from there up
2290 to the end of the loop, and add a jump into the loop
2291 to jump to there. */
2292 register rtx newstart_label = gen_label_rtx ();
2293 register rtx start_move = start_label;
2294 rtx next_insn;
2295
2296 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2297 then we want to move this note also. */
2298 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2299 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2300 == NOTE_INSN_LOOP_CONT))
2301 start_move = PREV_INSN (start_move);
2302
2303 emit_label_after (newstart_label, PREV_INSN (start_move));
2304
2305 /* Actually move the insns. Start at the beginning, and
2306 keep copying insns until we've copied the
2307 last_test_insn. */
2308 for (insn = start_move; insn; insn = next_insn)
2309 {
2310 /* Figure out which insn comes after this one. We have
2311 to do this before we move INSN. */
2312 if (insn == last_test_insn)
2313 /* We've moved all the insns. */
2314 next_insn = NULL_RTX;
2315 else
2316 next_insn = NEXT_INSN (insn);
2317
2318 if (GET_CODE (insn) == NOTE
2319 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2320 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2321 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2322 NOTE_INSN_BLOCK_ENDs because the correct generation
2323 of debugging information depends on these appearing
2324 in the same order in the RTL and in the tree
2325 structure, where they are represented as BLOCKs.
2326 So, we don't move block notes. Of course, moving
2327 the code inside the block is likely to make it
2328 impossible to debug the instructions in the exit
2329 test, but such is the price of optimization. */
2330 continue;
2331
2332 /* Move the INSN. */
2333 reorder_insns (insn, insn, get_last_insn ());
2334 }
2335
2336 emit_jump_insn_after (gen_jump (start_label),
2337 PREV_INSN (newstart_label));
2338 emit_barrier_after (PREV_INSN (newstart_label));
2339 start_label = newstart_label;
2340 }
2341 }
2342
2343 if (needs_end_jump)
2344 {
2345 emit_jump (start_label);
2346 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2347 }
2348 emit_label (loop_stack->data.loop.end_label);
2349
2350 POPSTACK (loop_stack);
2351
2352 last_expr_type = 0;
2353}
2354
2355/* Generate a jump to the current loop's continue-point.
2356 This is usually the top of the loop, but may be specified
2357 explicitly elsewhere. If not currently inside a loop,
2358 return 0 and do nothing; caller will print an error message. */
2359
2360int
2361expand_continue_loop (whichloop)
2362 struct nesting *whichloop;
2363{
2364 last_expr_type = 0;
2365 if (whichloop == 0)
2366 whichloop = loop_stack;
2367 if (whichloop == 0)
2368 return 0;
2369 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2370 NULL_RTX);
2371 return 1;
2372}
2373
2374/* Generate a jump to exit the current loop. If not currently inside a loop,
2375 return 0 and do nothing; caller will print an error message. */
2376
2377int
2378expand_exit_loop (whichloop)
2379 struct nesting *whichloop;
2380{
2381 last_expr_type = 0;
2382 if (whichloop == 0)
2383 whichloop = loop_stack;
2384 if (whichloop == 0)
2385 return 0;
2386 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2387 return 1;
2388}
2389
2390/* Generate a conditional jump to exit the current loop if COND
2391 evaluates to zero. If not currently inside a loop,
2392 return 0 and do nothing; caller will print an error message. */
2393
2394int
2395expand_exit_loop_if_false (whichloop, cond)
2396 struct nesting *whichloop;
2397 tree cond;
2398{
2399 rtx label = gen_label_rtx ();
2400 rtx last_insn;
2401 last_expr_type = 0;
2402
2403 if (whichloop == 0)
2404 whichloop = loop_stack;
2405 if (whichloop == 0)
2406 return 0;
2407 /* In order to handle fixups, we actually create a conditional jump
2408     around an unconditional branch to exit the loop. If fixups are
2409 necessary, they go before the unconditional branch. */
2410
2412 do_jump (cond, NULL_RTX, label);
2413 last_insn = get_last_insn ();
2414 if (GET_CODE (last_insn) == CODE_LABEL)
2415 whichloop->data.loop.alt_end_label = last_insn;
2416 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2417 NULL_RTX);
2418 emit_label (label);
2419
2420 return 1;
2421}
2422
2423/* Return nonzero if the loop nest is empty. Else return zero. */
2424
2425int
2426stmt_loop_nest_empty ()
2427{
2428 return (loop_stack == NULL);
2429}
2430
2431/* Return non-zero if we should preserve sub-expressions as separate
2432 pseudos. We never do so if we aren't optimizing. We always do so
2433 if -fexpensive-optimizations.
2434
2435 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2436 the loop may still be a small one. */
2437
2438int
2439preserve_subexpressions_p ()
2440{
2441 rtx insn;
2442
2443 if (flag_expensive_optimizations)
2444 return 1;
2445
2446 if (optimize == 0 || loop_stack == 0)
2447 return 0;
2448
2449 insn = get_last_insn_anywhere ();
2450
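  /* INSN_UIDs increase as insns are made, so the difference below
     approximates how many insns have been emitted since the loop
     started; "early" means fewer than about three per allocatable
     register.  */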
2451 return (insn
2452 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2453 < n_non_fixed_regs * 3));
2455}
2456
2457/* Generate a jump to exit the current loop, conditional, binding contour
2458 or case statement. Not all such constructs are visible to this function,
2459 only those started with EXIT_FLAG nonzero. Individual languages use
2460 the EXIT_FLAG parameter to control which kinds of constructs you can
2461 exit this way.
2462
2463 If not currently inside anything that can be exited,
2464 return 0 and do nothing; caller will print an error message. */
2465
2466int
2467expand_exit_something ()
2468{
2469 struct nesting *n;
2470 last_expr_type = 0;
2471 for (n = nesting_stack; n; n = n->all)
2472 if (n->exit_label != 0)
2473 {
2474 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2475 return 1;
2476 }
2477
2478 return 0;
2479}
2480\f
2481/* Generate RTL to return from the current function, with no value.
2482 (That is, we do not do anything about returning any value.) */
2483
2484void
2485expand_null_return ()
2486{
2487 struct nesting *block = block_stack;
2488 rtx last_insn = 0;
2489
2490 /* Does any pending block have cleanups? */
2491
2492 while (block && block->data.block.cleanups == 0)
2493 block = block->next;
2494
2495 /* If yes, use a goto to return, since that runs cleanups. */
2496
2497 expand_null_return_1 (last_insn, block != 0);
2498}
2499
2500/* Generate RTL to return from the current function, with value VAL. */
2501
2502static void
2503expand_value_return (val)
2504 rtx val;
2505{
2506 struct nesting *block = block_stack;
2507 rtx last_insn = get_last_insn ();
2508 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2509
2510 /* Copy the value to the return location
2511 unless it's already there. */
2512
2513 if (return_reg != val)
2514 {
2515#ifdef PROMOTE_FUNCTION_RETURN
2516 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2517 int unsignedp = TREE_UNSIGNED (type);
2518 enum machine_mode old_mode
2519 = DECL_MODE (DECL_RESULT (current_function_decl));
2520 enum machine_mode mode
2521 = promote_mode (type, old_mode, &unsignedp, 1);
2522
2523 if (mode != old_mode)
2524 val = convert_modes (mode, old_mode, val, unsignedp);
2525#endif
2526 emit_move_insn (return_reg, val);
2527 }
2528 if (GET_CODE (return_reg) == REG
2529 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2530 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2531 /* Handle calls that return values in multiple non-contiguous locations.
2532 The Irix 6 ABI has examples of this. */
2533 else if (GET_CODE (return_reg) == PARALLEL)
2534 {
2535 int i;
2536
2537 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2538 {
2539 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2540
2541 if (GET_CODE (x) == REG
2542 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2543 emit_insn (gen_rtx_USE (VOIDmode, x));
2544 }
2545 }
2546
2547 /* Does any pending block have cleanups? */
2548
2549 while (block && block->data.block.cleanups == 0)
2550 block = block->next;
2551
2552 /* If yes, use a goto to return, since that runs cleanups.
2553 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2554
2555 expand_null_return_1 (last_insn, block != 0);
2556}
2557
2558/* Output a return with no value. If LAST_INSN is nonzero,
2559 pretend that the return takes place after LAST_INSN.
2560 If USE_GOTO is nonzero then don't use a return instruction;
2561 go to the return label instead. This causes any cleanups
2562 of pending blocks to be executed normally. */
2563
2564static void
2565expand_null_return_1 (last_insn, use_goto)
2566 rtx last_insn;
2567 int use_goto;
2568{
2569 rtx end_label = cleanup_label ? cleanup_label : return_label;
2570
2571 clear_pending_stack_adjust ();
2572 do_pending_stack_adjust ();
2573 last_expr_type = 0;
2574
2575 /* PCC-struct return always uses an epilogue. */
2576 if (current_function_returns_pcc_struct || use_goto)
2577 {
2578 if (end_label == 0)
2579 end_label = return_label = gen_label_rtx ();
2580 expand_goto_internal (NULL_TREE, end_label, last_insn);
2581 return;
2582 }
2583
2584 /* Otherwise output a simple return-insn if one is available,
2585 unless it won't do the job. */
2586#ifdef HAVE_return
2587 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2588 {
2589 emit_jump_insn (gen_return ());
2590 emit_barrier ();
2591 return;
2592 }
2593#endif
2594
2595 /* Otherwise jump to the epilogue. */
2596 expand_goto_internal (NULL_TREE, end_label, last_insn);
2597}
2598\f
2599/* Generate RTL to evaluate the expression RETVAL and return it
2600 from the current function. */
2601
2602void
2603expand_return (retval)
2604 tree retval;
2605{
2606 /* If there are any cleanups to be performed, then they will
2607 be inserted following LAST_INSN. It is desirable
2608 that the last_insn, for such purposes, should be the
2609 last insn before computing the return value. Otherwise, cleanups
2610 which call functions can clobber the return value. */
2611 /* ??? rms: I think that is erroneous, because in C++ it would
2612 run destructors on variables that might be used in the subsequent
2613 computation of the return value. */
2614 rtx last_insn = 0;
2615 register rtx val = 0;
2616 register rtx op0;
2617 tree retval_rhs;
2618 int cleanups;
2619
2620 /* If function wants no value, give it none. */
2621 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2622 {
2623 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2624 emit_queue ();
2625 expand_null_return ();
2626 return;
2627 }
2628
2629 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2630 /* This is not sufficient. We also need to watch for cleanups of the
2631 expression we are about to expand. Unfortunately, we cannot know
2632 if it has cleanups until we expand it, and we want to change how we
2633 expand it depending upon if we need cleanups. We can't win. */
2634#if 0
2635 cleanups = any_pending_cleanups (1);
2636#else
2637 cleanups = 1;
2638#endif
2639
2640 if (TREE_CODE (retval) == RESULT_DECL)
2641 retval_rhs = retval;
2642 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2643 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2644 retval_rhs = TREE_OPERAND (retval, 1);
2645 else if (TREE_TYPE (retval) == void_type_node)
2646 /* Recognize tail-recursive call to void function. */
2647 retval_rhs = retval;
2648 else
2649 retval_rhs = NULL_TREE;
2650
2651 /* Only use `last_insn' if there are cleanups which must be run. */
2652 if (cleanups || cleanup_label != 0)
2653 last_insn = get_last_insn ();
2654
2655 /* Distribute return down conditional expr if either of the sides
2656 may involve tail recursion (see test below). This enhances the number
2657 of tail recursions we see. Don't do this always since it can produce
2658 sub-optimal code in some cases and we distribute assignments into
2659 conditional expressions when it would help. */
2660
2661 if (optimize && retval_rhs != 0
2662 && frame_offset == 0
2663 && TREE_CODE (retval_rhs) == COND_EXPR
2664 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2665 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2666 {
2667 rtx label = gen_label_rtx ();
2668 tree expr;
2669
2670 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2671 start_cleanup_deferral ();
2672 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2673 DECL_RESULT (current_function_decl),
2674 TREE_OPERAND (retval_rhs, 1));
2675 TREE_SIDE_EFFECTS (expr) = 1;
2676 expand_return (expr);
2677 emit_label (label);
2678
2679 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2680 DECL_RESULT (current_function_decl),
2681 TREE_OPERAND (retval_rhs, 2));
2682 TREE_SIDE_EFFECTS (expr) = 1;
2683 expand_return (expr);
2684 end_cleanup_deferral ();
2685 return;
2686 }
2687
2688 /* Attempt to optimize the call if it is tail recursive. */
2689 if (optimize_tail_recursion (retval_rhs, last_insn))
2690 return;
2691
2692#ifdef HAVE_return
2693 /* This optimization is safe if there are local cleanups
2694 because expand_null_return takes care of them.
2695 ??? I think it should also be safe when there is a cleanup label,
2696 because expand_null_return takes care of them, too.
2697 Any reason why not? */
2698 if (HAVE_return && cleanup_label == 0
2699 && ! current_function_returns_pcc_struct
2700 && BRANCH_COST <= 1)
2701 {
2702 /* If this is return x == y; then generate
2703 if (x == y) return 1; else return 0;
2704 if we can do it with explicit return insns and branches are cheap,
2705 but not if we have the corresponding scc insn. */
2706 int has_scc = 0;
2707 if (retval_rhs)
2708 switch (TREE_CODE (retval_rhs))
2709 {
2710 case EQ_EXPR:
2711#ifdef HAVE_seq
2712 has_scc = HAVE_seq;
2713#endif
2714 case NE_EXPR:
2715#ifdef HAVE_sne
2716 has_scc = HAVE_sne;
2717#endif
2718 case GT_EXPR:
2719#ifdef HAVE_sgt
2720 has_scc = HAVE_sgt;
2721#endif
2722 case GE_EXPR:
2723#ifdef HAVE_sge
2724 has_scc = HAVE_sge;
2725#endif
2726 case LT_EXPR:
2727#ifdef HAVE_slt
2728 has_scc = HAVE_slt;
2729#endif
2730 case LE_EXPR:
2731#ifdef HAVE_sle
2732 has_scc = HAVE_sle;
2733#endif
2734 case TRUTH_ANDIF_EXPR:
2735 case TRUTH_ORIF_EXPR:
2736 case TRUTH_AND_EXPR:
2737 case TRUTH_OR_EXPR:
2738 case TRUTH_NOT_EXPR:
2739 case TRUTH_XOR_EXPR:
2740 if (! has_scc)
2741 {
2742 op0 = gen_label_rtx ();
2743 jumpifnot (retval_rhs, op0);
2744 expand_value_return (const1_rtx);
2745 emit_label (op0);
2746 expand_value_return (const0_rtx);
2747 return;
2748 }
2749 break;
2750
2751 default:
2752 break;
2753 }
2754 }
2755#endif /* HAVE_return */
2756
2757 /* If the result is an aggregate that is being returned in one (or more)
2758 registers, load the registers here. The compiler currently can't handle
2759 copying a BLKmode value into registers. We could put this code in a
2760 more general area (for use by everyone instead of just function
2761 call/return), but until this feature is generally usable it is kept here
2762 (and in expand_call). The value must go into a pseudo in case there
2763 are cleanups that will clobber the real return register. */
2764
2765 if (retval_rhs != 0
2766 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2767 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2768 {
2769 int i, bitpos, xbitpos;
2770 int big_endian_correction = 0;
2771 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2772 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2773 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
2774 (unsigned int)BITS_PER_WORD);
2775 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2776 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2777 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2778 enum machine_mode tmpmode, result_reg_mode;
2779
2780 /* Structures whose size is not a multiple of a word are aligned
2781 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2782 machine, this means we must skip the empty high order bytes when
2783 calculating the bit offset. */
2784 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2785 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2786 * BITS_PER_UNIT));
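
      /* Worked example (assuming 32-bit words and 8-bit units): for a
	 6-byte structure, bytes % UNITS_PER_WORD == 2, so xbitpos
	 starts at 32 - 2*8 == 16, skipping the two empty high-order
	 bytes of the first destination word.  */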
2787
2788 /* Copy the structure BITSIZE bits at a time. */
2789 for (bitpos = 0, xbitpos = big_endian_correction;
2790 bitpos < bytes * BITS_PER_UNIT;
2791 bitpos += bitsize, xbitpos += bitsize)
2792 {
2793 /* We need a new destination pseudo each time xbitpos is
2794 on a word boundary and when xbitpos == big_endian_correction
2795 (the first time through). */
2796 if (xbitpos % BITS_PER_WORD == 0
2797 || xbitpos == big_endian_correction)
2798 {
2799 /* Generate an appropriate register. */
2800 dst = gen_reg_rtx (word_mode);
2801 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2802
2803 /* Clobber the destination before we move anything into it. */
2804 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2805 }
2806
2807 /* We need a new source operand each time bitpos is on a word
2808 boundary. */
2809 if (bitpos % BITS_PER_WORD == 0)
2810 src = operand_subword_force (result_val,
2811 bitpos / BITS_PER_WORD,
2812 BLKmode);
2813
2814 /* Use bitpos for the source extraction (left justified) and
2815 xbitpos for the destination store (right justified). */
2816 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2817 extract_bit_field (src, bitsize,
2818 bitpos % BITS_PER_WORD, 1,
2819 NULL_RTX, word_mode,
2820 word_mode,
2821 bitsize / BITS_PER_UNIT,
2822 BITS_PER_WORD),
2823 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2824 }
2825
2826 /* Find the smallest integer mode large enough to hold the
2827 entire structure and use that mode instead of BLKmode
2828 on the USE insn for the return register. */
2829 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2830 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2831 tmpmode != MAX_MACHINE_MODE;
2832 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2833 {
2834 /* Have we found a large enough mode? */
2835 if (GET_MODE_SIZE (tmpmode) >= bytes)
2836 break;
2837 }
2838
2839 /* No suitable mode found. */
2840 if (tmpmode == MAX_MACHINE_MODE)
2841 abort ();
2842
2843 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2844
2845 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2846 result_reg_mode = word_mode;
2847 else
2848 result_reg_mode = tmpmode;
2849 result_reg = gen_reg_rtx (result_reg_mode);
2850
2851 emit_queue ();
2852 for (i = 0; i < n_regs; i++)
2853 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2854 result_pseudos[i]);
2855
2856 if (tmpmode != result_reg_mode)
2857 result_reg = gen_lowpart (tmpmode, result_reg);
2858
2859 expand_value_return (result_reg);
2860 }
2861 else if (cleanups
2862 && retval_rhs != 0
2863 && TREE_TYPE (retval_rhs) != void_type_node
2864 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2865 {
2866 /* Calculate the return value into a pseudo reg. */
2867 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2868 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2869 val = force_not_mem (val);
2870 emit_queue ();
2871 /* Return the calculated value, doing cleanups first. */
2872 expand_value_return (val);
2873 }
2874 else
2875 {
2876 /* No cleanups or no hard reg used;
2877 calculate value into hard return reg. */
2878 expand_expr (retval, const0_rtx, VOIDmode, 0);
2879 emit_queue ();
2880 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2881 }
2882}
2883
2884/* Return 1 if the end of the generated RTX is not a barrier.
2885 This means code already compiled can drop through. */
2886
2887int
2888drop_through_at_end_p ()
2889{
2890 rtx insn = get_last_insn ();
2891 while (insn && GET_CODE (insn) == NOTE)
2892 insn = PREV_INSN (insn);
2893 return insn && GET_CODE (insn) != BARRIER;
2894}
2895\f
2896/* Test CALL_EXPR to determine if it is a potential tail recursion call
2897 and emit code to optimize the tail recursion. LAST_INSN indicates where
2898 to place the jump to the tail recursion label. Return TRUE if the
2899 call was optimized into a goto.
2900
2901 This is only used by expand_return, but expand_call is expected to
2902 use it soon. */
2903
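/* Illustrative sketch: for `int f (int n) { ... return f (n - 1); }',
   the return of the recursive call comes through here; the code below
   stores `n - 1' into N's rtl via tail_recursion_args and jumps back
   to tail_recursion_label near the top of the function.  */
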
2904int
2905optimize_tail_recursion (call_expr, last_insn)
2906 tree call_expr;
2907 rtx last_insn;
2908{
2909 /* For tail-recursive call to current function,
2910 just jump back to the beginning.
2911 It's unsafe if any auto variable in this function
2912 has its address taken; for simplicity,
2913 require stack frame to be empty. */
2914 if (optimize && call_expr != 0
2915 && frame_offset == 0
2916 && TREE_CODE (call_expr) == CALL_EXPR
2917 && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
2918 && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
2919 /* Finish checking validity, and if valid emit code
2920 to set the argument variables for the new call. */
2921 && tail_recursion_args (TREE_OPERAND (call_expr, 1),
2922 DECL_ARGUMENTS (current_function_decl)))
2923 {
2924 if (tail_recursion_label == 0)
2925 {
2926 tail_recursion_label = gen_label_rtx ();
2927 emit_label_after (tail_recursion_label,
2928 tail_recursion_reentry);
2929 }
2930 emit_queue ();
2931 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2932 emit_barrier ();
2933 return 1;
2934 }
2935
2936 return 0;
2937}
2938
2939/* Emit code to alter this function's formal parms for a tail-recursive call.
2940 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2941 FORMALS is the chain of decls of formals.
2942 Return 1 if this can be done;
2943 otherwise return 0 and do not emit any code. */
2944
2945static int
2946tail_recursion_args (actuals, formals)
2947 tree actuals, formals;
2948{
2949 register tree a = actuals, f = formals;
2950 register int i;
2951 register rtx *argvec;
2952
2953 /* Check that number and types of actuals are compatible
2954 with the formals. This is not always true in valid C code.
2955 Also check that no formal needs to be addressable
2956 and that all formals are scalars. */
2957
2958 /* Also count the args. */
2959
2960 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2961 {
2962 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2963 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2964 return 0;
2965 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2966 return 0;
2967 }
2968 if (a != 0 || f != 0)
2969 return 0;
2970
2971 /* Compute all the actuals. */
2972
2973 argvec = (rtx *) alloca (i * sizeof (rtx));
2974
2975 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2976 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2977
2978 /* Find which actual values refer to current values of previous formals.
2979 Copy each of them now, before any formal is changed. */
2980
2981 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2982 {
2983 int copy = 0;
2984 register int j;
2985 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2986 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2987 { copy = 1; break; }
2988 if (copy)
2989 argvec[i] = copy_to_reg (argvec[i]);
2990 }
2991
2992 /* Store the values of the actuals into the formals. */
2993
2994 for (f = formals, a = actuals, i = 0; f;
2995 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2996 {
2997 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2998 emit_move_insn (DECL_RTL (f), argvec[i]);
2999 else
3000 convert_move (DECL_RTL (f), argvec[i],
3001 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3002 }
3003
3004 free_temp_slots ();
3005 return 1;
3006}
3007\f
3008/* Generate the RTL code for entering a binding contour.
3009 The variables are declared one by one, by calls to `expand_decl'.
3010
3011 EXIT_FLAG is nonzero if this construct should be visible to
3012 `exit_something'. */
3013
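/* Sketch of the expected calling sequence (illustrative; DECLS stands
   for the block's chain of VAR_DECLs): for a block `{ int v; ... }' a
   front end does roughly

	expand_start_bindings (0);
	expand_decl (v);
	  ... expand the statements ...
	expand_end_bindings (decls, 1, 0);  */
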
3014void
3015expand_start_bindings (exit_flag)
3016 int exit_flag;
3017{
3018 struct nesting *thisblock = ALLOC_NESTING ();
3019 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3020
3021 /* Make an entry on block_stack for the block we are entering. */
3022
3023 thisblock->next = block_stack;
3024 thisblock->all = nesting_stack;
3025 thisblock->depth = ++nesting_depth;
3026 thisblock->data.block.stack_level = 0;
3027 thisblock->data.block.cleanups = 0;
3028 thisblock->data.block.function_call_count = 0;
3029 thisblock->data.block.exception_region = 0;
3030 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
3031
3032 thisblock->data.block.conditional_code = 0;
3033 thisblock->data.block.last_unconditional_cleanup = note;
3034 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3035
3036 if (block_stack
3037 && !(block_stack->data.block.cleanups == NULL_TREE
3038 && block_stack->data.block.outer_cleanups == NULL_TREE))
3039 thisblock->data.block.outer_cleanups
3040 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3041 block_stack->data.block.outer_cleanups);
3042 else
3043 thisblock->data.block.outer_cleanups = 0;
3044 thisblock->data.block.label_chain = 0;
3045 thisblock->data.block.innermost_stack_block = stack_block_stack;
3046 thisblock->data.block.first_insn = note;
3047 thisblock->data.block.block_start_count = ++block_start_count;
3048 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3049 block_stack = thisblock;
3050 nesting_stack = thisblock;
3051
3052 /* Make a new level for allocating stack slots. */
3053 push_temp_slots ();
3054}
3055
3056/* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3057 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3058 expand_expr are made. After we end the region, we know that all
3059 space for all temporaries that were created by TARGET_EXPRs will be
3060 destroyed and their space freed for reuse. */
3061
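/* A typical (illustrative) use brackets the expansion of one full
   expression:

	expand_start_target_temps ();
	... expand the expression ...
	expand_end_target_temps ();  */
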
3062void
3063expand_start_target_temps ()
3064{
3065 /* This is so that even if the result is preserved, the space
3066 allocated will be freed, as we know that it is no longer in use. */
3067 push_temp_slots ();
3068
3069 /* Start a new binding layer that will keep track of all cleanup
3070 actions to be performed. */
3071 expand_start_bindings (0);
3072
3073 target_temp_slot_level = temp_slot_level;
3074}
3075
3076void
3077expand_end_target_temps ()
3078{
3079 expand_end_bindings (NULL_TREE, 0, 0);
3080
3081 /* This is so that even if the result is preserved, the space
3082 allocated will be freed, as we know that it is no longer in use. */
3083 pop_temp_slots ();
3084}
3085
3086/* Mark top block of block_stack as an implicit binding for an
3087 exception region. This is used to prevent infinite recursion when
3088 ending a binding with expand_end_bindings. It is only ever called
3089   by expand_eh_region_start, as that is the only way to create a
3090   block stack for an exception region. */
3091
3092void
3093mark_block_as_eh_region ()
3094{
3095 block_stack->data.block.exception_region = 1;
3096 if (block_stack->next
3097 && block_stack->next->data.block.conditional_code)
3098 {
3099 block_stack->data.block.conditional_code
3100 = block_stack->next->data.block.conditional_code;
3101 block_stack->data.block.last_unconditional_cleanup
3102 = block_stack->next->data.block.last_unconditional_cleanup;
3103 block_stack->data.block.cleanup_ptr
3104 = block_stack->next->data.block.cleanup_ptr;
3105 }
3106}
3107
3108/* True if we are currently emitting insns in an area of output code
3109 that is controlled by a conditional expression. This is used by
3110 the cleanup handling code to generate conditional cleanup actions. */
3111
3112int
3113conditional_context ()
3114{
3115 return block_stack && block_stack->data.block.conditional_code;
3116}
3117
3118/* Mark top block of block_stack as not for an implicit binding for an
3119 exception region. This is only ever done by expand_eh_region_end
3120 to let expand_end_bindings know that it is being called explicitly
3121 to end the binding layer for just the binding layer associated with
3122   the exception region; otherwise expand_end_bindings would try to
3123   end all implicit binding layers for exception regions, and then
3124 one normal binding layer. */
3125
3126void
3127mark_block_as_not_eh_region ()
3128{
3129 block_stack->data.block.exception_region = 0;
3130}
3131
3132/* True if the top block of block_stack was marked as for an exception
3133 region by mark_block_as_eh_region. */
3134
3135int
3136is_eh_region ()
3137{
3138 return block_stack && block_stack->data.block.exception_region;
3139}
3140
3141/* Given a pointer to a BLOCK node, save a pointer to the most recently
3142 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3143 BLOCK node. */
3144
3145void
3146remember_end_note (block)
3147 register tree block;
3148{
3149 BLOCK_END_NOTE (block) = last_block_end_note;
3150 last_block_end_note = NULL_RTX;
3151}
3152
3153/* Emit a handler label for a nonlocal goto handler.
3154 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3155
3156static rtx
3157expand_nl_handler_label (slot, before_insn)
3158 rtx slot, before_insn;
3159{
3160 rtx insns;
3161 rtx handler_label = gen_label_rtx ();
3162
3163 /* Don't let jump_optimize delete the handler. */
3164 LABEL_PRESERVE_P (handler_label) = 1;
3165
3166 start_sequence ();
3167 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3168 insns = get_insns ();
3169 end_sequence ();
3170 emit_insns_before (insns, before_insn);
3171
3172 emit_label (handler_label);
3173
3174 return handler_label;
3175}
3176
3177/* Emit code to restore vital registers at the beginning of a nonlocal goto
3178 handler. */
3179static void
3180expand_nl_goto_receiver ()
3181{
3182#ifdef HAVE_nonlocal_goto
3183 if (! HAVE_nonlocal_goto)
3184#endif
3185 /* First adjust our frame pointer to its actual value. It was
3186 previously set to the start of the virtual area corresponding to
3187 the stacked variables when we branched here and now needs to be
3188 adjusted to the actual hardware fp value.
3189
3190     Assignments to virtual registers are converted by
3191 instantiate_virtual_regs into the corresponding assignment
3192 to the underlying register (fp in this case) that makes
3193 the original assignment true.
3194 So the following insn will actually be
3195 decrementing fp by STARTING_FRAME_OFFSET. */
3196 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3197
3198#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3199 if (fixed_regs[ARG_POINTER_REGNUM])
3200 {
3201#ifdef ELIMINABLE_REGS
3202 /* If the argument pointer can be eliminated in favor of the
3203 frame pointer, we don't need to restore it. We assume here
3204 that if such an elimination is present, it can always be used.
3205 This is the case on all known machines; if we don't make this
3206 assumption, we do unnecessary saving on many machines. */
3207 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3208 size_t i;
3209
3210 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3211 if (elim_regs[i].from == ARG_POINTER_REGNUM
3212 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3213 break;
3214
3215 if (i == sizeof elim_regs / sizeof elim_regs [0])
3216#endif
3217 {
3218 /* Now restore our arg pointer from the address at which it
3219 was saved in our stack frame.
3220	 If there hasn't been space allocated for it yet, make
3221 some now. */
3222 if (arg_pointer_save_area == 0)
3223 arg_pointer_save_area
3224 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3225 emit_move_insn (virtual_incoming_args_rtx,
3226 /* We need a pseudo here, or else
3227 instantiate_virtual_regs_1 complains. */
3228 copy_to_reg (arg_pointer_save_area));
3229 }
3230 }
3231#endif
3232
3233#ifdef HAVE_nonlocal_goto_receiver
3234 if (HAVE_nonlocal_goto_receiver)
3235 emit_insn (gen_nonlocal_goto_receiver ());
3236#endif
3237}
3238
3239/* Make handlers for nonlocal gotos taking place in the function calls in
3240 block THISBLOCK. */
3241
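/* Shape of the emitted code (illustrative):

	goto afterward;
     handler for label 1:
	restore frame and arg pointers;
	goto the real label 1;
	... more handlers ...
     afterward:
   */
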
3242static void
3243expand_nl_goto_receivers (thisblock)
3244 struct nesting *thisblock;
3245{
3246 tree link;
3247 rtx afterward = gen_label_rtx ();
3248 rtx insns, slot;
3249 rtx label_list;
3250 int any_invalid;
3251
3252 /* Record the handler address in the stack slot for that purpose,
3253 during this block, saving and restoring the outer value. */
3254 if (thisblock->next != 0)
3255 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3256 {
3257 rtx save_receiver = gen_reg_rtx (Pmode);
3258 emit_move_insn (XEXP (slot, 0), save_receiver);
3259
3260 start_sequence ();
3261 emit_move_insn (save_receiver, XEXP (slot, 0));
3262 insns = get_insns ();
3263 end_sequence ();
3264 emit_insns_before (insns, thisblock->data.block.first_insn);
3265 }
3266
3267 /* Jump around the handlers; they run only when specially invoked. */
3268 emit_jump (afterward);
3269
3270 /* Make a separate handler for each label. */
3271 link = nonlocal_labels;
3272 slot = nonlocal_goto_handler_slots;
3273 label_list = NULL_RTX;
3274 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3275    /* Skip any labels we shouldn't be able to jump to from here;
3276       we generate one special handler for all of them below, which just calls
3277 abort. */
3278 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3279 {
3280 rtx lab;
3281 lab = expand_nl_handler_label (XEXP (slot, 0),
3282 thisblock->data.block.first_insn);
3283 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3284
3285 expand_nl_goto_receiver ();
3286
3287 /* Jump to the "real" nonlocal label. */
3288 expand_goto (TREE_VALUE (link));
3289 }
3290
3291 /* A second pass over all nonlocal labels; this time we handle those
3292 we should not be able to jump to at this point. */
3293 link = nonlocal_labels;
3294 slot = nonlocal_goto_handler_slots;
3295 any_invalid = 0;
3296 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3297 if (DECL_TOO_LATE (TREE_VALUE (link)))
3298 {
3299 rtx lab;
3300 lab = expand_nl_handler_label (XEXP (slot, 0),
3301 thisblock->data.block.first_insn);
3302 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3303 any_invalid = 1;
3304 }
3305
3306 if (any_invalid)
3307 {
3308 expand_nl_goto_receiver ();
3309 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3310 VOIDmode, 0);
3311 emit_barrier ();
3312 }
3313
3314 nonlocal_goto_handler_labels = label_list;
3315 emit_label (afterward);
3316}
3317
3318/* Generate RTL code to terminate a binding contour.
3319
3320 VARS is the chain of VAR_DECL nodes for the variables bound in this
3321 contour. There may actually be other nodes in this chain, but any
3322 nodes other than VAR_DECLS are ignored.
3323
3324 MARK_ENDS is nonzero if we should put a note at the beginning
3325 and end of this binding contour.
3326
3327 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3328 (That is true automatically if the contour has a saved stack level.) */
3329
3330void
3331expand_end_bindings (vars, mark_ends, dont_jump_in)
3332 tree vars;
3333 int mark_ends;
3334 int dont_jump_in;
3335{
3336 register struct nesting *thisblock;
3337 register tree decl;
3338
3339 while (block_stack->data.block.exception_region)
3340 {
3341 /* Because we don't need or want a new temporary level and
3342 because we didn't create one in expand_eh_region_start,
3343 create a fake one now to avoid removing one in
3344 expand_end_bindings. */
3345 push_temp_slots ();
3346
3347 block_stack->data.block.exception_region = 0;
3348
3349 expand_end_bindings (NULL_TREE, 0, 0);
3350 }
3351
3352 /* Since expand_eh_region_start does an expand_start_bindings, we
3353 have to first end all the bindings that were created by
3354 expand_eh_region_start. */
3355
3356 thisblock = block_stack;
3357
  if (warn_unused)
    for (decl = vars; decl; decl = TREE_CHAIN (decl))
      if (TREE_CODE (decl) == VAR_DECL
          && ! TREE_USED (decl)
          && ! DECL_IN_SYSTEM_HEADER (decl)
          && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
        warning_with_decl (decl, "unused variable `%s'");

  if (thisblock->exit_label)
    {
      do_pending_stack_adjust ();
      emit_label (thisblock->exit_label);
    }

  /* If necessary, make handlers for nonlocal gotos taking
     place in the function calls in this block.  */
  if (function_call_count != thisblock->data.block.function_call_count
      && nonlocal_labels
      /* Make handler for outermost block
         if there were any nonlocal gotos to this function.  */
      && (thisblock->next == 0 ? current_function_has_nonlocal_label
          /* Make handler for inner block if it has something
             special to do when you jump out of it.  */
          : (thisblock->data.block.cleanups != 0
             || thisblock->data.block.stack_level != 0)))
    expand_nl_goto_receivers (thisblock);

  /* Don't allow jumping into a block that has a stack level.
     Cleanups are allowed, though.  */
  if (dont_jump_in
      || thisblock->data.block.stack_level != 0)
    {
      struct label_chain *chain;

      /* Any labels in this block are no longer valid to go to.
         Mark them to cause an error message.  */
      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
        {
          DECL_TOO_LATE (chain->label) = 1;
          /* If any goto without a fixup came to this label,
             that must be an error, because gotos without fixups
             come from outside all saved stack-levels.  */
          if (TREE_ADDRESSABLE (chain->label))
            error_with_decl (chain->label,
                             "label `%s' used before containing binding contour");
        }
    }

  /* Restore the stack level in effect before the block
     (only if variable-size objects were allocated).  */
  /* Perform any cleanups associated with the block.  */

  if (thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      /* Only clean up here if this point can actually be reached.  */
      int reachable = GET_CODE (get_last_insn ()) != BARRIER;

      /* Don't let cleanups affect ({...}) constructs.  */
      int old_expr_stmts_for_value = expr_stmts_for_value;
      rtx old_last_expr_value = last_expr_value;
      tree old_last_expr_type = last_expr_type;
      expr_stmts_for_value = 0;

      /* Do the cleanups.  */
      expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
      if (reachable)
        do_pending_stack_adjust ();

      expr_stmts_for_value = old_expr_stmts_for_value;
      last_expr_value = old_last_expr_value;
      last_expr_type = old_last_expr_type;

      /* Restore the stack level.  */

      if (reachable && thisblock->data.block.stack_level != 0)
        {
          emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
                              thisblock->data.block.stack_level, NULL_RTX);
          if (nonlocal_goto_handler_slots != 0)
            emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
                             NULL_RTX);
        }

      /* Any gotos out of this block must also do these things.
         Also report any gotos with fixups that came to labels in this
         level.  */
      fixup_gotos (thisblock,
                   thisblock->data.block.stack_level,
                   thisblock->data.block.cleanups,
                   thisblock->data.block.first_insn,
                   dont_jump_in);
    }

  /* Mark the beginning and end of the scope if requested.
     We do this now, after running cleanups on the variables
     just going out of scope, so they are in scope for their cleanups.  */

  if (mark_ends)
    last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
  else
    /* Get rid of the beginning-mark if we don't make an end-mark.  */
    NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;

  /* If doing stupid register allocation, make sure the lives of all
     register variables declared here extend through the end of the scope.  */

  if (obey_regdecls)
    for (decl = vars; decl; decl = TREE_CHAIN (decl))
      if (TREE_CODE (decl) == VAR_DECL && DECL_RTL (decl))
        use_variable (DECL_RTL (decl));

  /* Restore the temporary level of TARGET_EXPRs.  */
  target_temp_slot_level = thisblock->data.block.target_temp_slot_level;

  /* Restore block_stack level for the containing block.  */

  stack_block_stack = thisblock->data.block.innermost_stack_block;
  POPSTACK (block_stack);

  /* Pop the stack slot nesting and free any slots at this level.  */
  pop_temp_slots ();
}
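
/* An illustrative usage sketch (not code from this file): a front end
   compiling a compound statement such as

        { char *p = alloca (n); ... }

   brackets it with calls along the lines of

        expand_start_bindings (0);
        ... expand_decl / expand_expr calls for the body ...
        expand_end_bindings (getdecls (), 1, 0);

   so that the stack level saved for the alloca is restored above and,
   since MARK_ENDS is nonzero, the scope ends up delimited by
   NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes.  The exact
   arguments, and the getdecls hook, vary from front end to front
   end.  */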
\f
/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)  */

void
expand_decl (decl)
     register tree decl;
{
  struct nesting *thisblock = block_stack;
  tree type;

  type = TREE_TYPE (decl);

  /* Only automatic variables need any expansion done.
     Static and external variables, and external functions,
     will be handled by `assemble_variable' (called from finish_decl).
     TYPE_DECL and CONST_DECL require nothing.
     PARM_DECLs are handled in `assign_parms'.  */

  if (TREE_CODE (decl) != VAR_DECL)
    return;
  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      if (DECL_INITIAL (decl) == 0)
        /* Error message was already done; now avoid a crash.  */
        DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
      else
        /* An initializer is going to decide the size of this array.
           Until we know the size, represent its address with a reg.  */
        DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
      MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
    }
  else if (DECL_MODE (decl) != BLKmode
           /* If -ffloat-store, don't put explicit float vars
              into regs.  */
           && !(flag_float_store
                && TREE_CODE (type) == REAL_TYPE)
           && ! TREE_THIS_VOLATILE (decl)
           && ! TREE_ADDRESSABLE (decl)
           && (DECL_REGISTER (decl) || ! obey_regdecls)
           /* If -fcheck-memory-usage, check all variables.  */
           && ! current_function_check_memory_usage)
    {
      /* Automatic variable that can go in a register.  */
      int unsignedp = TREE_UNSIGNED (type);
      enum machine_mode reg_mode
        = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      DECL_RTL (decl) = gen_reg_rtx (reg_mode);
      mark_user_reg (DECL_RTL (decl));

      if (POINTER_TYPE_P (type))
        mark_reg_pointer (DECL_RTL (decl),
                          (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
                           / BITS_PER_UNIT));
    }

  else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
           && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
                 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
                     || (TREE_INT_CST_LOW (DECL_SIZE (decl))
                         > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
    {
      /* Variable of fixed size that goes on the stack.  */
      rtx oldaddr = 0;
      rtx addr;

      /* If we previously made RTL for this decl, it must be an array
         whose size was determined by the initializer.
         The old address was a register; set that register now
         to the proper address.  */
      if (DECL_RTL (decl) != 0)
        {
          if (GET_CODE (DECL_RTL (decl)) != MEM
              || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
            abort ();
          oldaddr = XEXP (DECL_RTL (decl), 0);
        }

      DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
      MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
                           AGGREGATE_TYPE_P (TREE_TYPE (decl)));

      /* Set the alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
                           : GET_MODE_BITSIZE (DECL_MODE (decl)));

      if (oldaddr)
        {
          addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
          if (addr != oldaddr)
            emit_move_insn (oldaddr, addr);
        }

      /* If this is a memory ref that contains aggregate components,
         mark it as such for cse and loop optimize.  */
      MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
                           AGGREGATE_TYPE_P (TREE_TYPE (decl)));
#if 0
      /* If this is in memory because of -ffloat-store,
         set the volatile bit, to prevent optimizations from
         undoing the effects.  */
      if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
        MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
#endif

      MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size;

      /* Record the stack pointer on entry to the block, if we have
         not already done so.  */
      if (thisblock->data.block.stack_level == 0)
        {
          do_pending_stack_adjust ();
          emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
                           &thisblock->data.block.stack_level,
                           thisblock->data.block.first_insn);
          stack_block_stack = thisblock;
        }

      /* Compute the variable's size, in bytes.  */
      size = expand_expr (size_binop (CEIL_DIV_EXPR,
                                      DECL_SIZE (decl),
                                      size_int (BITS_PER_UNIT)),
                          NULL_RTX, VOIDmode, 0);
      free_temp_slots ();

      /* Allocate space on the stack for the variable.  Note that
         DECL_ALIGN says how the variable is to be aligned and we
         cannot use it to conclude anything about the alignment of
         the size.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
                                              TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirectly through that rtx.  */
      DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);

      /* If this is a memory ref that contains aggregate components,
         mark it as such for cse and loop optimize.  */
      MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
                           AGGREGATE_TYPE_P (TREE_TYPE (decl)));

      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
    }

  if (TREE_THIS_VOLATILE (decl))
    MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
#if 0 /* A variable is not necessarily unchanging
         just because it is const.  RTX_UNCHANGING_P
         means no change in the function,
         not merely no change in the variable's scope.
         It is correct to set RTX_UNCHANGING_P if the variable's scope
         is the whole function.  There's no convenient way to test that.  */
  if (TREE_READONLY (decl))
    RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
#endif

  /* If doing stupid register allocation, make sure the life of any
     register variable starts here, at the start of its scope.  */

  if (obey_regdecls)
    use_variable (DECL_RTL (decl));
}
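
/* A rough summary of the cases above (illustrative only; the precise
   RTL depends on the target and on flags such as -ffloat-store,
   -fcheck-memory-usage and stack checking):

        int i;        -> DECL_RTL is (reg:SI N), a fresh pseudo register
        struct s x;   -> DECL_RTL is a stack-slot MEM from assign_temp
        char buf[n];  -> DECL_RTL is (mem:BLK A), where A holds the
                         address returned by allocate_dynamic_stack_space
                         after the block's entry stack level is saved  */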

\f
/* Emit code to perform the initialization of a declaration DECL.  */

void
expand_decl_init (decl)
     tree decl;
{
  int was_used = TREE_USED (decl);

  /* If this is a CONST_DECL, we don't have to generate any code, but
     if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
     to be set while in the obstack containing the constant.  Otherwise
     we can lose when functions are nested three deep and the middle
     function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST that
     the innermost function is the first to expand.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
        expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
                     EXPAND_INITIALIZER);
      return;
    }

  if (TREE_STATIC (decl))
    return;

  /* Compute and store the initial value now.  */

  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));

      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
          || code == POINTER_TYPE || code == REFERENCE_TYPE)
        expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
                           0, 0);
      emit_queue ();
    }
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
    {
      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
      emit_queue ();
    }

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
}
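
/* For example (illustrative only), after expand_decl has given `i' a
   pseudo register, a front end expanding

        int i = x + 1;

   calls this routine, which emits a line note for the declaration and
   then the insns for `i = x + 1' via expand_assignment, finally
   freeing whatever temporaries the initializer needed.  */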

/* CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
   CLEANUP multiple times and still have the correct semantics.  This
   is needed in exception handling and for gotos, returns, and breaks
   that leave the current scope.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.

   Return 0 (and do nothing) if we are not inside any binding contour;
   otherwise return 1.  */

int
expand_decl_cleanup (decl, cleanup)
     tree decl, cleanup;
{
  struct nesting *thisblock = block_stack;

  /* Error if we are not in any block.  */
  if (thisblock == 0)
    return 0;

  /* Record the cleanup if there is one.  */

  if (cleanup != 0)
    {
      tree t;
      rtx seq;
      tree *cleanups = &thisblock->data.block.cleanups;
      int cond_context = conditional_context ();

      if (cond_context)
        {
          rtx flag = gen_reg_rtx (word_mode);
          rtx set_flag_0;
          tree cond;

          start_sequence ();
          emit_move_insn (flag, const0_rtx);
          set_flag_0 = get_insns ();
          end_sequence ();

          thisblock->data.block.last_unconditional_cleanup
            = emit_insns_after (set_flag_0,
                                thisblock->data.block.last_unconditional_cleanup);

          emit_move_insn (flag, const1_rtx);

          /* All cleanups must be on the function_obstack.  */
          push_obstacks_nochange ();
          resume_temporary_allocation ();

          cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
          DECL_RTL (cond) = flag;

          /* Conditionalize the cleanup.  */
          cleanup = build (COND_EXPR, void_type_node,
                           truthvalue_conversion (cond),
                           cleanup, integer_zero_node);
          cleanup = fold (cleanup);

          pop_obstacks ();

          cleanups = thisblock->data.block.cleanup_ptr;
        }

      /* All cleanups must be on the function_obstack.  */
      push_obstacks_nochange ();
      resume_temporary_allocation ();
      cleanup = unsave_expr (cleanup);
      pop_obstacks ();

      t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);

      if (! cond_context)
        /* If this block has a cleanup, it belongs in stack_block_stack.  */
        stack_block_stack = thisblock;

      if (cond_context)
        {
          start_sequence ();
        }

      /* If this was optimized so that there is no exception region for the
         cleanup, then mark the TREE_LIST node, so that we can later tell
         if we need to call expand_eh_region_end.  */
      if (! using_eh_for_cleanups_p
          || expand_eh_region_start_tree (decl, cleanup))
        TREE_ADDRESSABLE (t) = 1;
      /* If that started a new EH region, we're in a new block.  */
      thisblock = block_stack;

      if (cond_context)
        {
          seq = get_insns ();
          end_sequence ();
          if (seq)
            thisblock->data.block.last_unconditional_cleanup
              = emit_insns_after (seq,
                                  thisblock->data.block.last_unconditional_cleanup);
        }
      else
        {
          thisblock->data.block.last_unconditional_cleanup
            = get_last_insn ();
          thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
        }
    }
  return 1;
}
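
/* Schematically (illustrative only), the conditional-context case
   above guards the cleanup with a flag so that it runs only if
   control actually reached the point that registered it:

        flag = 0;               inserted after last_unconditional_cleanup
        ...
        flag = 1;               at the registration point
        ...
        if (flag) CLEANUP;      what the COND_EXPR wrapped around
                                CLEANUP expands to  */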

/* Like expand_decl_cleanup, but suppress generating an exception handler
   to perform the cleanup.  */

int
expand_decl_cleanup_no_eh (decl, cleanup)
     tree decl, cleanup;
{
  int save_eh = using_eh_for_cleanups_p;
  int result;

  using_eh_for_cleanups_p = 0;
  result = expand_decl_cleanup (decl, cleanup);
  using_eh_for_cleanups_p = save_eh;

  return result;
}
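
/* For instance (illustrative only; `cleanup_expr' is a placeholder,
   not a name used elsewhere), a front end can register a cleanup that
   must not get its own exception region, such as one that already
   runs inside a handler:

        expand_decl_cleanup_no_eh (NULL_TREE, cleanup_expr);

   where the NULL_TREE records a cleanup not associated with any
   particular variable, as described for expand_decl_cleanup above.  */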

/* Arrange for the top element of the dynamic cleanup chain to be
   popped if we exit the current binding contour.  DECL is the
   associated declaration, if any, otherwise NULL_TREE.  If the
   current contour is left via an exception, then __sjthrow will pop
   the top element off the dynamic cleanup chain.  The code that, in
   the exceptional case, avoids doing the action we push onto the
   cleanup chain is contained in expand_cleanups.

   This routine is only used by expand_eh_region_start, and that is
   the only way in which an exception region should be started.  This
   routine is only used when using the setjmp/longjmp codegen method
   for exception handling.  */

int
expand_dcc_cleanup (decl)
     tree decl;
{
  struct nesting *thisblock = block_stack;
  tree cleanup;

  /* Error if we are not in any block.  */
  if (thisblock == 0)
    return 0;

  /* Record the cleanup for the dynamic cleanup chain.  */

  /* All cleanups must be on the function_obstack.  */
  push_obstacks_nochange ();
  resume_temporary_allocation ();
  cleanup = make_node (POPDCC_EXPR);
  pop_obstacks ();

  /* Add the cleanup in a manner similar to expand_decl_cleanup.  */
  thisblock->data.block.cleanups
    = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);

  /* If this block has a cleanup, it belongs in stack_block_stack.  */
  stack_block_stack = thisblock;
  return 1;
}

/* Arrange for the top element of the dynamic handler chain to be
   popped if we exit the current binding contour.  DECL is the
   associated declaration, if any, otherwise NULL_TREE.  If the current
   contour is left via an exception, then __sjthrow will pop the top
   element off the dynamic handler chain.  The code that, in the
   exceptional case, avoids doing the action we push onto the handler
   chain is contained in expand_cleanups.

   This routine is only used by expand_eh_region_start, and that is
   the only way in which an exception region should be started.  This
   routine is only used when using the setjmp/longjmp codegen method
   for exception handling.  */

int
expand_dhc_cleanup (decl)
     tree decl;
{
  struct nesting *thisblock = block_stack;
  tree cleanup;

  /* Error if we are not in any block.  */
  if (thisblock == 0)
    return 0;

  /* Record the cleanup for the dynamic handler chain.  */

  /* All cleanups must be on the function_obstack.  */
  push_obstacks_nochange ();
  resume_temporary_allocation ();
  cleanup = make_node (POPDHC_EXPR);
  pop_obstacks ();

  /* Add the cleanup in a manner similar to expand_decl_cleanup.  */
  thisblock->data.block.cleanups
    = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);

  /* If this block has a cleanup, it belongs in stack_block_stack.  */
  stack_block_stack = thisblock;
  return 1;
}
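
/* Schematically (illustrative only), the setjmp/longjmp scheme uses
   the two routines above when expand_eh_region_start opens a region:

        push an entry on the dynamic handler chain
        expand_dhc_cleanup (NULL_TREE);    arrange to pop it again
        ... body of the protected region ...

   If the region is instead left via an exception, __sjthrow pops the
   entry itself, and expand_cleanups skips the POPDHC_EXPR/POPDCC_EXPR
   action so that it is not performed twice.  */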
\f
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

void
expand_anon_union_decl (decl, cleanup, decl_elts)
     tree decl, cleanup, decl_elts;
{
  struct nesting *thisblock = block_stack;
  rtx x;

  expand_decl (decl);
  expand_decl_cleanup (decl, cleanup);
  x = DECL_RTL (decl);

  while (decl_elts)
    {
      tree decl_elt = TREE_VALUE (decl_elts);
      tree cleanup_elt = TREE_PURPOSE (decl_elts);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
         aligned such that the element doesn't need to have BLKmode, so
         change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
        DECL_MODE (decl_elt) = mode
          = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
                           MODE_INT, 1);

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
         instead create a new MEM rtx with the proper mode.  */
      if (GET_CODE (x) == MEM)
        {
          if (mode == GET_MODE (x))
            DECL_RTL (decl_elt) = x;
          else
            {
              DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));