1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* $FreeBSD: src/contrib/gcc/stmt.c,v 1.1.1.4.2.2 2002/05/01 19:57:46 obrien Exp $ */
23 /* $DragonFly: src/contrib/gcc/Attic/stmt.c,v 1.2 2003/06/17 04:24:01 dillon Exp $ */
26 /* This file handles the generation of rtl code from tree structure
27 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
28 It also creates the rtl expressions for parameters and auto variables
29 and has full responsibility for allocating stack slots.
31 The functions whose names start with `expand_' are called by the
32 parser to generate RTL instructions for various kinds of constructs.
34 Some control and binding constructs require calling several such
35 functions at different times. For example, a simple if-then
36 is expanded by calling `expand_start_cond' (with the condition-expression
37 as argument) before parsing the then-clause and calling `expand_end_cond'
38 after parsing the then-clause. */
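/* For illustration only -- a sketch, not part of the original sources:
   the call sequence for a front end expanding `if (cond) stmt;' would
   look roughly like

	expand_start_cond (cond, 0);
	  ... expand STMT ...
	expand_end_cond ();

   where the 0 is the EXITFLAG argument described with the
   cond-handling functions further below.  */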
48 #include "insn-flags.h"
49 #include "insn-config.h"
50 #include "insn-codes.h"
52 #include "hard-reg-set.h"
60 #define obstack_chunk_alloc xmalloc
61 #define obstack_chunk_free free
62 struct obstack stmt_obstack;
64 /* Assume that case vectors are not pc-relative. */
65 #ifndef CASE_VECTOR_PC_RELATIVE
66 #define CASE_VECTOR_PC_RELATIVE 0
69 /* Filename and line number of last line-number note,
70 whether we actually emitted it or not. */
74 /* Nonzero if within a ({...}) grouping, in which case we must
75 always compute a value for each expr-stmt in case it is the last one. */
77 int expr_stmts_for_value;
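/* For example (illustration only, not from the original sources), in the
   GNU C statement expression

	({ int tem = f (); tem + 1; })

   the value of the last expression statement, `tem + 1', becomes the
   value of the whole construct, so it must be computed even though an
   ordinary expression statement's value could be discarded.  */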
79 /* Each time we expand an expression-statement,
80 record the expr's type and its RTL value here. */
82 static tree last_expr_type;
83 static rtx last_expr_value;
85 /* Each time we expand the end of a binding contour (in `expand_end_bindings')
86 and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
87 This is used by the `remember_end_note' function to record the endpoint
88 of each generated block in its associated BLOCK node. */
90 static rtx last_block_end_note;
92 /* Number of binding contours started so far in this function. */
94 int block_start_count;
96 /* Functions and data structures for expanding case statements. */
98 /* Case label structure, used to hold info on labels within case
99 statements. We handle "range" labels; for a single-value label
100 as in C, the high and low limits are the same.
102 An AVL tree of case nodes is initially created, and later transformed
103 to a list linked via the RIGHT fields in the nodes. Nodes with
104 higher case values are later in the list.
106 Switch statements can be output in one of two forms. A branch table
107 is used if there are more than a few labels and the labels are dense
108 within the range between the smallest and largest case value. If a
109 branch table is used, no further manipulations are done with the case
112 The alternative to the use of a branch table is to generate a series
113 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
114 and PARENT fields to hold a binary tree. Initially the tree is
115 totally unbalanced, with everything on the right. We balance the tree
116 with nodes on the left having lower case values than the parent
117 and nodes on the right having higher values. We then output the tree
122 struct case_node *left; /* Left son in binary tree */
123 struct case_node *right; /* Right son in binary tree; also node chain */
124 struct case_node *parent; /* Parent of node in binary tree */
125 tree low; /* Lowest index value for this label */
126 tree high; /* Highest index value for this label */
127 tree code_label; /* Label to jump to when node matches */
131 typedef struct case_node case_node;
132 typedef struct case_node *case_node_ptr;
134 /* These are used by estimate_case_costs and balance_case_nodes. */
136 /* This must be a signed type, and non-ANSI compilers lack signed char. */
137 static short *cost_table;
138 static int use_cost_table;
140 /* Stack of control and binding constructs we are currently inside.
142 These constructs begin when you call `expand_start_WHATEVER'
143 and end when you call `expand_end_WHATEVER'. This stack records
144 info about how the construct began that tells the end-function
145 what to do. It also may provide information about the construct
146 to alter the behavior of other constructs within the body.
147 For example, they may affect the behavior of C `break' and `continue'.
149 Each construct gets one `struct nesting' object.
150 All of these objects are chained through the `all' field.
151 `nesting_stack' points to the first object (innermost construct).
152 The position of an entry on `nesting_stack' is in its `depth' field.
154 Each type of construct has its own individual stack.
155 For example, loops have `loop_stack'. Each object points to the
156 next object of the same type through the `next' field.
158 Some constructs are visible to `break' exit-statements and others
159 are not. Which constructs are visible depends on the language.
160 Therefore, the data structure allows each construct to be visible
161 or not, according to the args given when the construct is started.
162 The construct is visible if the `exit_label' field is non-null.
163 In that case, the value should be a CODE_LABEL rtx. */
168 struct nesting *next;
173 /* For conds (if-then and if-then-else statements). */
176 /* Label for the end of the if construct.
177 There is none if EXITFLAG was not set
178 and no `else' has been seen yet. */
180 /* Label for the end of this alternative.
181 This may be the end of the if or the next else/elseif. */
187 /* Label at the top of the loop; place to loop back to. */
189 /* Label at the end of the whole construct. */
191 /* Label before a jump that branches to the end of the whole
192 construct. This is where destructors go if any. */
194 /* Label for `continue' statement to jump to;
195 this is in front of the stepper of the loop. */
198 /* For variable binding contours. */
201 /* Sequence number of this binding contour within the function,
202 in order of entry. */
203 int block_start_count;
204 /* Nonzero => value to restore stack to on exit. */
206 /* The NOTE that starts this contour.
207 Used by expand_goto to check whether the destination
208 is within each contour or not. */
210 /* Innermost containing binding contour that has a stack level. */
211 struct nesting *innermost_stack_block;
212 /* List of cleanups to be run on exit from this contour.
213 This is a list of expressions to be evaluated.
214 The TREE_PURPOSE of each link is the ..._DECL node
215 which the cleanup pertains to. */
217 /* List of cleanup-lists of blocks containing this block,
218 as they were at the locus where this block appears.
219 There is an element for each containing block,
220 ordered innermost containing block first.
221 The tail of this list can be 0,
222 if all remaining elements would be empty lists.
223 The element's TREE_VALUE is the cleanup-list of that block,
224 which may be null. */
226 /* Chain of labels defined inside this binding contour.
227 For contours that have stack levels or cleanups. */
228 struct label_chain *label_chain;
229 /* Number of function calls seen, as of start of this block. */
230 int function_call_count;
231 /* Nonzero if this is associated with an EH region. */
232 int exception_region;
233 /* The saved target_temp_slot_level from our outer block.
234 We may reset target_temp_slot_level to be the level of
235 this block; if that is done, target_temp_slot_level
236 reverts to the saved target_temp_slot_level at the very
238 int target_temp_slot_level;
239 /* True if we are currently emitting insns in an area of
240 output code that is controlled by a conditional
241 expression. This is used by the cleanup handling code to
242 generate conditional cleanup actions. */
243 int conditional_code;
244 /* A place to move the start of the exception region for any
245 of the conditional cleanups; it must be at the end or after
246 the start of the last unconditional cleanup, and before any
247 conditional branch points. */
248 rtx last_unconditional_cleanup;
249 /* When in a conditional context, this is the specific
250 cleanup list associated with last_unconditional_cleanup,
251 where we place the conditionalized cleanups. */
254 /* For switch (C) or case (Pascal) statements,
255 and also for dummies (see `expand_start_case_dummy'). */
258 /* The insn after which the case dispatch should finally
259 be emitted. Zero for a dummy. */
261 /* A list of case labels; it is first built as an AVL tree.
262 During expand_end_case, this is converted to a list, and may be
263 rearranged into a nearly balanced binary tree. */
264 struct case_node *case_list;
265 /* Label to jump to if no case matches. */
267 /* The expression to be dispatched on. */
269 /* Type that INDEX_EXPR should be converted to. */
271 /* Number of range exprs in case statement. */
273 /* Name of this kind of statement, for warnings. */
274 const char *printname;
275 /* Used to save no_line_numbers till we see the first case label.
276 We set this to -1 when we see the first case label in this
278 int line_number_status;
283 /* Chain of all pending binding contours. */
284 struct nesting *block_stack;
286 /* If any new stacks are added here, add them to POPSTACKS too. */
288 /* Chain of all pending binding contours that restore stack levels
290 struct nesting *stack_block_stack;
292 /* Chain of all pending conditional statements. */
293 struct nesting *cond_stack;
295 /* Chain of all pending loops. */
296 struct nesting *loop_stack;
298 /* Chain of all pending case or switch statements. */
299 struct nesting *case_stack;
301 /* Separate chain including all of the above,
302 chained through the `all' field. */
303 struct nesting *nesting_stack;
305 /* Number of entries on nesting_stack now. */
308 /* Allocate and return a new `struct nesting'. */
310 #define ALLOC_NESTING() \
311 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
313 /* Pop the nesting stack element by element until we pop off
314 the element which is at the top of STACK.
315 Update all the other stacks, popping off elements from them
316 as we pop them from nesting_stack. */
318 #define POPSTACK(STACK) \
319 do { struct nesting *target = STACK; \
320 struct nesting *this; \
321 do { this = nesting_stack; \
322 if (loop_stack == this) \
323 loop_stack = loop_stack->next; \
324 if (cond_stack == this) \
325 cond_stack = cond_stack->next; \
326 if (block_stack == this) \
327 block_stack = block_stack->next; \
328 if (stack_block_stack == this) \
329 stack_block_stack = stack_block_stack->next; \
330 if (case_stack == this) \
331 case_stack = case_stack->next; \
332 nesting_depth = nesting_stack->depth - 1; \
333 nesting_stack = this->all; \
334 obstack_free (&stmt_obstack, this); } \
335 while (this != target); } while (0)
337 /* In some cases it is impossible to generate code for a forward goto
338 until the label definition is seen. This happens when it may be necessary
339 for the goto to reset the stack pointer: we don't yet know how to do that.
340 So expand_goto puts an entry on this fixup list.
341 Each time a binding contour that resets the stack is exited,
343 If the target label has now been defined, we can insert the proper code. */
347 /* Points to following fixup. */
348 struct goto_fixup *next;
349 /* Points to the insn before the jump insn.
350 If more code must be inserted, it goes after this insn. */
352 /* The LABEL_DECL that this jump is jumping to, or 0
353 for break, continue or return. */
355 /* The BLOCK for the place where this goto was found. */
357 /* The CODE_LABEL rtx that this is jumping to. */
359 /* Number of binding contours started in current function
360 before the label reference. */
361 int block_start_count;
362 /* The outermost stack level that should be restored for this jump.
363 Each time a binding contour that resets the stack is exited,
364 if the target label is *not* yet defined, this slot is updated. */
366 /* List of lists of cleanup expressions to be run by this goto.
367 There is one element for each block that this goto is within.
368 The tail of this list can be 0,
369 if all remaining elements would be empty.
370 The TREE_VALUE contains the cleanup list of that block as of the
371 time this goto was seen.
372 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
373 tree cleanup_list_list;
376 static struct goto_fixup *goto_fixup_chain;
378 /* Within any binding contour that must restore a stack level,
379 all labels are recorded with a chain of these structures. */
383 /* Points to the following label_chain. */
384 struct label_chain *next;
389 /* Non-zero if we are using EH to handle cleanups. */
390 static int using_eh_for_cleanups_p = 0;
393 static int n_occurrences PROTO((int, const char *));
394 static void expand_goto_internal PROTO((tree, rtx, rtx));
395 static int expand_fixup PROTO((tree, rtx, rtx));
396 static rtx expand_nl_handler_label PROTO((rtx, rtx));
397 static void expand_nl_goto_receiver PROTO((void));
398 static void expand_nl_goto_receivers PROTO((struct nesting *));
399 static void fixup_gotos PROTO((struct nesting *, rtx, tree,
401 static void expand_null_return_1 PROTO((rtx, int));
402 static void expand_value_return PROTO((rtx));
403 static int tail_recursion_args PROTO((tree, tree));
404 static void expand_cleanups PROTO((tree, tree, int, int));
405 static void check_seenlabel PROTO((void));
406 static void do_jump_if_equal PROTO((rtx, rtx, rtx, int));
407 static int estimate_case_costs PROTO((case_node_ptr));
408 static void group_case_nodes PROTO((case_node_ptr));
409 static void balance_case_nodes PROTO((case_node_ptr *,
411 static int node_has_low_bound PROTO((case_node_ptr, tree));
412 static int node_has_high_bound PROTO((case_node_ptr, tree));
413 static int node_is_bounded PROTO((case_node_ptr, tree));
414 static void emit_jump_if_reachable PROTO((rtx));
415 static void emit_case_nodes PROTO((rtx, case_node_ptr, rtx, tree));
416 static int add_case_node PROTO((tree, tree, tree, tree *));
417 static struct case_node *case_tree2list PROTO((case_node *, case_node *));
420 using_eh_for_cleanups ()
422 using_eh_for_cleanups_p = 1;
428 gcc_obstack_init (&stmt_obstack);
433 init_stmt_for_function ()
435 /* We are not currently within any block, conditional, loop or case. */
437 stack_block_stack = 0;
444 block_start_count = 0;
446 /* No gotos have been expanded yet. */
447 goto_fixup_chain = 0;
449 /* We are not processing a ({...}) grouping. */
450 expr_stmts_for_value = 0;
453 init_eh_for_function ();
460 p->block_stack = block_stack;
461 p->stack_block_stack = stack_block_stack;
462 p->cond_stack = cond_stack;
463 p->loop_stack = loop_stack;
464 p->case_stack = case_stack;
465 p->nesting_stack = nesting_stack;
466 p->nesting_depth = nesting_depth;
467 p->block_start_count = block_start_count;
468 p->last_expr_type = last_expr_type;
469 p->last_expr_value = last_expr_value;
470 p->expr_stmts_for_value = expr_stmts_for_value;
471 p->emit_filename = emit_filename;
472 p->emit_lineno = emit_lineno;
473 p->goto_fixup_chain = goto_fixup_chain;
478 restore_stmt_status (p)
481 block_stack = p->block_stack;
482 stack_block_stack = p->stack_block_stack;
483 cond_stack = p->cond_stack;
484 loop_stack = p->loop_stack;
485 case_stack = p->case_stack;
486 nesting_stack = p->nesting_stack;
487 nesting_depth = p->nesting_depth;
488 block_start_count = p->block_start_count;
489 last_expr_type = p->last_expr_type;
490 last_expr_value = p->last_expr_value;
491 expr_stmts_for_value = p->expr_stmts_for_value;
492 emit_filename = p->emit_filename;
493 emit_lineno = p->emit_lineno;
494 goto_fixup_chain = p->goto_fixup_chain;
495 restore_eh_status (p);
498 /* Emit a no-op instruction. */
505 last_insn = get_last_insn ();
507 && (GET_CODE (last_insn) == CODE_LABEL
508 || (GET_CODE (last_insn) == NOTE
509 && prev_real_insn (last_insn) == 0)))
510 emit_insn (gen_nop ());
513 /* Return the rtx-label that corresponds to a LABEL_DECL,
514 creating it if necessary. */
520 if (TREE_CODE (label) != LABEL_DECL)
523 if (DECL_RTL (label))
524 return DECL_RTL (label);
526 return DECL_RTL (label) = gen_label_rtx ();
529 /* Add an unconditional jump to LABEL as the next sequential instruction. */
535 do_pending_stack_adjust ();
536 emit_jump_insn (gen_jump (label));
540 /* Emit code to jump to the address
541 specified by the pointer expression EXP. */
544 expand_computed_goto (exp)
547 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
549 #ifdef POINTERS_EXTEND_UNSIGNED
550 x = convert_memory_address (Pmode, x);
554 /* Be sure the function is executable. */
555 if (current_function_check_memory_usage)
556 emit_library_call (chkr_check_exec_libfunc, 1,
557 VOIDmode, 1, x, ptr_mode);
559 do_pending_stack_adjust ();
560 emit_indirect_jump (x);
562 current_function_has_computed_jump = 1;
565 /* Handle goto statements and the labels that they can go to. */
567 /* Specify the location in the RTL code of a label LABEL,
568 which is a LABEL_DECL tree node.
570 This is used for the kind of label that the user can jump to with a
571 goto statement, and for alternatives of a switch or case statement.
572 RTL labels generated for loops and conditionals don't go through here;
573 they are generated directly at the RTL level, by other functions below.
575 Note that this has nothing to do with defining label *names*.
576 Languages vary in how they do that and what that even means. */
582 struct label_chain *p;
584 do_pending_stack_adjust ();
585 emit_label (label_rtx (label));
586 if (DECL_NAME (label))
587 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
589 if (stack_block_stack != 0)
591 p = (struct label_chain *) oballoc (sizeof (struct label_chain));
592 p->next = stack_block_stack->data.block.label_chain;
593 stack_block_stack->data.block.label_chain = p;
598 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
599 from nested functions. */
602 declare_nonlocal_label (label)
605 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
607 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
608 LABEL_PRESERVE_P (label_rtx (label)) = 1;
609 if (nonlocal_goto_handler_slots == 0)
611 emit_stack_save (SAVE_NONLOCAL,
612 &nonlocal_goto_stack_level,
613 PREV_INSN (tail_recursion_reentry));
615 nonlocal_goto_handler_slots
616 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
619 /* Generate RTL code for a `goto' statement with target label LABEL.
620 LABEL should be a LABEL_DECL tree node that was or will later be
621 defined with `expand_label'. */
629 /* Check for a nonlocal goto to a containing function. */
630 context = decl_function_context (label);
631 if (context != 0 && context != current_function_decl)
633 struct function *p = find_function_data (context);
634 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
635 rtx temp, handler_slot;
638 /* Find the corresponding handler slot for this label. */
639 handler_slot = p->nonlocal_goto_handler_slots;
640 for (link = p->nonlocal_labels; TREE_VALUE (link) != label;
641 link = TREE_CHAIN (link))
642 handler_slot = XEXP (handler_slot, 1);
643 handler_slot = XEXP (handler_slot, 0);
645 p->has_nonlocal_label = 1;
646 current_function_has_nonlocal_goto = 1;
647 LABEL_REF_NONLOCAL_P (label_ref) = 1;
649 /* Copy the rtl for the slots so that they won't be shared in
650 case the virtual stack vars register gets instantiated differently
651 in the parent than in the child. */
653 #if HAVE_nonlocal_goto
654 if (HAVE_nonlocal_goto)
655 emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
656 copy_rtx (handler_slot),
657 copy_rtx (p->nonlocal_goto_stack_level),
664 /* Restore frame pointer for containing function.
665 This sets the actual hard register used for the frame pointer
666 to the location of the function's incoming static chain info.
667 The non-local goto handler will then adjust it to contain the
668 proper value and reload the argument pointer, if needed. */
669 emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));
671 /* We have now loaded the frame pointer hardware register with
672 the address that corresponds to the start of the virtual
673 stack vars. So replace virtual_stack_vars_rtx in all
674 addresses we use with hard_frame_pointer_rtx. */
676 /* Get addr of containing function's current nonlocal goto handler,
677 which will do any cleanups and then jump to the label. */
678 addr = copy_rtx (handler_slot);
679 temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
680 hard_frame_pointer_rtx));
682 /* Restore the stack pointer. Note this uses fp just restored. */
683 addr = p->nonlocal_goto_stack_level;
685 addr = replace_rtx (copy_rtx (addr),
686 virtual_stack_vars_rtx,
687 hard_frame_pointer_rtx);
689 emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);
691 /* USE of hard_frame_pointer_rtx added for consistency; not clear if
693 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
694 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
695 emit_indirect_jump (temp);
699 expand_goto_internal (label, label_rtx (label), NULL_RTX);
702 /* Generate RTL code for a `goto' statement with target label BODY.
703 LABEL should be a CODE_LABEL rtx.
704 LAST_INSN, if non-0, is the rtx we should consider as the last
705 insn emitted (for the purposes of cleaning up a return). */
708 expand_goto_internal (body, label, last_insn)
713 struct nesting *block;
716 if (GET_CODE (label) != CODE_LABEL)
719 /* If label has already been defined, we can tell now
720 whether and how we must alter the stack level. */
722 if (PREV_INSN (label) != 0)
724 /* Find the innermost pending block that contains the label.
725 (Check containment by comparing insn-uids.)
726 Then restore the outermost stack level within that block,
727 and do cleanups of all blocks contained in it. */
728 for (block = block_stack; block; block = block->next)
730 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
732 if (block->data.block.stack_level != 0)
733 stack_level = block->data.block.stack_level;
734 /* Execute the cleanups for blocks we are exiting. */
735 if (block->data.block.cleanups != 0)
737 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
738 do_pending_stack_adjust ();
744 /* Ensure stack adjust isn't done by emit_jump, as this
745 would clobber the stack pointer. This one should be
746 deleted as dead by flow. */
747 clear_pending_stack_adjust ();
748 do_pending_stack_adjust ();
749 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
752 if (body != 0 && DECL_TOO_LATE (body))
753 error ("jump to `%s' invalidly jumps into binding contour",
754 IDENTIFIER_POINTER (DECL_NAME (body)));
756 /* Label not yet defined: may need to put this goto
757 on the fixup list. */
758 else if (! expand_fixup (body, label, last_insn))
760 /* No fixup needed. Record that the label is the target
761 of at least one goto that has no fixup. */
763 TREE_ADDRESSABLE (body) = 1;
769 /* Generate if necessary a fixup for a goto
770 whose target label in tree structure (if any) is TREE_LABEL
771 and whose target in rtl is RTL_LABEL.
773 If LAST_INSN is nonzero, we pretend that the jump appears
774 after insn LAST_INSN instead of at the current point in the insn stream.
776 The fixup will be used later to insert insns just before the goto.
777 Those insns will restore the stack level as appropriate for the
778 target label, and will (in the case of C++) also invoke any object
779 destructors which have to be invoked when we exit the scopes which
780 are exited by the goto.
782 Value is nonzero if a fixup is made. */
785 expand_fixup (tree_label, rtl_label, last_insn)
790 struct nesting *block, *end_block;
792 /* See if we can recognize which block the label will be output in.
793 This is possible in some very common cases.
794 If we succeed, set END_BLOCK to that block.
795 Otherwise, set it to 0. */
798 && (rtl_label == cond_stack->data.cond.endif_label
799 || rtl_label == cond_stack->data.cond.next_label))
800 end_block = cond_stack;
801 /* If we are in a loop, recognize certain labels which
802 are likely targets. This reduces the number of fixups
803 we need to create. */
805 && (rtl_label == loop_stack->data.loop.start_label
806 || rtl_label == loop_stack->data.loop.end_label
807 || rtl_label == loop_stack->data.loop.continue_label))
808 end_block = loop_stack;
812 /* Now set END_BLOCK to the binding level to which we will return. */
816 struct nesting *next_block = end_block->all;
819 /* First see if the END_BLOCK is inside the innermost binding level.
820 If so, then no cleanups or stack levels are relevant. */
821 while (next_block && next_block != block)
822 next_block = next_block->all;
827 /* Otherwise, set END_BLOCK to the innermost binding level
828 which is outside the relevant control-structure nesting. */
829 next_block = block_stack->next;
830 for (block = block_stack; block != end_block; block = block->all)
831 if (block == next_block)
832 next_block = next_block->next;
833 end_block = next_block;
836 /* Does any containing block have a stack level or cleanups?
837 If not, no fixup is needed, and that is the normal case
838 (the only case, for standard C). */
839 for (block = block_stack; block != end_block; block = block->next)
840 if (block->data.block.stack_level != 0
841 || block->data.block.cleanups != 0)
844 if (block != end_block)
846 /* Ok, a fixup is needed. Add a fixup to the list of such. */
847 struct goto_fixup *fixup
848 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
849 /* In case an old stack level is restored, make sure that comes
850 after any pending stack adjust. */
851 /* ?? If the fixup isn't to come at the present position,
852 doing the stack adjust here isn't useful. Doing it with our
853 settings at that location isn't useful either. Let's hope
856 do_pending_stack_adjust ();
857 fixup->target = tree_label;
858 fixup->target_rtl = rtl_label;
860 /* Create a BLOCK node and a corresponding matched set of
861 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
862 this point. The notes will encapsulate any and all fixup
863 code which we might later insert at this point in the insn
864 stream. Also, the BLOCK node will be the parent (i.e. the
865 `SUPERBLOCK') of any other BLOCK nodes which we might create
866 later on when we are expanding the fixup code.
868 Note that optimization passes (including expand_end_loop)
869 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
873 register rtx original_before_jump
874 = last_insn ? last_insn : get_last_insn ();
879 start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
880 fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
881 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
882 fixup->context = poplevel (1, 0, 0); /* Create the BLOCK node now! */
884 emit_insns_after (start, original_before_jump);
887 fixup->block_start_count = block_start_count;
888 fixup->stack_level = 0;
889 fixup->cleanup_list_list
890 = ((block->data.block.outer_cleanups
891 || block->data.block.cleanups)
892 ? tree_cons (NULL_TREE, block->data.block.cleanups,
893 block->data.block.outer_cleanups)
895 fixup->next = goto_fixup_chain;
896 goto_fixup_chain = fixup;
904 /* Expand any needed fixups in the outermost binding level of the
905 function. FIRST_INSN is the first insn in the function. */
908 expand_fixups (first_insn)
911 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
914 /* When exiting a binding contour, process all pending gotos requiring fixups.
915 THISBLOCK is the structure that describes the block being exited.
916 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
917 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
918 FIRST_INSN is the insn that began this contour.
920 Gotos that jump out of this contour must restore the
921 stack level and do the cleanups before actually jumping.
923 DONT_JUMP_IN nonzero means report an error if there is a jump into this
924 contour from before the beginning of the contour.
925 This is also done if STACK_LEVEL is nonzero. */
928 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
929 struct nesting *thisblock;
935 register struct goto_fixup *f, *prev;
937 /* F is the fixup we are considering; PREV is the previous one. */
938 /* We run this loop in two passes so that cleanups of exited blocks
939 are run first, and blocks that are exited are marked so
942 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
944 /* Test for a fixup that is inactive because it is already handled. */
945 if (f->before_jump == 0)
947 /* Delete inactive fixup from the chain, if that is easy to do. */
949 prev->next = f->next;
951 /* Has this fixup's target label been defined?
952 If so, we can finalize it. */
953 else if (PREV_INSN (f->target_rtl) != 0)
955 register rtx cleanup_insns;
957 /* Get the first non-label after the label
958 this goto jumps to. If that's before this scope begins,
959 we don't have a jump into the scope. */
960 rtx after_label = f->target_rtl;
961 while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
962 after_label = NEXT_INSN (after_label);
964 /* If this fixup jumped into this contour from before the beginning
965 of this contour, report an error. */
966 /* ??? Bug: this does not detect jumping in through intermediate
967 blocks that have stack levels or cleanups.
968 It detects only a problem with the innermost block
971 && (dont_jump_in || stack_level || cleanup_list)
972 /* If AFTER_LABEL is 0, it means the jump goes to the end
973 of the rtl, which means it jumps into this scope. */
975 || INSN_UID (first_insn) < INSN_UID (after_label))
976 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
977 && ! DECL_ERROR_ISSUED (f->target))
979 error_with_decl (f->target,
980 "label `%s' used before containing binding contour");
981 /* Prevent multiple errors for one label. */
982 DECL_ERROR_ISSUED (f->target) = 1;
985 /* We will expand the cleanups into a sequence of their own and
986 then later on we will attach this new sequence to the insn
987 stream just ahead of the actual jump insn. */
991 /* Temporarily restore the lexical context where we will
992 logically be inserting the fixup code. We do this for the
993 sake of getting the debugging information right. */
996 set_block (f->context);
998 /* Expand the cleanups for blocks this jump exits. */
999 if (f->cleanup_list_list)
1002 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1003 /* Marked elements correspond to blocks that have been closed.
1004 Do their cleanups. */
1005 if (TREE_ADDRESSABLE (lists)
1006 && TREE_VALUE (lists) != 0)
1008 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1009 /* Pop any pushes done in the cleanups,
1010 in case function is about to return. */
1011 do_pending_stack_adjust ();
1015 /* Restore stack level for the biggest contour that this
1016 jump jumps out of. */
1018 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1020 /* Finish up the sequence containing the insns which implement the
1021 necessary cleanups, and then attach that whole sequence to the
1022 insn stream just ahead of the actual jump insn. Attaching it
1023 at that point ensures that any cleanups which are in fact
1024 implicit C++ object destructions (which must be executed upon
1025 leaving the block) appear (to the debugger) to be taking place
1026 in an area of the generated code where the object(s) being
1027 destructed are still "in scope". */
1029 cleanup_insns = get_insns ();
1033 emit_insns_after (cleanup_insns, f->before_jump);
1040 /* For any still-undefined labels, do the cleanups for this block now.
1041 We must do this now since items in the cleanup list may go out
1042 of scope when the block ends. */
1043 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1044 if (f->before_jump != 0
1045 && PREV_INSN (f->target_rtl) == 0
1046 /* Label has still not appeared. If we are exiting a block with
1047 a stack level to restore, that started before the fixup,
1048 mark this stack level as needing restoration
1049 when the fixup is later finalized. */
1051 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1052 means the label is undefined. That's erroneous, but possible. */
1053 && (thisblock->data.block.block_start_count
1054 <= f->block_start_count))
1056 tree lists = f->cleanup_list_list;
1059 for (; lists; lists = TREE_CHAIN (lists))
1060 /* If the following elt. corresponds to our containing block
1061 then the elt. must be for this block. */
1062 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1066 set_block (f->context);
1067 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1068 do_pending_stack_adjust ();
1069 cleanup_insns = get_insns ();
1072 if (cleanup_insns != 0)
1074 = emit_insns_after (cleanup_insns, f->before_jump);
1076 f->cleanup_list_list = TREE_CHAIN (lists);
1080 f->stack_level = stack_level;
1084 /* Return the number of times character C occurs in string S. */
1086 n_occurrences (c, s)
1096 /* Generate RTL for an asm statement (explicit assembler code).
1097 BODY is a STRING_CST node containing the assembler code text,
1098 or an ADDR_EXPR containing a STRING_CST. */
1104 if (current_function_check_memory_usage)
1106 error ("`asm' cannot be used with `-fcheck-memory-usage'");
1110 if (TREE_CODE (body) == ADDR_EXPR)
1111 body = TREE_OPERAND (body, 0);
1113 emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
1114 TREE_STRING_POINTER (body)));
1118 /* Generate RTL for an asm statement with arguments.
1119 STRING is the instruction template.
1120 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1121 Each output or input has an expression in the TREE_VALUE and
1122 a constraint-string in the TREE_PURPOSE.
1123 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1124 that is clobbered by this insn.
1126 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1127 Some elements of OUTPUTS may be replaced with trees representing temporary
1128 values. The caller should copy those temporary values to the originally
1131 VOL nonzero means the insn is volatile; don't optimize it. */
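/* For illustration only (not part of the original sources): for a GNU C
   extended asm such as

	asm volatile ("foo %1,%0" : "=r" (out) : "g" (in) : "cc");

   OUTPUTS would be a one-element list whose TREE_VALUE is `out' and
   whose TREE_PURPOSE is the string "=r", INPUTS likewise for `in' and
   "g", CLOBBERS a list containing the STRING_CST "cc", and VOL nonzero
   because of the `volatile' keyword.  The instruction name `foo' is of
   course hypothetical.  */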
1134 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1135 tree string, outputs, inputs, clobbers;
1140 rtvec argvec, constraints;
1142 int ninputs = list_length (inputs);
1143 int noutputs = list_length (outputs);
1148 /* Vector of RTX's of evaluated output operands. */
1149 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1150 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1151 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1152 enum machine_mode *inout_mode
1153 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1154 /* The insn we have emitted. */
1157 /* An ASM with no outputs needs to be treated as volatile, for now. */
1161 if (current_function_check_memory_usage)
1163 error ("`asm' cannot be used with `-fcheck-memory-usage'");
1167 /* Count the number of meaningful clobbered registers, ignoring what
1168 we would ignore later. */
1170 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1172 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1173 i = decode_reg_name (regname);
1174 if (i >= 0 || i == -4)
1177 error ("unknown register name `%s' in `asm'", regname);
1182 /* Check that the number of alternatives is constant across all
1184 if (outputs || inputs)
1186 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1187 int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
1190 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1192 error ("too many alternatives in `asm'");
1199 char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
1200 if (n_occurrences (',', constraint) != nalternatives)
1202 error ("operand constraints for `asm' differ in number of alternatives");
1205 if (TREE_CHAIN (tmp))
1206 tmp = TREE_CHAIN (tmp);
1208 tmp = next, next = 0;
1212 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1214 tree val = TREE_VALUE (tail);
1215 tree type = TREE_TYPE (val);
1224 /* If there's an erroneous arg, emit no insn. */
1225 if (TREE_TYPE (val) == error_mark_node)
1228 /* Make sure constraint has `=' and does not have `+'. Also, see
1229 if it allows any register. Be liberal on the latter test, since
1230 the worst that happens if we get it wrong is we issue an error
1233 c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
1234 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1236 /* Allow the `=' or `+' to not be at the beginning of the string,
1237 since it wasn't explicitly documented that way, and there is a
1238 large body of code that puts it last. Swap the character to
1239 the front, so as not to uglify any place else. */
1243 if ((p = strchr (constraint, '=')) != NULL)
1245 if ((p = strchr (constraint, '+')) != NULL)
1248 error ("output operand constraint lacks `='");
1252 if (p != constraint)
1255 bcopy (constraint, constraint+1, p-constraint);
1258 warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
1261 is_inout = constraint[0] == '+';
1262 /* Replace '+' with '='. */
1263 constraint[0] = '=';
1264 /* Make sure we can specify the matching operand. */
1265 if (is_inout && i > 9)
1267 error ("output operand constraint %d contains `+'", i);
1271 for (j = 1; j < c_len; j++)
1272 switch (constraint[j])
1276 error ("operand constraint contains '+' or '=' at illegal position.");
1280 if (i + 1 == ninputs + noutputs)
1282 error ("`%%' constraint used with last operand");
1287 case '?': case '!': case '*': case '&':
1288 case 'E': case 'F': case 'G': case 'H':
1289 case 's': case 'i': case 'n':
1290 case 'I': case 'J': case 'K': case 'L': case 'M':
1291 case 'N': case 'O': case 'P': case ',':
1292 #ifdef EXTRA_CONSTRAINT
1293 case 'Q': case 'R': case 'S': case 'T': case 'U':
1297 case '0': case '1': case '2': case '3': case '4':
1298 case '5': case '6': case '7': case '8': case '9':
1299 error ("matching constraint not valid in output operand");
1302 case 'V': case 'm': case 'o':
1307 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1308 excepting those that expand_call created. So match memory
1324 /* If an output operand is not a decl or indirect ref and our constraint
1325 allows a register, make a temporary to act as an intermediate.
1326 Make the asm insn write into that, then our caller will copy it to
1327 the real output operand. Likewise for promoted variables. */
1329 real_output_rtx[i] = NULL_RTX;
1330 if ((TREE_CODE (val) == INDIRECT_REF
1332 || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
1333 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1334 && ! (GET_CODE (DECL_RTL (val)) == REG
1335 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1340 mark_addressable (TREE_VALUE (tail));
1343 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
1344 EXPAND_MEMORY_USE_WO);
1346 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1347 error ("output number %d not directly addressable", i);
1348 if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1350 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1351 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1353 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1358 output_rtx[i] = assign_temp (type, 0, 0, 1);
1359 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1364 inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
1365 inout_opnum[ninout++] = i;
1370 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1372 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1376 /* Make vectors for the expression-rtx and constraint strings. */
1378 argvec = rtvec_alloc (ninputs);
1379 constraints = rtvec_alloc (ninputs);
1381 body = gen_rtx_ASM_OPERANDS (VOIDmode,
1382 TREE_STRING_POINTER (string), "", 0, argvec,
1383 constraints, filename, line);
1385 MEM_VOLATILE_P (body) = vol;
1387 /* Eval the inputs and put them into ARGVEC.
1388 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1391 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1394 int allows_reg = 0, allows_mem = 0;
1395 char *constraint, *orig_constraint;
1399 /* If there's an erroneous arg, emit no insn,
1400 because the ASM_INPUT would get VOIDmode
1401 and that could cause a crash in reload. */
1402 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1405 /* ??? Can this happen, and does the error message make any sense? */
1406 if (TREE_PURPOSE (tail) == NULL_TREE)
1408 error ("hard register `%s' listed as input operand to `asm'",
1409 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1413 c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
1414 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1415 orig_constraint = constraint;
1417 /* Make sure constraint has neither `=', `+', nor '&'. */
1419 for (j = 0; j < c_len; j++)
1420 switch (constraint[j])
1422 case '+': case '=': case '&':
1423 if (constraint == orig_constraint)
1425 error ("input operand constraint contains `%c'", constraint[j]);
1431 if (constraint == orig_constraint
1432 && i + 1 == ninputs - ninout)
1434 error ("`%%' constraint used with last operand");
1439 case 'V': case 'm': case 'o':
1444 case '?': case '!': case '*':
1445 case 'E': case 'F': case 'G': case 'H': case 'X':
1446 case 's': case 'i': case 'n':
1447 case 'I': case 'J': case 'K': case 'L': case 'M':
1448 case 'N': case 'O': case 'P': case ',':
1449 #ifdef EXTRA_CONSTRAINT
1450 case 'Q': case 'R': case 'S': case 'T': case 'U':
1454 /* Whether or not a numeric constraint allows a register is
1455 decided by the matching constraint, and so there is no need
1456 to do anything special with them. We must handle them in
1457 the default case, so that we don't unnecessarily force
1458 operands to memory. */
1459 case '0': case '1': case '2': case '3': case '4':
1460 case '5': case '6': case '7': case '8': case '9':
1461 if (constraint[j] >= '0' + noutputs)
1464 ("matching constraint references invalid operand number");
1468 /* Try and find the real constraint for this dup. */
1469 if ((j == 0 && c_len == 1)
1470 || (j == 1 && c_len == 2 && constraint[0] == '%'))
1473 for (j = constraint[j] - '0'; j > 0; --j)
1476 c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
1477 constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
1482 /* ... fall through ... */
1495 if (! allows_reg && allows_mem)
1496 mark_addressable (TREE_VALUE (tail));
1498 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1500 if (asm_operand_ok (op, constraint) <= 0)
1503 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1504 else if (!allows_mem)
1505 warning ("asm operand %d probably doesn't match constraints", i);
1506 else if (CONSTANT_P (op))
1507 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1509 else if (GET_CODE (op) == REG
1510 || GET_CODE (op) == SUBREG
1511 || GET_CODE (op) == CONCAT)
1513 tree type = TREE_TYPE (TREE_VALUE (tail));
1514 rtx memloc = assign_temp (type, 1, 1, 1);
1516 emit_move_insn (memloc, op);
1519 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1520 /* We won't recognize volatile memory as an available
1521 memory_operand at this point. Ignore it. */
1523 else if (queued_subexp_p (op))
1526 /* ??? Leave this only until we have experience with what
1527 happens in combine and elsewhere when constraints are
1529 warning ("asm operand %d probably doesn't match constraints", i);
1531 XVECEXP (body, 3, i) = op;
1533 XVECEXP (body, 4, i) /* constraints */
1534 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1539 /* Protect all the operands from the queue,
1540 now that they have all been evaluated. */
1542 for (i = 0; i < ninputs - ninout; i++)
1543 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1545 for (i = 0; i < noutputs; i++)
1546 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1548 /* For in-out operands, copy output rtx to input rtx. */
1549 for (i = 0; i < ninout; i++)
1551 static char match[9+1][2]
1552 = {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
1553 int j = inout_opnum[i];
1555 XVECEXP (body, 3, ninputs - ninout + i) /* argvec */
1557 XVECEXP (body, 4, ninputs - ninout + i) /* constraints */
1558 = gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
1561 /* Now, for each output, construct an rtx
1562 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1563 ARGVEC CONSTRAINTS))
1564 If there is more than one, put them inside a PARALLEL. */
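/* As an illustration (not part of the original sources), for two output
   operands and one register clobber the PARALLEL constructed below has
   roughly the shape

	(parallel [(set OUT0 (asm_operands TEMPLATE CONSTRAINT0 0 ARGVEC CONSTRAINTS ...))
		   (set OUT1 (asm_operands TEMPLATE CONSTRAINT1 1 ARGVEC CONSTRAINTS ...))
		   (clobber (reg:QI N))])

   with OUT0, OUT1, TEMPLATE, the constraint strings and the register
   number N standing for the actual operands.  */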
1566 if (noutputs == 1 && nclobbers == 0)
1568 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1569 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1571 else if (noutputs == 0 && nclobbers == 0)
1573 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1574 insn = emit_insn (body);
1580 if (num == 0) num = 1;
1581 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1583 /* For each output operand, store a SET. */
1585 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1587 XVECEXP (body, 0, i)
1588 = gen_rtx_SET (VOIDmode,
1590 gen_rtx_ASM_OPERANDS (VOIDmode,
1591 TREE_STRING_POINTER (string),
1592 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1593 i, argvec, constraints,
1595 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1598 /* If there are no outputs (but there are some clobbers)
1599 store the bare ASM_OPERANDS into the PARALLEL. */
1602 XVECEXP (body, 0, i++) = obody;
1604 /* Store (clobber REG) for each clobbered register specified. */
1606 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1608 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1609 int j = decode_reg_name (regname);
1613 if (j == -3) /* `cc', which is not a register */
1616 if (j == -4) /* `memory', don't cache memory across asm */
1618 XVECEXP (body, 0, i++)
1619 = gen_rtx_CLOBBER (VOIDmode,
1620 gen_rtx_MEM (BLKmode,
1621 gen_rtx_SCRATCH (VOIDmode)));
1625 /* Ignore unknown register, error already signaled. */
1629 /* Use QImode since that's guaranteed to clobber just one reg. */
1630 XVECEXP (body, 0, i++)
1631 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1634 insn = emit_insn (body);
1637 /* For any outputs that needed reloading into registers, spill them
1638 back to where they belong. */
1639 for (i = 0; i < noutputs; ++i)
1640 if (real_output_rtx[i])
1641 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1646 /* Generate RTL to evaluate the expression EXP
1647 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
1650 expand_expr_stmt (exp)
1653 /* If -W, warn about statements with no side effects,
1654 except for an explicit cast to void (e.g. for assert()), and
1655 except inside a ({...}) where they may be useful. */
1656 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1658 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1659 && !(TREE_CODE (exp) == CONVERT_EXPR
1660 && TREE_TYPE (exp) == void_type_node))
1661 warning_with_file_and_line (emit_filename, emit_lineno,
1662 "statement with no effect");
1663 else if (warn_unused)
1664 warn_if_unused_value (exp);
1667 /* If EXP is of function type and we are expanding statements for
1668 value, convert it to pointer-to-function. */
1669 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1670 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1672 last_expr_type = TREE_TYPE (exp);
1673 last_expr_value = expand_expr (exp,
1674 (expr_stmts_for_value
1675 ? NULL_RTX : const0_rtx),
1678 /* If all we do is reference a volatile value in memory,
1679 copy it to a register to be sure it is actually touched. */
1680 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1681 && TREE_THIS_VOLATILE (exp))
1683 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1685 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1686 copy_to_reg (last_expr_value);
1689 rtx lab = gen_label_rtx ();
1691 /* Compare the value with itself to reference it. */
1692 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1693 expand_expr (TYPE_SIZE (last_expr_type),
1694 NULL_RTX, VOIDmode, 0),
1696 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1702 /* If this expression is part of a ({...}) and is in memory, we may have
1703 to preserve temporaries. */
1704 preserve_temp_slots (last_expr_value);
1706 /* Free any temporaries used to evaluate this expression. Any temporary
1707 used as a result of this expression will already have been preserved
1714 /* Warn if EXP contains any computations whose results are not used.
1715 Return 1 if a warning is printed; 0 otherwise. */
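/* For example (illustration only, not from the original sources), for the
   C statement

	f (), x + 1;

   the call to `f' is exempt, but the second operand of the comma
   expression computes `x + 1' and discards it, so this function prints
   "value computed is not used" and returns 1.  */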
1718 warn_if_unused_value (exp)
1721 if (TREE_USED (exp))
1724 switch (TREE_CODE (exp))
1726 case PREINCREMENT_EXPR:
1727 case POSTINCREMENT_EXPR:
1728 case PREDECREMENT_EXPR:
1729 case POSTDECREMENT_EXPR:
1734 case METHOD_CALL_EXPR:
1736 case TRY_CATCH_EXPR:
1737 case WITH_CLEANUP_EXPR:
1739 /* We don't warn about COND_EXPR because it may be a useful
1740 construct if either arm contains a side effect. */
1745 /* For a binding, warn if no side effect within it. */
1746 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1749 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1751 case TRUTH_ORIF_EXPR:
1752 case TRUTH_ANDIF_EXPR:
1753 /* In && or ||, warn if 2nd operand has no side effect. */
1754 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1757 if (TREE_NO_UNUSED_WARNING (exp))
1759 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1761 /* Let people do `(foo (), 0)' without a warning. */
1762 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1764 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1768 case NON_LVALUE_EXPR:
1769 /* Don't warn about values cast to void. */
1770 if (TREE_TYPE (exp) == void_type_node)
1772 /* Don't warn about conversions not explicit in the user's program. */
1773 if (TREE_NO_UNUSED_WARNING (exp))
1775 /* Assignment to a cast usually results in a cast of a modify.
1776 Don't complain about that. There can be an arbitrary number of
1777 casts before the modify, so we must loop until we find the first
1778 non-cast expression and then test to see if that is a modify. */
1780 tree tem = TREE_OPERAND (exp, 0);
1782 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1783 tem = TREE_OPERAND (tem, 0);
1785 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1786 || TREE_CODE (tem) == CALL_EXPR)
1792 /* Don't warn about automatic dereferencing of references, since
1793 the user cannot control it. */
1794 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1795 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1796 /* ... fall through ... */
1799 /* Referencing a volatile value is a side effect, so don't warn. */
1800 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1801 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1802 && TREE_THIS_VOLATILE (exp))
1805 warning_with_file_and_line (emit_filename, emit_lineno,
1806 "value computed is not used");
1811 /* Clear out the memory of the last expression evaluated. */
1819 /* Begin a statement which will return a value.
1820 Return the RTL_EXPR for this statement expr.
1821 The caller must save that value and pass it to expand_end_stmt_expr. */
1824 expand_start_stmt_expr ()
1829 /* Make the RTL_EXPR node temporary, not momentary,
1830 so that rtl_expr_chain doesn't become garbage. */
1831 momentary = suspend_momentary ();
1832 t = make_node (RTL_EXPR);
1833 resume_momentary (momentary);
1834 do_pending_stack_adjust ();
1835 start_sequence_for_rtl_expr (t);
1837 expr_stmts_for_value++;
1841 /* Restore the previous state at the end of a statement that returns a value.
1842 Returns a tree node representing the statement's value and the
1843 insns to compute the value.
1845 The nodes of that expression have been freed by now, so we cannot use them.
1846 But we don't want to do that anyway; the expression has already been
1847 evaluated and now we just want to use the value. So generate a RTL_EXPR
1848 with the proper type and RTL value.
1850 If the last substatement was not an expression,
1851 return something with type `void'. */
1854 expand_end_stmt_expr (t)
1859 if (last_expr_type == 0)
1861 last_expr_type = void_type_node;
1862 last_expr_value = const0_rtx;
1864 else if (last_expr_value == 0)
1865 /* There are some cases where this can happen, such as when the
1866 statement is void type. */
1867 last_expr_value = const0_rtx;
1868 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1869 /* Remove any possible QUEUED. */
1870 last_expr_value = protect_from_queue (last_expr_value, 0);
1874 TREE_TYPE (t) = last_expr_type;
1875 RTL_EXPR_RTL (t) = last_expr_value;
1876 RTL_EXPR_SEQUENCE (t) = get_insns ();
1878 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1882 /* Don't consider deleting this expr or containing exprs at tree level. */
1883 TREE_SIDE_EFFECTS (t) = 1;
1884 /* Propagate volatility of the actual RTL expr. */
1885 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1888 expr_stmts_for_value--;
1893 /* Generate RTL for the start of an if-then. COND is the expression
1894 whose truth should be tested.
1896 If EXITFLAG is nonzero, this conditional is visible to
1897 `exit_something'. */
1900 expand_start_cond (cond, exitflag)
1904 struct nesting *thiscond = ALLOC_NESTING ();
1906 /* Make an entry on cond_stack for the cond we are entering. */
1908 thiscond->next = cond_stack;
1909 thiscond->all = nesting_stack;
1910 thiscond->depth = ++nesting_depth;
1911 thiscond->data.cond.next_label = gen_label_rtx ();
1912 /* Before we encounter an `else', we don't need a separate exit label
1913 unless there are supposed to be exit statements
1914 to exit this conditional. */
1915 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1916 thiscond->data.cond.endif_label = thiscond->exit_label;
1917 cond_stack = thiscond;
1918 nesting_stack = thiscond;
1920 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1923 /* Generate RTL between the then-clause and the elseif-clause
1924 of an if-then-elseif-.... */
1927 expand_start_elseif (cond)
1930 if (cond_stack->data.cond.endif_label == 0)
1931 cond_stack->data.cond.endif_label = gen_label_rtx ();
1932 emit_jump (cond_stack->data.cond.endif_label);
1933 emit_label (cond_stack->data.cond.next_label);
1934 cond_stack->data.cond.next_label = gen_label_rtx ();
1935 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1938 /* Generate RTL between the then-clause and the else-clause
1939 of an if-then-else. */
1942 expand_start_else ()
1944 if (cond_stack->data.cond.endif_label == 0)
1945 cond_stack->data.cond.endif_label = gen_label_rtx ();
1947 emit_jump (cond_stack->data.cond.endif_label);
1948 emit_label (cond_stack->data.cond.next_label);
1949 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1952 /* After calling expand_start_else, turn this "else" into an "else if"
1953 by providing another condition. */
1956 expand_elseif (cond)
1959 cond_stack->data.cond.next_label = gen_label_rtx ();
1960 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1963 /* Generate RTL for the end of an if-then.
1964 Pop the record for it off of cond_stack. */
1969 struct nesting *thiscond = cond_stack;
1971 do_pending_stack_adjust ();
1972 if (thiscond->data.cond.next_label)
1973 emit_label (thiscond->data.cond.next_label);
1974 if (thiscond->data.cond.endif_label)
1975 emit_label (thiscond->data.cond.endif_label);
1977 POPSTACK (cond_stack);
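/* For illustration only -- a sketch, not part of the original sources:
   a front end expanding `if (a) s1; else if (b) s2; else s3;' would call

	expand_start_cond (a, 0);
	  ... expand S1 ...
	expand_start_elseif (b);
	  ... expand S2 ...
	expand_start_else ();
	  ... expand S3 ...
	expand_end_cond ();

   Each of these just emits labels and jumps around the pieces the front
   end expands in between.  */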
1983 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1984 loop should be exited by `exit_something'. This is a loop for which
1985 `expand_continue' will jump to the top of the loop.
1987 Make an entry on loop_stack to record the labels associated with
1991 expand_start_loop (exit_flag)
1994 register struct nesting *thisloop = ALLOC_NESTING ();
1996 /* Make an entry on loop_stack for the loop we are entering. */
1998 thisloop->next = loop_stack;
1999 thisloop->all = nesting_stack;
2000 thisloop->depth = ++nesting_depth;
2001 thisloop->data.loop.start_label = gen_label_rtx ();
2002 thisloop->data.loop.end_label = gen_label_rtx ();
2003 thisloop->data.loop.alt_end_label = 0;
2004 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2005 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2006 loop_stack = thisloop;
2007 nesting_stack = thisloop;
2009 do_pending_stack_adjust ();
2011 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2012 emit_label (thisloop->data.loop.start_label);
2017 /* Like expand_start_loop but for a loop where the continuation point
2018 (for expand_continue_loop) will be specified explicitly. */
2021 expand_start_loop_continue_elsewhere (exit_flag)
2024 struct nesting *thisloop = expand_start_loop (exit_flag);
2025 loop_stack->data.loop.continue_label = gen_label_rtx ();
2029 /* Specify the continuation point for a loop started with
2030 expand_start_loop_continue_elsewhere.
2031 Use this at the point in the code to which a continue statement should jump. */
2035 expand_loop_continue_here ()
2037 do_pending_stack_adjust ();
2038 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2039 emit_label (loop_stack->data.loop.continue_label);
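/* Illustrative sketch (not from the original source): a C front end
   might expand `for (init; cond; incr) body' roughly as

	... expand init ...
	expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (0, cond);
	... expand body ...          (`break' -> expand_exit_loop (0),
				      `continue' -> expand_continue_loop (0))
	expand_loop_continue_here ();
	... expand incr ...
	expand_end_loop ();

   Declaring the continue point elsewhere makes `continue' jump to the
   increment code, so INCR is still executed before the loop jumps back
   to the test at the top.  */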
2042 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2043 Pop the block off of loop_stack. */
2048 rtx start_label = loop_stack->data.loop.start_label;
2049 rtx insn = get_last_insn ();
2050 int needs_end_jump = 1;
2052 /* Mark the continue-point at the top of the loop if none elsewhere. */
2053 if (start_label == loop_stack->data.loop.continue_label)
2054 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2056 do_pending_stack_adjust ();
2058 /* If optimizing, perhaps reorder the loop.
2059 First, try to use a condjump near the end.
2060 expand_exit_loop_if_false ends loops with unconditional jumps,
2063 if (test) goto label;
2065 goto loop_stack->data.loop.end_label
2069 If we find such a pattern, we can end the loop earlier. */
2072 && GET_CODE (insn) == CODE_LABEL
2073 && LABEL_NAME (insn) == NULL
2074 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2077 rtx jump = PREV_INSN (PREV_INSN (label));
2079 if (GET_CODE (jump) == JUMP_INSN
2080 && GET_CODE (PATTERN (jump)) == SET
2081 && SET_DEST (PATTERN (jump)) == pc_rtx
2082 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2083 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2084 == loop_stack->data.loop.end_label))
2088 /* The test might be complex and reference LABEL multiple times,
2089 like the loop in loop_iterations to set vtop. To handle this, we move LABEL. */
2091 insn = PREV_INSN (label);
2092 reorder_insns (label, label, start_label);
2094 for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
2096 /* We ignore line number notes, but if we see any other note,
2097 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2098 NOTE_INSN_LOOP_*, we disable this optimization. */
2099 if (GET_CODE (prev) == NOTE)
2101 if (NOTE_LINE_NUMBER (prev) < 0)
2105 if (GET_CODE (prev) == CODE_LABEL)
2107 if (GET_CODE (prev) == JUMP_INSN)
2109 if (GET_CODE (PATTERN (prev)) == SET
2110 && SET_DEST (PATTERN (prev)) == pc_rtx
2111 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2112 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2114 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2116 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2118 emit_note_after (NOTE_INSN_LOOP_END, prev);
2127 /* If the loop starts with a loop exit, roll that to the end where
2128 it will optimize together with the jump back.
2130 We look for the conditional branch to the exit, except that once
2131 we find such a branch, we don't look past 30 instructions.
2133 In more detail, if the loop presently looks like this (in pseudo-C):
2136 if (test) goto end_label;
2141 transform it to look like:
2147 if (test) goto end_label;
2148 goto newstart_label;
2151 Here, the `test' may actually consist of some reasonably complex
2152 code, terminating in a test. */
2157 ! (GET_CODE (insn) == JUMP_INSN
2158 && GET_CODE (PATTERN (insn)) == SET
2159 && SET_DEST (PATTERN (insn)) == pc_rtx
2160 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2164 rtx last_test_insn = NULL_RTX;
2166 /* Scan insns from the top of the loop looking for a qualified
2167 conditional exit. */
2168 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2169 insn = NEXT_INSN (insn))
2171 if (GET_CODE (insn) == NOTE)
2174 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2175 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2176 /* The code that actually moves the exit test will
2177 carefully leave BLOCK notes in their original
2178 location. That means, however, that we can't debug
2179 the exit test itself. So, we refuse to move code
2180 containing BLOCK notes at low optimization levels. */
2183 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2185 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2189 /* We've come to the end of an EH region, but
2190 never saw the beginning of that region. That
2191 means that an EH region begins before the top
2192 of the loop, and ends in the middle of it. The
2193 existence of such a situation violates a basic
2194 assumption in this code, since that would imply
2195 that even when EH_REGIONS is zero, we might
2196 move code out of an exception region. */
2200 /* We must not walk into a nested loop. */
2201 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2204 /* We already know this INSN is a NOTE, so there's no
2205 point in looking at it to see if it's a JUMP. */
2209 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2212 if (last_test_insn && num_insns > 30)
2216 /* We don't want to move a partial EH region. Consider:
2230 This isn't legal C++, but here's what it's supposed to
2231 mean: if cond() is true, stop looping. Otherwise,
2232 call bar, and keep looping. In addition, if cond
2233 throws an exception, catch it and keep looping. Such
2234 constructs are certainly legal in LISP.
2236 We should not move the `if (cond()) 0' test since then
2237 the EH-region for the try-block would be broken up.
2238 (In this case we would see the EH_BEG note for the `try'
2239 and `if cond()' but not the call to bar() or the
2242 So we don't look for tests within an EH region. */
2245 if (GET_CODE (insn) == JUMP_INSN
2246 && GET_CODE (PATTERN (insn)) == SET
2247 && SET_DEST (PATTERN (insn)) == pc_rtx)
2249 /* This is indeed a jump. */
2250 rtx dest1 = NULL_RTX;
2251 rtx dest2 = NULL_RTX;
2252 rtx potential_last_test;
2253 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2255 /* A conditional jump. */
2256 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2257 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2258 potential_last_test = insn;
2262 /* An unconditional jump. */
2263 dest1 = SET_SRC (PATTERN (insn));
2264 /* Include the BARRIER after the JUMP. */
2265 potential_last_test = NEXT_INSN (insn);
2269 if (dest1 && GET_CODE (dest1) == LABEL_REF
2270 && ((XEXP (dest1, 0)
2271 == loop_stack->data.loop.alt_end_label)
2273 == loop_stack->data.loop.end_label)))
2275 last_test_insn = potential_last_test;
2279 /* If this was a conditional jump, there may be
2280 another label at which we should look. */
2287 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2289 /* We found one. Move everything from there up
2290 to the end of the loop, and add a jump into the loop
2291 to jump to there. */
2292 register rtx newstart_label = gen_label_rtx ();
2293 register rtx start_move = start_label;
2296 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2297 then we want to move this note also. */
2298 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2299 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2300 == NOTE_INSN_LOOP_CONT))
2301 start_move = PREV_INSN (start_move);
2303 emit_label_after (newstart_label, PREV_INSN (start_move));
2305 /* Actually move the insns. Start at the beginning, and
2306 keep copying insns until we've copied the last_test_insn. */
2308 for (insn = start_move; insn; insn = next_insn)
2310 /* Figure out which insn comes after this one. We have
2311 to do this before we move INSN. */
2312 if (insn == last_test_insn)
2313 /* We've moved all the insns. */
2314 next_insn = NULL_RTX;
2316 next_insn = NEXT_INSN (insn);
2318 if (GET_CODE (insn) == NOTE
2319 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2320 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2321 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2322 NOTE_INSN_BLOCK_ENDs because the correct generation
2323 of debugging information depends on these appearing
2324 in the same order in the RTL and in the tree
2325 structure, where they are represented as BLOCKs.
2326 So, we don't move block notes. Of course, moving
2327 the code inside the block is likely to make it
2328 impossible to debug the instructions in the exit
2329 test, but such is the price of optimization. */
2332 /* Move the INSN. */
2333 reorder_insns (insn, insn, get_last_insn ());
2336 emit_jump_insn_after (gen_jump (start_label),
2337 PREV_INSN (newstart_label));
2338 emit_barrier_after (PREV_INSN (newstart_label));
2339 start_label = newstart_label;
2345 emit_jump (start_label);
2346 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2348 emit_label (loop_stack->data.loop.end_label);
2350 POPSTACK (loop_stack);
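/* Sketch (illustrative only) of the overall shape these routines emit
   for a simple loop, before the reorderings above kick in:

	NOTE_INSN_LOOP_BEG
	NOTE_INSN_LOOP_CONT      (precedes start_label when the continue
	start_label:              point is the top of the loop)
	... loop body ...
	jump start_label
	NOTE_INSN_LOOP_END
	end_label:

   The two optimizations in expand_end_loop try to move the exit test so
   that the loop ends with a conditional branch instead of ending with
   an unconditional jump back to a test at the top.  */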
2355 /* Generate a jump to the current loop's continue-point.
2356 This is usually the top of the loop, but may be specified
2357 explicitly elsewhere. If not currently inside a loop,
2358 return 0 and do nothing; caller will print an error message. */
2361 expand_continue_loop (whichloop)
2362 struct nesting *whichloop;
2366 whichloop = loop_stack;
2369 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2374 /* Generate a jump to exit the current loop. If not currently inside a loop,
2375 return 0 and do nothing; caller will print an error message. */
2378 expand_exit_loop (whichloop)
2379 struct nesting *whichloop;
2383 whichloop = loop_stack;
2386 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2390 /* Generate a conditional jump to exit the current loop if COND
2391 evaluates to zero. If not currently inside a loop,
2392 return 0 and do nothing; caller will print an error message. */
2395 expand_exit_loop_if_false (whichloop, cond)
2396 struct nesting *whichloop;
2399 rtx label = gen_label_rtx ();
2404 whichloop = loop_stack;
2407 /* In order to handle fixups, we actually create a conditional jump
2408 around an unconditional branch to exit the loop. If fixups are
2409 necessary, they go before the unconditional branch. */
2412 do_jump (cond, NULL_RTX, label);
2413 last_insn = get_last_insn ();
2414 if (GET_CODE (last_insn) == CODE_LABEL)
2415 whichloop->data.loop.alt_end_label = last_insn;
2416 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2423 /* Return nonzero if the loop nest is empty. Else return zero. */
2426 stmt_loop_nest_empty ()
2428 return (loop_stack == NULL);
2431 /* Return non-zero if we should preserve sub-expressions as separate
2432 pseudos. We never do so if we aren't optimizing. We always do so
2433 if -fexpensive-optimizations.
2435 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2436 the loop may still be a small one. */
2439 preserve_subexpressions_p ()
2443 if (flag_expensive_optimizations)
2446 if (optimize == 0 || loop_stack == 0)
2449 insn = get_last_insn_anywhere ();
2452 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2453 < n_non_fixed_regs * 3));
2457 /* Generate a jump to exit the current loop, conditional, binding contour
2458 or case statement. Not all such constructs are visible to this function,
2459 only those started with EXIT_FLAG nonzero. Individual languages use
2460 the EXIT_FLAG parameter to control which kinds of constructs you can exit this way.
2463 If not currently inside anything that can be exited,
2464 return 0 and do nothing; caller will print an error message. */
2467 expand_exit_something ()
2471 for (n = nesting_stack; n; n = n->all)
2472 if (n->exit_label != 0)
2474 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2481 /* Generate RTL to return from the current function, with no value.
2482 (That is, we do not do anything about returning any value.) */
2485 expand_null_return ()
2487 struct nesting *block = block_stack;
2490 /* Does any pending block have cleanups? */
2492 while (block && block->data.block.cleanups == 0)
2493 block = block->next;
2495 /* If yes, use a goto to return, since that runs cleanups. */
2497 expand_null_return_1 (last_insn, block != 0);
2500 /* Generate RTL to return from the current function, with value VAL. */
2503 expand_value_return (val)
2506 struct nesting *block = block_stack;
2507 rtx last_insn = get_last_insn ();
2508 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2510 /* Copy the value to the return location
2511 unless it's already there. */
2513 if (return_reg != val)
2515 #ifdef PROMOTE_FUNCTION_RETURN
2516 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2517 int unsignedp = TREE_UNSIGNED (type);
2518 enum machine_mode old_mode
2519 = DECL_MODE (DECL_RESULT (current_function_decl));
2520 enum machine_mode mode
2521 = promote_mode (type, old_mode, &unsignedp, 1);
2523 if (mode != old_mode)
2524 val = convert_modes (mode, old_mode, val, unsignedp);
2526 emit_move_insn (return_reg, val);
2528 if (GET_CODE (return_reg) == REG
2529 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2530 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2531 /* Handle calls that return values in multiple non-contiguous locations.
2532 The Irix 6 ABI has examples of this. */
2533 else if (GET_CODE (return_reg) == PARALLEL)
2537 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2539 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2541 if (GET_CODE (x) == REG
2542 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2543 emit_insn (gen_rtx_USE (VOIDmode, x));
2547 /* Does any pending block have cleanups? */
2549 while (block && block->data.block.cleanups == 0)
2550 block = block->next;
2552 /* If yes, use a goto to return, since that runs cleanups.
2553 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2555 expand_null_return_1 (last_insn, block != 0);
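/* For example (illustrative only), on a target defining
   PROMOTE_FUNCTION_RETURN where `short' values are returned widened to
   SImode, a value computed in HImode is converted before the move:

	mode = promote_mode (type, HImode, &unsignedp, 1);   -> SImode
	val  = convert_modes (SImode, HImode, val, unsignedp);
	emit_move_insn (return_reg, val);

   so the store into the hard return register is always done in the
   register's own, promoted mode.  */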
2558 /* Output a return with no value. If LAST_INSN is nonzero,
2559 pretend that the return takes place after LAST_INSN.
2560 If USE_GOTO is nonzero then don't use a return instruction;
2561 go to the return label instead. This causes any cleanups
2562 of pending blocks to be executed normally. */
2565 expand_null_return_1 (last_insn, use_goto)
2569 rtx end_label = cleanup_label ? cleanup_label : return_label;
2571 clear_pending_stack_adjust ();
2572 do_pending_stack_adjust ();
2575 /* PCC-struct return always uses an epilogue. */
2576 if (current_function_returns_pcc_struct || use_goto)
2579 end_label = return_label = gen_label_rtx ();
2580 expand_goto_internal (NULL_TREE, end_label, last_insn);
2584 /* Otherwise output a simple return-insn if one is available,
2585 unless it won't do the job. */
2587 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2589 emit_jump_insn (gen_return ());
2595 /* Otherwise jump to the epilogue. */
2596 expand_goto_internal (NULL_TREE, end_label, last_insn);
2599 /* Generate RTL to evaluate the expression RETVAL and return it
2600 from the current function. */
2603 expand_return (retval)
2606 /* If there are any cleanups to be performed, then they will
2607 be inserted following LAST_INSN. It is desirable
2608 that the last_insn, for such purposes, should be the
2609 last insn before computing the return value. Otherwise, cleanups
2610 which call functions can clobber the return value. */
2611 /* ??? rms: I think that is erroneous, because in C++ it would
2612 run destructors on variables that might be used in the subsequent
2613 computation of the return value. */
2615 register rtx val = 0;
2620 /* If function wants no value, give it none. */
2621 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2623 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2625 expand_null_return ();
2629 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2630 /* This is not sufficient. We also need to watch for cleanups of the
2631 expression we are about to expand. Unfortunately, we cannot know
2632 if it has cleanups until we expand it, and we want to change how we
2633 expand it depending upon if we need cleanups. We can't win. */
2635 cleanups = any_pending_cleanups (1);
2640 if (TREE_CODE (retval) == RESULT_DECL)
2641 retval_rhs = retval;
2642 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2643 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2644 retval_rhs = TREE_OPERAND (retval, 1);
2645 else if (TREE_TYPE (retval) == void_type_node)
2646 /* Recognize tail-recursive call to void function. */
2647 retval_rhs = retval;
2649 retval_rhs = NULL_TREE;
2651 /* Only use `last_insn' if there are cleanups which must be run. */
2652 if (cleanups || cleanup_label != 0)
2653 last_insn = get_last_insn ();
2655 /* Distribute return down conditional expr if either of the sides
2656 may involve tail recursion (see test below). This enhances the number
2657 of tail recursions we see. Don't do this always since it can produce
2658 sub-optimal code in some cases and we distribute assignments into
2659 conditional expressions when it would help. */
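/* Illustrative example (not part of the original source): with this
   distribution,

	return cond ? foo (x) : bar (x);

   is expanded as though it had been written

	if (cond) return foo (x); else return bar (x);

   so that each arm becomes a plain `return CALL_EXPR' and can be
   examined separately by the tail-recursion code below.  */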
2661 if (optimize && retval_rhs != 0
2662 && frame_offset == 0
2663 && TREE_CODE (retval_rhs) == COND_EXPR
2664 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2665 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2667 rtx label = gen_label_rtx ();
2670 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2671 start_cleanup_deferral ();
2672 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2673 DECL_RESULT (current_function_decl),
2674 TREE_OPERAND (retval_rhs, 1));
2675 TREE_SIDE_EFFECTS (expr) = 1;
2676 expand_return (expr);
2679 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2680 DECL_RESULT (current_function_decl),
2681 TREE_OPERAND (retval_rhs, 2));
2682 TREE_SIDE_EFFECTS (expr) = 1;
2683 expand_return (expr);
2684 end_cleanup_deferral ();
2688 /* Attempt to optimize the call if it is tail recursive. */
2689 if (optimize_tail_recursion (retval_rhs, last_insn))
2693 /* This optimization is safe if there are local cleanups
2694 because expand_null_return takes care of them.
2695 ??? I think it should also be safe when there is a cleanup label,
2696 because expand_null_return takes care of them, too.
2697 Any reason why not? */
2698 if (HAVE_return && cleanup_label == 0
2699 && ! current_function_returns_pcc_struct
2700 && BRANCH_COST <= 1)
2702 /* If this is return x == y; then generate
2703 if (x == y) return 1; else return 0;
2704 if we can do it with explicit return insns and branches are cheap,
2705 but not if we have the corresponding scc insn. */
2708 switch (TREE_CODE (retval_rhs))
2734 case TRUTH_ANDIF_EXPR:
2735 case TRUTH_ORIF_EXPR:
2736 case TRUTH_AND_EXPR:
2738 case TRUTH_NOT_EXPR:
2739 case TRUTH_XOR_EXPR:
2742 op0 = gen_label_rtx ();
2743 jumpifnot (retval_rhs, op0);
2744 expand_value_return (const1_rtx);
2746 expand_value_return (const0_rtx);
2755 #endif /* HAVE_return */
2757 /* If the result is an aggregate that is being returned in one (or more)
2758 registers, load the registers here. The compiler currently can't handle
2759 copying a BLKmode value into registers. We could put this code in a
2760 more general area (for use by everyone instead of just function
2761 call/return), but until this feature is generally usable it is kept here
2762 (and in expand_call). The value must go into a pseudo in case there
2763 are cleanups that will clobber the real return register. */
2766 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2767 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2769 int i, bitpos, xbitpos;
2770 int big_endian_correction = 0;
2771 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2772 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2773 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
2774 (unsigned int)BITS_PER_WORD);
2775 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2776 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2777 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2778 enum machine_mode tmpmode, result_reg_mode;
2780 /* Structures whose size is not a multiple of a word are aligned
2781 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2782 machine, this means we must skip the empty high order bytes when
2783 calculating the bit offset. */
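/* Worked example (illustrative): for a 6-byte structure on a 32-bit
   BYTES_BIG_ENDIAN target (UNITS_PER_WORD == 4, BITS_PER_WORD == 32),

	big_endian_correction = 32 - (6 % 4) * BITS_PER_UNIT = 16,

   so xbitpos starts at 16 and the first store_bit_field below begins
   16 bits into the destination word, skipping the empty high-order
   bits described above.  */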
2784 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2785 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2788 /* Copy the structure BITSIZE bits at a time. */
2789 for (bitpos = 0, xbitpos = big_endian_correction;
2790 bitpos < bytes * BITS_PER_UNIT;
2791 bitpos += bitsize, xbitpos += bitsize)
2793 /* We need a new destination pseudo each time xbitpos is
2794 on a word boundary and when xbitpos == big_endian_correction
2795 (the first time through). */
2796 if (xbitpos % BITS_PER_WORD == 0
2797 || xbitpos == big_endian_correction)
2799 /* Generate an appropriate register. */
2800 dst = gen_reg_rtx (word_mode);
2801 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2803 /* Clobber the destination before we move anything into it. */
2804 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2807 /* We need a new source operand each time bitpos is on a word boundary. */
2809 if (bitpos % BITS_PER_WORD == 0)
2810 src = operand_subword_force (result_val,
2811 bitpos / BITS_PER_WORD,
2814 /* Use bitpos for the source extraction (left justified) and
2815 xbitpos for the destination store (right justified). */
2816 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2817 extract_bit_field (src, bitsize,
2818 bitpos % BITS_PER_WORD, 1,
2819 NULL_RTX, word_mode,
2821 bitsize / BITS_PER_UNIT,
2823 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2826 /* Find the smallest integer mode large enough to hold the
2827 entire structure and use that mode instead of BLKmode
2828 on the USE insn for the return register. */
2829 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2830 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2831 tmpmode != MAX_MACHINE_MODE;
2832 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2834 /* Have we found a large enough mode? */
2835 if (GET_MODE_SIZE (tmpmode) >= bytes)
2839 /* No suitable mode found. */
2840 if (tmpmode == MAX_MACHINE_MODE)
2843 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2845 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2846 result_reg_mode = word_mode;
2848 result_reg_mode = tmpmode;
2849 result_reg = gen_reg_rtx (result_reg_mode);
2852 for (i = 0; i < n_regs; i++)
2853 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2856 if (tmpmode != result_reg_mode)
2857 result_reg = gen_lowpart (tmpmode, result_reg);
2859 expand_value_return (result_reg);
2863 && TREE_TYPE (retval_rhs) != void_type_node
2864 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2866 /* Calculate the return value into a pseudo reg. */
2867 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2868 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2869 val = force_not_mem (val);
2871 /* Return the calculated value, doing cleanups first. */
2872 expand_value_return (val);
2876 /* No cleanups or no hard reg used;
2877 calculate value into hard return reg. */
2878 expand_expr (retval, const0_rtx, VOIDmode, 0);
2880 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2884 /* Return 1 if the end of the generated RTX is not a barrier.
2885 This means code already compiled can drop through. */
2888 drop_through_at_end_p ()
2890 rtx insn = get_last_insn ();
2891 while (insn && GET_CODE (insn) == NOTE)
2892 insn = PREV_INSN (insn);
2893 return insn && GET_CODE (insn) != BARRIER;
2896 /* Test CALL_EXPR to determine if it is a potential tail recursion call
2897 and emit code to optimize the tail recursion. LAST_INSN indicates where
2898 to place the jump to the tail recursion label. Return TRUE if the
2899 call was optimized into a goto.
2901 This is only used by expand_return, but expand_call is expected to use it soon. */
2905 optimize_tail_recursion (call_expr, last_insn)
2909 /* For tail-recursive call to current function,
2910 just jump back to the beginning.
2911 It's unsafe if any auto variable in this function
2912 has its address taken; for simplicity,
2913 require stack frame to be empty. */
2914 if (optimize && call_expr != 0
2915 && frame_offset == 0
2916 && TREE_CODE (call_expr) == CALL_EXPR
2917 && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
2918 && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
2919 /* Finish checking validity, and if valid emit code
2920 to set the argument variables for the new call. */
2921 && tail_recursion_args (TREE_OPERAND (call_expr, 1),
2922 DECL_ARGUMENTS (current_function_decl)))
2924 if (tail_recursion_label == 0)
2926 tail_recursion_label = gen_label_rtx ();
2927 emit_label_after (tail_recursion_label,
2928 tail_recursion_reentry);
2931 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2939 /* Emit code to alter this function's formal parms for a tail-recursive call.
2940 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2941 FORMALS is the chain of decls of formals.
2942 Return 1 if this can be done;
2943 otherwise return 0 and do not emit any code. */
2946 tail_recursion_args (actuals, formals)
2947 tree actuals, formals;
2949 register tree a = actuals, f = formals;
2951 register rtx *argvec;
2953 /* Check that number and types of actuals are compatible
2954 with the formals. This is not always true in valid C code.
2955 Also check that no formal needs to be addressable
2956 and that all formals are scalars. */
2958 /* Also count the args. */
2960 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2962 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2963 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2965 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2968 if (a != 0 || f != 0)
2971 /* Compute all the actuals. */
2973 argvec = (rtx *) alloca (i * sizeof (rtx));
2975 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2976 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2978 /* Find which actual values refer to current values of previous formals.
2979 Copy each of them now, before any formal is changed. */
2981 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2985 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2986 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2987 { copy = 1; break; }
2989 argvec[i] = copy_to_reg (argvec[i]);
2992 /* Store the values of the actuals into the formals. */
2994 for (f = formals, a = actuals, i = 0; f;
2995 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2997 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2998 emit_move_insn (DECL_RTL (f), argvec[i]);
3000 convert_move (DECL_RTL (f), argvec[i],
3001 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
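/* Illustrative example (not part of the original source): for

	int fact (int n, int acc)
	{
	  if (n == 0) return acc;
	  return fact (n - 1, acc * n);
	}

   optimize_tail_recursion and tail_recursion_args turn the recursive
   return into, roughly,

	compute n - 1 and acc * n into temporaries;
	n = <n - 1>;  acc = <acc * n>;
	goto tail_recursion_label;     (just after the parms are set up)

   All the actuals are computed before any formal is stored, and an
   actual that mentions a formal assigned earlier in the store loop is
   first copied to a register, so the new argument values cannot be
   corrupted by the stores themselves.  */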
3008 /* Generate the RTL code for entering a binding contour.
3009 The variables are declared one by one, by calls to `expand_decl'.
3011 EXIT_FLAG is nonzero if this construct should be visible to
3012 `exit_something'. */
3015 expand_start_bindings (exit_flag)
3018 struct nesting *thisblock = ALLOC_NESTING ();
3019 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3021 /* Make an entry on block_stack for the block we are entering. */
3023 thisblock->next = block_stack;
3024 thisblock->all = nesting_stack;
3025 thisblock->depth = ++nesting_depth;
3026 thisblock->data.block.stack_level = 0;
3027 thisblock->data.block.cleanups = 0;
3028 thisblock->data.block.function_call_count = 0;
3029 thisblock->data.block.exception_region = 0;
3030 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
3032 thisblock->data.block.conditional_code = 0;
3033 thisblock->data.block.last_unconditional_cleanup = note;
3034 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3037 && !(block_stack->data.block.cleanups == NULL_TREE
3038 && block_stack->data.block.outer_cleanups == NULL_TREE))
3039 thisblock->data.block.outer_cleanups
3040 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3041 block_stack->data.block.outer_cleanups);
3043 thisblock->data.block.outer_cleanups = 0;
3044 thisblock->data.block.label_chain = 0;
3045 thisblock->data.block.innermost_stack_block = stack_block_stack;
3046 thisblock->data.block.first_insn = note;
3047 thisblock->data.block.block_start_count = ++block_start_count;
3048 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3049 block_stack = thisblock;
3050 nesting_stack = thisblock;
3052 /* Make a new level for allocating stack slots. */
3056 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3057 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3058 expand_expr are made. After we end the region, we know that all
3059 space for all temporaries that were created by TARGET_EXPRs will be
3060 destroyed and their space freed for reuse. */
3063 expand_start_target_temps ()
3065 /* This is so that even if the result is preserved, the space
3066 allocated will be freed, as we know that it is no longer in use. */
3069 /* Start a new binding layer that will keep track of all cleanup
3070 actions to be performed. */
3071 expand_start_bindings (0);
3073 target_temp_slot_level = temp_slot_level;
3077 expand_end_target_temps ()
3079 expand_end_bindings (NULL_TREE, 0, 0);
3081 /* This is so that even if the result is preserved, the space
3082 allocated will be freed, as we know that it is no longer in use. */
3086 /* Mark top block of block_stack as an implicit binding for an
3087 exception region. This is used to prevent infinite recursion when
3088 ending a binding with expand_end_bindings. It is only ever called
3089 by expand_eh_region_start, as that is the only way to create a
3090 block stack for an exception region. */
3093 mark_block_as_eh_region ()
3095 block_stack->data.block.exception_region = 1;
3096 if (block_stack->next
3097 && block_stack->next->data.block.conditional_code)
3099 block_stack->data.block.conditional_code
3100 = block_stack->next->data.block.conditional_code;
3101 block_stack->data.block.last_unconditional_cleanup
3102 = block_stack->next->data.block.last_unconditional_cleanup;
3103 block_stack->data.block.cleanup_ptr
3104 = block_stack->next->data.block.cleanup_ptr;
3108 /* True if we are currently emitting insns in an area of output code
3109 that is controlled by a conditional expression. This is used by
3110 the cleanup handling code to generate conditional cleanup actions. */
3113 conditional_context ()
3115 return block_stack && block_stack->data.block.conditional_code;
3118 /* Mark top block of block_stack as not for an implicit binding for an
3119 exception region. This is only ever done by expand_eh_region_end
3120 to let expand_end_bindings know that it is being called explicitly
3121 to end just the binding layer associated with
3122 the exception region; otherwise expand_end_bindings would try to
3123 end all implicit binding layers for exception regions, and then
3124 one normal binding layer. */
3127 mark_block_as_not_eh_region ()
3129 block_stack->data.block.exception_region = 0;
3132 /* True if the top block of block_stack was marked as for an exception
3133 region by mark_block_as_eh_region. */
3138 return block_stack && block_stack->data.block.exception_region;
3141 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3142 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given block node. */
3146 remember_end_note (block)
3147 register tree block;
3149 BLOCK_END_NOTE (block) = last_block_end_note;
3150 last_block_end_note = NULL_RTX;
3153 /* Emit a handler label for a nonlocal goto handler.
3154 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3157 expand_nl_handler_label (slot, before_insn)
3158 rtx slot, before_insn;
3161 rtx handler_label = gen_label_rtx ();
3163 /* Don't let jump_optimize delete the handler. */
3164 LABEL_PRESERVE_P (handler_label) = 1;
3167 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3168 insns = get_insns ();
3170 emit_insns_before (insns, before_insn);
3172 emit_label (handler_label);
3174 return handler_label;
3177 /* Emit code to restore vital registers at the beginning of a nonlocal goto handler. */
3180 expand_nl_goto_receiver ()
3182 #ifdef HAVE_nonlocal_goto
3183 if (! HAVE_nonlocal_goto)
3185 /* First adjust our frame pointer to its actual value. It was
3186 previously set to the start of the virtual area corresponding to
3187 the stacked variables when we branched here and now needs to be
3188 adjusted to the actual hardware fp value.
3190 Assignments to virtual registers are converted by
3191 instantiate_virtual_regs into the corresponding assignment
3192 to the underlying register (fp in this case) that makes
3193 the original assignment true.
3194 So the following insn will actually be
3195 decrementing fp by STARTING_FRAME_OFFSET. */
3196 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3198 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3199 if (fixed_regs[ARG_POINTER_REGNUM])
3201 #ifdef ELIMINABLE_REGS
3202 /* If the argument pointer can be eliminated in favor of the
3203 frame pointer, we don't need to restore it. We assume here
3204 that if such an elimination is present, it can always be used.
3205 This is the case on all known machines; if we don't make this
3206 assumption, we do unnecessary saving on many machines. */
3207 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3210 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3211 if (elim_regs[i].from == ARG_POINTER_REGNUM
3212 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3215 if (i == sizeof elim_regs / sizeof elim_regs [0])
3218 /* Now restore our arg pointer from the address at which it
3219 was saved in our stack frame.
3220 If there hasn't been space allocated for it yet, make
3222 if (arg_pointer_save_area == 0)
3223 arg_pointer_save_area
3224 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3225 emit_move_insn (virtual_incoming_args_rtx,
3226 /* We need a pseudo here, or else
3227 instantiate_virtual_regs_1 complains. */
3228 copy_to_reg (arg_pointer_save_area));
3233 #ifdef HAVE_nonlocal_goto_receiver
3234 if (HAVE_nonlocal_goto_receiver)
3235 emit_insn (gen_nonlocal_goto_receiver ());
3239 /* Make handlers for nonlocal gotos taking place in the function calls in block THISBLOCK. */
3243 expand_nl_goto_receivers (thisblock)
3244 struct nesting *thisblock;
3247 rtx afterward = gen_label_rtx ();
3252 /* Record the handler address in the stack slot for that purpose,
3253 during this block, saving and restoring the outer value. */
3254 if (thisblock->next != 0)
3255 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3257 rtx save_receiver = gen_reg_rtx (Pmode);
3258 emit_move_insn (XEXP (slot, 0), save_receiver);
3261 emit_move_insn (save_receiver, XEXP (slot, 0));
3262 insns = get_insns ();
3264 emit_insns_before (insns, thisblock->data.block.first_insn);
3267 /* Jump around the handlers; they run only when specially invoked. */
3268 emit_jump (afterward);
3270 /* Make a separate handler for each label. */
3271 link = nonlocal_labels;
3272 slot = nonlocal_goto_handler_slots;
3273 label_list = NULL_RTX;
3274 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3275 /* Skip any labels we shouldn't be able to jump to from here;
3276 we generate one special handler for all of them below, which just calls
3278 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3281 lab = expand_nl_handler_label (XEXP (slot, 0),
3282 thisblock->data.block.first_insn);
3283 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3285 expand_nl_goto_receiver ();
3287 /* Jump to the "real" nonlocal label. */
3288 expand_goto (TREE_VALUE (link));
3291 /* A second pass over all nonlocal labels; this time we handle those
3292 we should not be able to jump to at this point. */
3293 link = nonlocal_labels;
3294 slot = nonlocal_goto_handler_slots;
3296 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3297 if (DECL_TOO_LATE (TREE_VALUE (link)))
3300 lab = expand_nl_handler_label (XEXP (slot, 0),
3301 thisblock->data.block.first_insn);
3302 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3308 expand_nl_goto_receiver ();
3309 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3314 nonlocal_goto_handler_labels = label_list;
3315 emit_label (afterward);
3318 /* Generate RTL code to terminate a binding contour.
3320 VARS is the chain of VAR_DECL nodes for the variables bound in this
3321 contour. There may actually be other nodes in this chain, but any
3322 nodes other than VAR_DECLS are ignored.
3324 MARK_ENDS is nonzero if we should put a note at the beginning
3325 and end of this binding contour.
3327 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3328 (That is true automatically if the contour has a saved stack level.) */
3331 expand_end_bindings (vars, mark_ends, dont_jump_in)
3336 register struct nesting *thisblock;
3339 while (block_stack->data.block.exception_region)
3341 /* Because we don't need or want a new temporary level and
3342 because we didn't create one in expand_eh_region_start,
3343 create a fake one now to avoid removing one in
3344 expand_end_bindings. */
3347 block_stack->data.block.exception_region = 0;
3349 expand_end_bindings (NULL_TREE, 0, 0);
3352 /* Since expand_eh_region_start does an expand_start_bindings, we
3353 have to first end all the bindings that were created by
3354 expand_eh_region_start. */
3356 thisblock = block_stack;
3359 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3360 if (TREE_CODE (decl) == VAR_DECL
3361 && ! TREE_USED (decl)
3362 && ! DECL_IN_SYSTEM_HEADER (decl)
3363 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3364 warning_with_decl (decl, "unused variable `%s'");
3366 if (thisblock->exit_label)
3368 do_pending_stack_adjust ();
3369 emit_label (thisblock->exit_label);
3372 /* If necessary, make handlers for nonlocal gotos taking
3373 place in the function calls in this block. */
3374 if (function_call_count != thisblock->data.block.function_call_count
3376 /* Make handler for outermost block
3377 if there were any nonlocal gotos to this function. */
3378 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3379 /* Make handler for inner block if it has something
3380 special to do when you jump out of it. */
3381 : (thisblock->data.block.cleanups != 0
3382 || thisblock->data.block.stack_level != 0)))
3383 expand_nl_goto_receivers (thisblock);
3385 /* Don't allow jumping into a block that has a stack level.
3386 Cleanups are allowed, though. */
3388 || thisblock->data.block.stack_level != 0)
3390 struct label_chain *chain;
3392 /* Any labels in this block are no longer valid to go to.
3393 Mark them to cause an error message. */
3394 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3396 DECL_TOO_LATE (chain->label) = 1;
3397 /* If any goto without a fixup came to this label,
3398 that must be an error, because gotos without fixups
3399 come from outside all saved stack-levels. */
3400 if (TREE_ADDRESSABLE (chain->label))
3401 error_with_decl (chain->label,
3402 "label `%s' used before containing binding contour");
3406 /* Restore stack level in effect before the block
3407 (only if variable-size objects allocated). */
3408 /* Perform any cleanups associated with the block. */
3410 if (thisblock->data.block.stack_level != 0
3411 || thisblock->data.block.cleanups != 0)
3413 /* Only clean up here if this point can actually be reached. */
3414 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3416 /* Don't let cleanups affect ({...}) constructs. */
3417 int old_expr_stmts_for_value = expr_stmts_for_value;
3418 rtx old_last_expr_value = last_expr_value;
3419 tree old_last_expr_type = last_expr_type;
3420 expr_stmts_for_value = 0;
3422 /* Do the cleanups. */
3423 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3425 do_pending_stack_adjust ();
3427 expr_stmts_for_value = old_expr_stmts_for_value;
3428 last_expr_value = old_last_expr_value;
3429 last_expr_type = old_last_expr_type;
3431 /* Restore the stack level. */
3433 if (reachable && thisblock->data.block.stack_level != 0)
3435 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3436 thisblock->data.block.stack_level, NULL_RTX);
3437 if (nonlocal_goto_handler_slots != 0)
3438 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3442 /* Any gotos out of this block must also do these things.
3443 Also report any gotos with fixups that came to labels in this block. */
3445 fixup_gotos (thisblock,
3446 thisblock->data.block.stack_level,
3447 thisblock->data.block.cleanups,
3448 thisblock->data.block.first_insn,
3452 /* Mark the beginning and end of the scope if requested.
3453 We do this now, after running cleanups on the variables
3454 just going out of scope, so they are in scope for their cleanups. */
3457 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3459 /* Get rid of the beginning-mark if we don't make an end-mark. */
3460 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3462 /* If doing stupid register allocation, make sure lives of all
3463 register variables declared here extend thru end of scope. */
3466 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3467 if (TREE_CODE (decl) == VAR_DECL && DECL_RTL (decl))
3468 use_variable (DECL_RTL (decl));
3470 /* Restore the temporary level of TARGET_EXPRs. */
3471 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3473 /* Restore block_stack level for containing block. */
3475 stack_block_stack = thisblock->data.block.innermost_stack_block;
3476 POPSTACK (block_stack);
3478 /* Pop the stack slot nesting and free any slots at this level. */
3482 /* Generate RTL for the automatic variable declaration DECL.
3483 (Other kinds of declarations are simply ignored if seen here.) */
3489 struct nesting *thisblock = block_stack;
3492 type = TREE_TYPE (decl);
3494 /* Only automatic variables need any expansion done.
3495 Static and external variables, and external functions,
3496 will be handled by `assemble_variable' (called from finish_decl).
3497 TYPE_DECL and CONST_DECL require nothing.
3498 PARM_DECLs are handled in `assign_parms'. */
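/* Rough illustration (not part of the original source) of the cases
   handled below, in terms of source-level declarations:

	register int i;      -> pseudo register (gen_reg_rtx)
	char buf[100];       -> fixed-size stack slot (assign_temp)
	char buf[n];         -> dynamically allocated stack space
				(allocate_dynamic_stack_space)

   Which case applies depends on the mode, volatility, addressability
   and size of the declaration, as the tests below spell out.  */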
3500 if (TREE_CODE (decl) != VAR_DECL)
3502 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3505 /* Create the RTL representation for the variable. */
3507 if (type == error_mark_node)
3508 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3509 else if (DECL_SIZE (decl) == 0)
3510 /* Variable with incomplete type. */
3512 if (DECL_INITIAL (decl) == 0)
3513 /* Error message was already done; now avoid a crash. */
3514 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3516 /* An initializer is going to decide the size of this array.
3517 Until we know the size, represent its address with a reg. */
3518 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3519 MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
3521 else if (DECL_MODE (decl) != BLKmode
3522 /* If -ffloat-store, don't put explicit float vars
3524 && !(flag_float_store
3525 && TREE_CODE (type) == REAL_TYPE)
3526 && ! TREE_THIS_VOLATILE (decl)
3527 && ! TREE_ADDRESSABLE (decl)
3528 && (DECL_REGISTER (decl) || ! obey_regdecls)
3529 /* if -fcheck-memory-usage, check all variables. */
3530 && ! current_function_check_memory_usage)
3532 /* Automatic variable that can go in a register. */
3533 int unsignedp = TREE_UNSIGNED (type);
3534 enum machine_mode reg_mode
3535 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3537 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3538 mark_user_reg (DECL_RTL (decl));
3540 if (POINTER_TYPE_P (type))
3541 mark_reg_pointer (DECL_RTL (decl),
3542 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3546 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3547 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3548 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3549 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3550 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3552 /* Variable of fixed size that goes on the stack. */
3556 /* If we previously made RTL for this decl, it must be an array
3557 whose size was determined by the initializer.
3558 The old address was a register; set that register now
3559 to the proper address. */
3560 if (DECL_RTL (decl) != 0)
3562 if (GET_CODE (DECL_RTL (decl)) != MEM
3563 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3565 oldaddr = XEXP (DECL_RTL (decl), 0);
3568 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
3569 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3570 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3572 /* Set alignment we actually gave this decl. */
3573 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3574 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3578 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3579 if (addr != oldaddr)
3580 emit_move_insn (oldaddr, addr);
3583 /* If this is a memory ref that contains aggregate components,
3584 mark it as such for cse and loop optimize. */
3585 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3586 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3588 /* If this is in memory because of -ffloat-store,
3589 set the volatile bit, to prevent optimizations from
3590 undoing the effects. */
3591 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3592 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3595 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
3598 /* Dynamic-size object: must push space on the stack. */
3602 /* Record the stack pointer on entry to the block, if we have
3603 not already done so. */
3604 if (thisblock->data.block.stack_level == 0)
3606 do_pending_stack_adjust ();
3607 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3608 &thisblock->data.block.stack_level,
3609 thisblock->data.block.first_insn);
3610 stack_block_stack = thisblock;
3613 /* Compute the variable's size, in bytes. */
3614 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3616 size_int (BITS_PER_UNIT)),
3617 NULL_RTX, VOIDmode, 0);
3620 /* Allocate space on the stack for the variable. Note that
3621 DECL_ALIGN says how the variable is to be aligned and we
3622 cannot use it to conclude anything about the alignment of the size. */
3624 address = allocate_dynamic_stack_space (size, NULL_RTX,
3625 TYPE_ALIGN (TREE_TYPE (decl)));
3627 /* Reference the variable indirect through that rtx. */
3628 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3630 /* If this is a memory ref that contains aggregate components,
3631 mark it as such for cse and loop optimize. */
3632 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3633 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3635 /* Indicate the alignment we actually gave this variable. */
3636 #ifdef STACK_BOUNDARY
3637 DECL_ALIGN (decl) = STACK_BOUNDARY;
3639 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3643 if (TREE_THIS_VOLATILE (decl))
3644 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3645 #if 0 /* A variable is not necessarily unchanging
3646 just because it is const. RTX_UNCHANGING_P
3647 means no change in the function,
3648 not merely no change in the variable's scope.
3649 It is correct to set RTX_UNCHANGING_P if the variable's scope
3650 is the whole function. There's no convenient way to test that. */
3651 if (TREE_READONLY (decl))
3652 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3655 /* If doing stupid register allocation, make sure life of any
3656 register variable starts here, at the start of its scope. */
3659 use_variable (DECL_RTL (decl));
3664 /* Emit code to perform the initialization of a declaration DECL. */
3667 expand_decl_init (decl)
3670 int was_used = TREE_USED (decl);
3672 /* If this is a CONST_DECL, we don't have to generate any code, but
3673 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3674 to be set while in the obstack containing the constant. If we don't
3675 do this, we can lose if we have functions nested three deep and the middle
3676 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3677 the innermost function is the first to expand that STRING_CST. */
3678 if (TREE_CODE (decl) == CONST_DECL)
3680 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3681 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3682 EXPAND_INITIALIZER);
3686 if (TREE_STATIC (decl))
3689 /* Compute and store the initial value now. */
3691 if (DECL_INITIAL (decl) == error_mark_node)
3693 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3695 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3696 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3697 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3701 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3703 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3704 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3708 /* Don't let the initialization count as "using" the variable. */
3709 TREE_USED (decl) = was_used;
3711 /* Free any temporaries we made while initializing the decl. */
3712 preserve_temp_slots (NULL_RTX);
3716 /* CLEANUP is an expression to be executed at exit from this binding contour;
3717 for example, in C++, it might call the destructor for this variable.
3719 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3720 CLEANUP multiple times, and have the correct semantics. This
3721 happens in exception handling, for gotos, returns, breaks that
3722 leave the current scope.
3724 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3725 that is not associated with any particular variable. */
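/* Illustrative example (not part of the original source): a C++ front
   end typically registers a destructor call as the cleanup, e.g. for

	{ String s ("x"); ... }

   it calls expand_decl_cleanup (s, <call of String's destructor on &s>).
   When the cleanup is registered while only one arm of a conditional is
   being emitted (see conditional_context), the code below guards the
   cleanup with a run-time flag that is set only on the path where the
   object was actually constructed, so the destructor cannot run for an
   object that was never built.  */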
3728 expand_decl_cleanup (decl, cleanup)
3731 struct nesting *thisblock = block_stack;
3733 /* Error if we are not in any block. */
3737 /* Record the cleanup if there is one. */
3743 tree *cleanups = &thisblock->data.block.cleanups;
3744 int cond_context = conditional_context ();
3748 rtx flag = gen_reg_rtx (word_mode);
3753 emit_move_insn (flag, const0_rtx);
3754 set_flag_0 = get_insns ();
3757 thisblock->data.block.last_unconditional_cleanup
3758 = emit_insns_after (set_flag_0,
3759 thisblock->data.block.last_unconditional_cleanup);
3761 emit_move_insn (flag, const1_rtx);
3763 /* All cleanups must be on the function_obstack. */
3764 push_obstacks_nochange ();
3765 resume_temporary_allocation ();
3767 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3768 DECL_RTL (cond) = flag;
3770 /* Conditionalize the cleanup. */
3771 cleanup = build (COND_EXPR, void_type_node,
3772 truthvalue_conversion (cond),
3773 cleanup, integer_zero_node);
3774 cleanup = fold (cleanup);
3778 cleanups = thisblock->data.block.cleanup_ptr;
3781 /* All cleanups must be on the function_obstack. */
3782 push_obstacks_nochange ();
3783 resume_temporary_allocation ();
3784 cleanup = unsave_expr (cleanup);
3787 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3790 /* If this block has a cleanup, it belongs in stack_block_stack. */
3791 stack_block_stack = thisblock;
3798 /* If this was optimized so that there is no exception region for the
3799 cleanup, then mark the TREE_LIST node, so that we can later tell
3800 if we need to call expand_eh_region_end. */
3801 if (! using_eh_for_cleanups_p
3802 || expand_eh_region_start_tree (decl, cleanup))
3803 TREE_ADDRESSABLE (t) = 1;
3804 /* If that started a new EH region, we're in a new block. */
3805 thisblock = block_stack;
3812 thisblock->data.block.last_unconditional_cleanup
3813 = emit_insns_after (seq,
3814 thisblock->data.block.last_unconditional_cleanup);
3818 thisblock->data.block.last_unconditional_cleanup
3820 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3826 /* Like expand_decl_cleanup, but suppress generating an exception handler
3827 to perform the cleanup. */
3830 expand_decl_cleanup_no_eh (decl, cleanup)
3833 int save_eh = using_eh_for_cleanups_p;
3836 using_eh_for_cleanups_p = 0;
3837 result = expand_decl_cleanup (decl, cleanup);
3838 using_eh_for_cleanups_p = save_eh;
3843 /* Arrange for the top element of the dynamic cleanup chain to be
3844 popped if we exit the current binding contour. DECL is the
3845 associated declaration, if any, otherwise NULL_TREE. If the
3846 current contour is left via an exception, then __sjthrow will pop
3847 the top element off the dynamic cleanup chain. The code that
3848 avoids doing the action we push into the cleanup chain in the
3849 exceptional case is contained in expand_cleanups.
3851 This routine is only used by expand_eh_region_start, and that is
3852 the only way in which an exception region should be started. This
3853 routine is only used when using the setjmp/longjmp codegen method
3854 for exception handling. */
3857 expand_dcc_cleanup (decl)
3860 struct nesting *thisblock = block_stack;
3863 /* Error if we are not in any block. */
3867 /* Record the cleanup for the dynamic handler chain. */
3869 /* All cleanups must be on the function_obstack. */
3870 push_obstacks_nochange ();
3871 resume_temporary_allocation ();
3872 cleanup = make_node (POPDCC_EXPR);
3875 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3876 thisblock->data.block.cleanups
3877 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3879 /* If this block has a cleanup, it belongs in stack_block_stack. */
3880 stack_block_stack = thisblock;
3884 /* Arrange for the top element of the dynamic handler chain to be
3885 popped if we exit the current binding contour. DECL is the
3886 associated declaration, if any, otherwise NULL_TREE. If the current
3887 contour is left via an exception, then __sjthrow will pop the top
3888 element off the dynamic handler chain. The code that avoids doing
3889 the action we push into the handler chain in the exceptional case
3890 is contained in expand_cleanups.
3892 This routine is only used by expand_eh_region_start, and that is
3893 the only way in which an exception region should be started. This
3894 routine is only used when using the setjmp/longjmp codegen method
3895 for exception handling. */
3898 expand_dhc_cleanup (decl)
3901 struct nesting *thisblock = block_stack;
3904 /* Error if we are not in any block. */
3908 /* Record the cleanup for the dynamic handler chain. */
3910 /* All cleanups must be on the function_obstack. */
3911 push_obstacks_nochange ();
3912 resume_temporary_allocation ();
3913 cleanup = make_node (POPDHC_EXPR);
3916 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3917 thisblock->data.block.cleanups
3918 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3920 /* If this block has a cleanup, it belongs in stack_block_stack. */
3921 stack_block_stack = thisblock;
3925 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3926 DECL_ELTS is the list of elements that belong to DECL's type.
3927 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
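/* Sketch (illustrative only): for an anonymous union such as

	union { int i; float f; };

   the union itself gets one piece of RTL, and each member is then given
   DECL_RTL referring to that same storage: a MEM at the same address in
   the member's own mode, or a SUBREG of the same register, as chosen by
   the code below.  */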
3930 expand_anon_union_decl (decl, cleanup, decl_elts)
3931 tree decl, cleanup, decl_elts;
3933 struct nesting *thisblock = block_stack;
3937 expand_decl_cleanup (decl, cleanup);
3938 x = DECL_RTL (decl);
3942 tree decl_elt = TREE_VALUE (decl_elts);
3943 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3944 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3946 /* Propagate the union's alignment to the elements. */
3947 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3949 /* If the element has BLKmode and the union doesn't, the union is
3950 aligned such that the element doesn't need to have BLKmode, so
3951 change the element's mode to the appropriate one for its size. */
3952 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3953 DECL_MODE (decl_elt) = mode
3954 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3957 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3958 instead create a new MEM rtx with the proper mode. */
3959 if (GET_CODE (x) == MEM)
3961 if (mode == GET_MODE (x))
3962 DECL_RTL (decl_elt) = x;
3965 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
3966 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
3967 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3970 else if (GET_CODE (x) == REG)
3972 if (mode == GET_MODE (x))
3973 DECL_RTL (decl_elt) = x;
3975 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
3980 /* Record the cleanup if there is one. */
3983 thisblock->data.block.cleanups
3984 = temp_tree_cons (decl_elt, cleanup_elt,
3985 thisblock->data.block.cleanups);
3987 decl_elts = TREE_CHAIN (decl_elts);
3991 /* Expand a list of cleanups LIST.
3992 Elements may be expressions or may be nested lists.
3994 If DONT_DO is nonnull, then any list-element
3995 whose TREE_PURPOSE matches DONT_DO is omitted.
3996 This is sometimes used to avoid a cleanup associated with
3997 a value that is being returned out of the scope.
3999 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4000 goto and handle protection regions specially in that case.
4002 If REACHABLE, we emit code, otherwise just inform the exception handling
4003 code about this finalization. */
4006 expand_cleanups (list, dont_do, in_fixup, reachable)
4013 for (tail = list; tail; tail = TREE_CHAIN (tail))
4014 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4016 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4017 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4022 tree cleanup = TREE_VALUE (tail);
4024 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4025 if (TREE_CODE (cleanup) != POPDHC_EXPR
4026 && TREE_CODE (cleanup) != POPDCC_EXPR
4027 /* See expand_eh_region_start_tree for this case. */
4028 && ! TREE_ADDRESSABLE (tail))
4030 cleanup = protect_with_terminate (cleanup);
4031 expand_eh_region_end (cleanup);
4035 do_pending_stack_adjust();
4041 /* Cleanups may be run multiple times. For example,
4042 when exiting a binding contour, we expand the
4043 cleanups associated with that contour. When a goto
4044 within that binding contour has a target outside that
4045 contour, it will expand all cleanups from its scope to
4046 the target. Though the cleanups are expanded multiple
4047 times, the control paths are non-overlapping so the
4048 cleanups will not be executed twice. */
4050 /* We may need to protect fixups with rethrow regions. */
4051 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4054 expand_fixup_region_start ();
4056 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4058 expand_fixup_region_end (TREE_VALUE (tail));
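/* A minimal standalone sketch of the list walk performed by
   expand_cleanups above: a chain of (purpose, value) entries in which
   a value may itself be a nested chain, and entries whose purpose
   matches DONT_DO are skipped.  The struct and function names are
   invented for the illustration; they are not this file's tree nodes.  */
#if 0
#include <stdio.h>

struct sketch_cleanup
{
  const void *purpose;			/* analogue of TREE_PURPOSE */
  const char *action;			/* leaf: what to run */
  struct sketch_cleanup *sublist;	/* non-null: a nested list instead */
  struct sketch_cleanup *next;		/* analogue of TREE_CHAIN */
};

static void
sketch_run_cleanups (struct sketch_cleanup *list, const void *dont_do)
{
  struct sketch_cleanup *tail;
  for (tail = list; tail; tail = tail->next)
    if (dont_do == 0 || tail->purpose != dont_do)
      {
	if (tail->sublist)
	  sketch_run_cleanups (tail->sublist, dont_do);
	else
	  printf ("running cleanup: %s\n", tail->action);
      }
}
#endif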
4065 /* Mark the context we are emitting RTL for as a conditional
4066 context, so that any cleanup actions we register with
4067 expand_decl_init will be properly conditionalized when those
4068 cleanup actions are later performed. Must be called before any
4069 expression (tree) is expanded that is within a conditional context. */
4072 start_cleanup_deferral ()
4074 /* block_stack can be NULL if we are inside the parameter list. It is
4075 OK to do nothing, because cleanups aren't possible here. */
4077 ++block_stack->data.block.conditional_code;
4080 /* Mark the end of a conditional region of code. Because cleanup
4081 deferrals may be nested, we may still be in a conditional region
4082 after we end the currently deferred cleanups; only after we end all
4083 deferred cleanups are we back in unconditional code. */
4086 end_cleanup_deferral ()
4088 /* block_stack can be NULL if we are inside the parameter list. It is
4089 OK to do nothing, because cleanups aren't possible here. */
4091 --block_stack->data.block.conditional_code;
4094 /* Move all cleanups from the current block_stack
4095 to the containing block_stack, where they are assumed to
4096 have been created. If anything can cause a temporary to
4097 be created, but not expanded for more than one level of
4098 block_stacks, then this code will have to change. */
4103 struct nesting *block = block_stack;
4104 struct nesting *outer = block->next;
4106 outer->data.block.cleanups
4107 = chainon (block->data.block.cleanups,
4108 outer->data.block.cleanups);
4109 block->data.block.cleanups = 0;
4113 last_cleanup_this_contour ()
4115 if (block_stack == 0)
4118 return block_stack->data.block.cleanups;
4121 /* Return 1 if there are any pending cleanups at this point.
4122 If THIS_CONTOUR is nonzero, check the current contour as well.
4123 Otherwise, look only at the contours that enclose this one. */
4126 any_pending_cleanups (this_contour)
4129 struct nesting *block;
4131 if (block_stack == 0)
4134 if (this_contour && block_stack->data.block.cleanups != NULL)
4136 if (block_stack->data.block.cleanups == 0
4137 && block_stack->data.block.outer_cleanups == 0)
4140 for (block = block_stack->next; block; block = block->next)
4141 if (block->data.block.cleanups != 0)
4147 /* Enter a case (Pascal) or switch (C) statement.
4148 Push a block onto case_stack and nesting_stack
4149 to accumulate the case-labels that are seen
4150 and to record the labels generated for the statement.
4152 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4153 Otherwise, this construct is transparent for `exit_something'.
4155 EXPR is the index-expression to be dispatched on.
4156 TYPE is its nominal type. We could simply convert EXPR to this type,
4157 but instead we take short cuts. */
4160 expand_start_case (exit_flag, expr, type, printname)
4164 const char *printname;
4166 register struct nesting *thiscase = ALLOC_NESTING ();
4168 /* Make an entry on case_stack for the case we are entering. */
4170 thiscase->next = case_stack;
4171 thiscase->all = nesting_stack;
4172 thiscase->depth = ++nesting_depth;
4173 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4174 thiscase->data.case_stmt.case_list = 0;
4175 thiscase->data.case_stmt.index_expr = expr;
4176 thiscase->data.case_stmt.nominal_type = type;
4177 thiscase->data.case_stmt.default_label = 0;
4178 thiscase->data.case_stmt.num_ranges = 0;
4179 thiscase->data.case_stmt.printname = printname;
4180 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4181 case_stack = thiscase;
4182 nesting_stack = thiscase;
4184 do_pending_stack_adjust ();
4186 /* Make sure case_stmt.start points to something that won't
4187 need any transformation before expand_end_case. */
4188 if (GET_CODE (get_last_insn ()) != NOTE)
4189 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4191 thiscase->data.case_stmt.start = get_last_insn ();
4193 start_cleanup_deferral ();
4197 /* Start a "dummy case statement" within which case labels are invalid
4198 and are not connected to any larger real case statement.
4199 This can be used if you don't want to let a case statement jump
4200 into the middle of certain kinds of constructs. */
4203 expand_start_case_dummy ()
4205 register struct nesting *thiscase = ALLOC_NESTING ();
4207 /* Make an entry on case_stack for the dummy. */
4209 thiscase->next = case_stack;
4210 thiscase->all = nesting_stack;
4211 thiscase->depth = ++nesting_depth;
4212 thiscase->exit_label = 0;
4213 thiscase->data.case_stmt.case_list = 0;
4214 thiscase->data.case_stmt.start = 0;
4215 thiscase->data.case_stmt.nominal_type = 0;
4216 thiscase->data.case_stmt.default_label = 0;
4217 thiscase->data.case_stmt.num_ranges = 0;
4218 case_stack = thiscase;
4219 nesting_stack = thiscase;
4220 start_cleanup_deferral ();
4223 /* End a dummy case statement. */
4226 expand_end_case_dummy ()
4228 end_cleanup_deferral ();
4229 POPSTACK (case_stack);
4232 /* Return the data type of the index-expression
4233 of the innermost case statement, or null if none. */
4236 case_index_expr_type ()
4239 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4246 /* If this is the first label, warn if any insns have been emitted. */
4247 if (case_stack->data.case_stmt.line_number_status >= 0)
4251 restore_line_number_status
4252 (case_stack->data.case_stmt.line_number_status);
4253 case_stack->data.case_stmt.line_number_status = -1;
4255 for (insn = case_stack->data.case_stmt.start;
4257 insn = NEXT_INSN (insn))
4259 if (GET_CODE (insn) == CODE_LABEL)
4261 if (GET_CODE (insn) != NOTE
4262 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4265 insn = PREV_INSN (insn);
4266 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4268 /* If insn is zero, then there must have been a syntax error. */
4270 warning_with_file_and_line (NOTE_SOURCE_FILE(insn),
4271 NOTE_LINE_NUMBER(insn),
4272 "unreachable code at beginning of %s",
4273 case_stack->data.case_stmt.printname);
4280 /* Accumulate one case or default label inside a case or switch statement.
4281 VALUE is the value of the case (a null pointer, for a default label).
4282 The function CONVERTER, when applied to arguments T and V,
4283 converts the value V to the type T.
4285 If not currently inside a case or switch statement, return 1 and do
4286 nothing. The caller will print a language-specific error message.
4287 If VALUE is a duplicate or overlaps, return 2 and do nothing
4288 except store the (first) duplicate node in *DUPLICATE.
4289 If VALUE is out of range, return 3 and do nothing.
4290 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4291 Return 0 on success.
4293 Extended to handle range statements. */
4296 pushcase (value, converter, label, duplicate)
4297 register tree value;
4298 tree (*converter) PROTO((tree, tree));
4299 register tree label;
4305 /* Fail if not inside a real case statement. */
4306 if (! (case_stack && case_stack->data.case_stmt.start))
4309 if (stack_block_stack
4310 && stack_block_stack->depth > case_stack->depth)
4313 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4314 nominal_type = case_stack->data.case_stmt.nominal_type;
4316 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4317 if (index_type == error_mark_node)
4320 /* Convert VALUE to the type in which the comparisons are nominally done. */
4322 value = (*converter) (nominal_type, value);
4326 /* Fail if this value is out of range for the actual type of the index
4327 (which may be narrower than NOMINAL_TYPE). */
4328 if (value != 0 && ! int_fits_type_p (value, index_type))
4331 /* Fail if this is a duplicate or overlaps another entry. */
4334 if (case_stack->data.case_stmt.default_label != 0)
4336 *duplicate = case_stack->data.case_stmt.default_label;
4339 case_stack->data.case_stmt.default_label = label;
4342 return add_case_node (value, value, label, duplicate);
4344 expand_label (label);
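/* A minimal standalone sketch of the range check behind return code 3
   above: the converted case value must also fit the (possibly
   narrower) type actually used for the index.  Plain integers stand in
   for the INTEGER_CST nodes; the helper name is invented.  */
#if 0
static int
sketch_fits_index_type (long long value, long long type_min, long long type_max)
{
  /* Corresponds to the int_fits_type_p test above; values outside the
     index type's bounds make pushcase fail with code 3.  */
  return value >= type_min && value <= type_max;
}
#endif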
4348 /* Like pushcase but this case applies to all values between VALUE1 and
4349 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4350 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4351 starts at VALUE1 and ends at the highest value of the index type.
4352 If both are NULL, this case applies to all values.
4354 The return value is the same as that of pushcase but there is one
4355 additional error code: 4 means the specified range was empty. */
4358 pushcase_range (value1, value2, converter, label, duplicate)
4359 register tree value1, value2;
4360 tree (*converter) PROTO((tree, tree));
4361 register tree label;
4367 /* Fail if not inside a real case statement. */
4368 if (! (case_stack && case_stack->data.case_stmt.start))
4371 if (stack_block_stack
4372 && stack_block_stack->depth > case_stack->depth)
4375 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4376 nominal_type = case_stack->data.case_stmt.nominal_type;
4378 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4379 if (index_type == error_mark_node)
4384 /* Convert VALUEs to type in which the comparisons are nominally done
4385 and replace any unspecified value with the corresponding bound. */
4387 value1 = TYPE_MIN_VALUE (index_type);
4389 value2 = TYPE_MAX_VALUE (index_type);
4391 /* Fail if the range is empty. Do this before any conversion since
4392 we want to allow out-of-range empty ranges. */
4393 if (value2 && tree_int_cst_lt (value2, value1))
4396 value1 = (*converter) (nominal_type, value1);
4398 /* If the max was unbounded, use the max of the nominal_type we are
4399 converting to. Do this after the < check above to suppress false positives.
4402 value2 = TYPE_MAX_VALUE (nominal_type);
4403 value2 = (*converter) (nominal_type, value2);
4405 /* Fail if these values are out of range. */
4406 if (TREE_CONSTANT_OVERFLOW (value1)
4407 || ! int_fits_type_p (value1, index_type))
4410 if (TREE_CONSTANT_OVERFLOW (value2)
4411 || ! int_fits_type_p (value2, index_type))
4414 return add_case_node (value1, value2, label, duplicate);
4417 /* Do the actual insertion of a case label for pushcase and pushcase_range
4418 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4419 slowdown for large switch statements. */
4422 add_case_node (low, high, label, duplicate)
4427 struct case_node *p, **q, *r;
4429 q = &case_stack->data.case_stmt.case_list;
4436 /* Keep going past elements distinctly greater than HIGH. */
4437 if (tree_int_cst_lt (high, p->low))
4440 /* or distinctly less than LOW. */
4441 else if (tree_int_cst_lt (p->high, low))
4446 /* We have an overlap; this is an error. */
4447 *duplicate = p->code_label;
4452 /* Add this label to the chain, and succeed.
4453 Copy LOW, HIGH so they are on temporary rather than momentary
4454 obstack and will thus survive till the end of the case statement. */
4456 r = (struct case_node *) oballoc (sizeof (struct case_node));
4457 r->low = copy_node (low);
4459 /* If the bounds are equal, turn this into the one-value case. */
4461 if (tree_int_cst_equal (low, high))
4465 r->high = copy_node (high);
4466 case_stack->data.case_stmt.num_ranges++;
4469 r->code_label = label;
4470 expand_label (label);
4480 struct case_node *s;
4486 if (! (b = p->balance))
4487 /* Growth propagation from left side. */
4494 if ((p->left = s = r->right))
4503 if ((r->parent = s))
4511 case_stack->data.case_stmt.case_list = r;
4514 /* r->balance == +1 */
4519 struct case_node *t = r->right;
4521 if ((p->left = s = t->right))
4525 if ((r->right = s = t->left))
4539 if ((t->parent = s))
4547 case_stack->data.case_stmt.case_list = t;
4554 /* p->balance == +1; growth of left side balances the node. */
4564 if (! (b = p->balance))
4565 /* Growth propagation from right side. */
4573 if ((p->right = s = r->left))
4581 if ((r->parent = s))
4590 case_stack->data.case_stmt.case_list = r;
4594 /* r->balance == -1 */
4598 struct case_node *t = r->left;
4600 if ((p->right = s = t->left))
4605 if ((r->left = s = t->right))
4619 if ((t->parent = s))
4628 case_stack->data.case_stmt.case_list = t;
4634 /* p->balance == -1; growth of right side balances the node. */
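/* A minimal standalone sketch of the duplicate/overlap test that the
   insertion walk above performs at each node: two closed ranges are
   disjoint exactly when one ends before the other begins.  Plain
   integers stand in for the tree constants; the helper is invented.  */
#if 0
static int
sketch_ranges_overlap (long low1, long high1, long low2, long high2)
{
  /* Mirrors the two tree_int_cst_lt tests above: keep descending while
     HIGH < p->low or p->high < LOW; anything else is a duplicate.  */
  return ! (high1 < low2 || high2 < low1);
}
#endif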
4648 /* Returns the number of possible values of TYPE.
4649 Returns -1 if the number is unknown or variable.
4650 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4651 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4652 do not increase monotonically (there may be duplicates);
4653 to 1 if the values increase monotonically, but not always by 1;
4654 otherwise sets it to 0. */
4657 all_cases_count (type, spareness)
4661 HOST_WIDE_INT count;
4664 switch (TREE_CODE (type))
4671 count = 1 << BITS_PER_UNIT;
4675 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4676 || TYPE_MAX_VALUE (type) == NULL
4677 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4682 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4683 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4684 but with overflow checking. */
4685 tree mint = TYPE_MIN_VALUE (type);
4686 tree maxt = TYPE_MAX_VALUE (type);
4687 HOST_WIDE_INT lo, hi;
4688 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4690 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4692 add_double (lo, hi, 1, 0, &lo, &hi);
4693 if (hi != 0 || lo < 0)
4700 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4702 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4703 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4704 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4705 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4709 if (*spareness == 1)
4711 tree prev = TREE_VALUE (TYPE_VALUES (type));
4712 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4714 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4719 prev = TREE_VALUE (t);
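/* A minimal standalone sketch of the overflow-checked count computed
   above (max - min + 1).  Here the bounds are plain 64-bit integers
   rather than double-word tree constants, and -1 stands for "too many
   to count", loosely matching the -2 convention of all_cases_count.
   The function name is invented for the illustration.  */
#if 0
#include <limits.h>

static long long
sketch_count_values (long long type_min, long long type_max)
{
  /* Assumes type_max >= type_min; the subtraction is done in unsigned
     arithmetic so a negative minimum cannot overflow.  */
  unsigned long long span = (unsigned long long) type_max
			    - (unsigned long long) type_min;
  if (span >= (unsigned long long) LLONG_MAX)
    return -1;				/* span + 1 would not fit */
  return (long long) span + 1;
}
#endif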
4728 #define BITARRAY_TEST(ARRAY, INDEX) \
4729 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4730 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4731 #define BITARRAY_SET(ARRAY, INDEX) \
4732 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4733 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
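/* A minimal standalone sketch of the chars-as-bitset idea behind the
   two macros above: bit INDEX lives in byte INDEX / CHAR_BIT at bit
   position INDEX % CHAR_BIT, with CHAR_BIT playing the role of
   HOST_BITS_PER_CHAR.  The helper names are invented.  */
#if 0
#include <limits.h>

static void
sketch_set_seen (unsigned char *seen, unsigned index)
{
  seen[index / CHAR_BIT] |= (unsigned char) (1u << (index % CHAR_BIT));
}

static int
sketch_test_seen (const unsigned char *seen, unsigned index)
{
  return (seen[index / CHAR_BIT] >> (index % CHAR_BIT)) & 1;
}

/* Usage: for N possible case values, allocate
   (N + CHAR_BIT - 1) / CHAR_BIT bytes, clear them, and set one bit for
   each case value encountered.  */
#endif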
4735 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4736 with the case values we have seen, assuming the case expression has the given TYPE.
4738 SPARSENESS is as determined by all_cases_count.
4740 The time needed is proportional to COUNT, unless
4741 SPARSENESS is 2, in which case quadratic time is needed. */
4744 mark_seen_cases (type, cases_seen, count, sparseness)
4746 unsigned char *cases_seen;
4750 tree next_node_to_try = NULL_TREE;
4751 long next_node_offset = 0;
4753 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4754 tree val = make_node (INTEGER_CST);
4755 TREE_TYPE (val) = type;
4758 else if (sparseness == 2)
4763 /* This less efficient loop is only needed to handle
4764 duplicate case values (multiple enum constants
4765 with the same value). */
4766 TREE_TYPE (val) = TREE_TYPE (root->low);
4767 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4768 t = TREE_CHAIN (t), xlo++)
4770 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4771 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4775 /* Keep going past elements distinctly greater than VAL. */
4776 if (tree_int_cst_lt (val, n->low))
4779 /* or distinctly less than VAL. */
4780 else if (tree_int_cst_lt (n->high, val))
4785 /* We have found a matching range. */
4786 BITARRAY_SET (cases_seen, xlo);
4796 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4797 for (n = root; n; n = n->right)
4799 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4800 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4801 while ( ! tree_int_cst_lt (n->high, val))
4803 /* Calculate (into xlo) the "offset" of the integer (val).
4804 The element with lowest value has offset 0, the next smallest
4805 element has offset 1, etc. */
4807 HOST_WIDE_INT xlo, xhi;
4809 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4811 /* The TYPE_VALUES will be in increasing order, so
4812 start searching where we last ended.
4813 t = next_node_to_try;
4814 xlo = next_node_offset;
4820 t = TYPE_VALUES (type);
4823 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4825 next_node_to_try = TREE_CHAIN (t);
4826 next_node_offset = xlo + 1;
4831 if (t == next_node_to_try)
4840 t = TYPE_MIN_VALUE (type);
4842 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4846 add_double (xlo, xhi,
4847 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4851 if (xhi == 0 && xlo >= 0 && xlo < count)
4852 BITARRAY_SET (cases_seen, xlo);
4853 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4855 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4861 /* Called when the index of a switch statement is an enumerated type
4862 and there is no default label.
4864 Checks that all enumeration literals are covered by the case
4865 expressions of a switch. Also, warn if there are any extra
4866 switch cases that are *not* elements of the enumerated type.
4868 If all enumeration literals were covered by the case expressions,
4869 turn one of the expressions into the default expression since it should
4870 not be possible to fall through such a switch. */
4873 check_for_full_enumeration_handling (type)
4876 register struct case_node *n;
4877 register tree chain;
4878 #if 0 /* variable used by 'if 0'ed code below. */
4879 register struct case_node **l;
4883 /* True iff the selector type is a numbered set mode. */
4886 /* The number of possible selector values. */
4889 /* For each possible selector value, a one iff it has been matched
4890 by a case value alternative. */
4891 unsigned char *cases_seen;
4893 /* The allocated size of cases_seen, in chars. */
4899 size = all_cases_count (type, &sparseness);
4900 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4902 if (size > 0 && size < 600000
4903 /* We deliberately use malloc here - not xmalloc. */
4904 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4907 tree v = TYPE_VALUES (type);
4908 bzero (cases_seen, bytes_needed);
4910 /* The time complexity of this code is normally O(N), where
4911 N is the number of members in the enumerated type.
4912 However, if type is an ENUMERAL_TYPE whose values do not
4913 increase monotonically, O(N*log(N)) time may be needed. */
4915 mark_seen_cases (type, cases_seen, size, sparseness);
4917 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4919 if (BITARRAY_TEST(cases_seen, i) == 0)
4920 warning ("enumeration value `%s' not handled in switch",
4921 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4927 /* Now we go the other way around; we warn if there are case
4928 expressions that don't correspond to enumerators. This can
4929 occur since C and C++ don't enforce type-checking of
4930 assignments to enumeration variables. */
4932 if (case_stack->data.case_stmt.case_list
4933 && case_stack->data.case_stmt.case_list->left)
4934 case_stack->data.case_stmt.case_list
4935 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4937 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4939 for (chain = TYPE_VALUES (type);
4940 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4941 chain = TREE_CHAIN (chain))
4946 if (TYPE_NAME (type) == 0)
4947 warning ("case value `%ld' not in enumerated type",
4948 (long) TREE_INT_CST_LOW (n->low));
4950 warning ("case value `%ld' not in enumerated type `%s'",
4951 (long) TREE_INT_CST_LOW (n->low),
4952 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4955 : DECL_NAME (TYPE_NAME (type))));
4957 if (!tree_int_cst_equal (n->low, n->high))
4959 for (chain = TYPE_VALUES (type);
4960 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4961 chain = TREE_CHAIN (chain))
4966 if (TYPE_NAME (type) == 0)
4967 warning ("case value `%ld' not in enumerated type",
4968 (long) TREE_INT_CST_LOW (n->high));
4970 warning ("case value `%ld' not in enumerated type `%s'",
4971 (long) TREE_INT_CST_LOW (n->high),
4972 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4975 : DECL_NAME (TYPE_NAME (type))));
4981 /* ??? This optimization is disabled because it causes valid programs to
4982 fail. ANSI C does not guarantee that an expression with enum type
4983 will have a value that is the same as one of the enumeration literals. */
4985 /* If all values were found as case labels, make one of them the default
4986 label. Thus, this switch will never fall through. We arbitrarily pick
4987 the last one to make the default since this is likely the most
4988 efficient choice. */
4992 for (l = &case_stack->data.case_stmt.case_list;
4997 case_stack->data.case_stmt.default_label = (*l)->code_label;
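/* A minimal standalone sketch of the two warnings issued above, using
   plain arrays in place of TYPE_VALUES and the case-node list.  The
   function and parameter names are invented for the illustration.  */
#if 0
#include <stdio.h>

static void
sketch_check_coverage (const long *enum_vals, int n_enum,
		       const long *case_vals, int n_cases)
{
  int i, j;

  /* Enumerators with no matching case label.  */
  for (i = 0; i < n_enum; i++)
    {
      for (j = 0; j < n_cases; j++)
	if (case_vals[j] == enum_vals[i])
	  break;
      if (j == n_cases)
	printf ("enumeration value %ld not handled in switch\n",
		enum_vals[i]);
    }

  /* Case labels that are not enumerators.  */
  for (j = 0; j < n_cases; j++)
    {
      for (i = 0; i < n_enum; i++)
	if (enum_vals[i] == case_vals[j])
	  break;
      if (i == n_enum)
	printf ("case value %ld not in enumerated type\n", case_vals[j]);
    }
}
#endif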
5004 /* Terminate a case (Pascal) or switch (C) statement
5005 in which ORIG_INDEX is the expression to be tested.
5006 Generate the code to test it and jump to the right place. */
5009 expand_end_case (orig_index)
5012 tree minval = NULL_TREE, maxval = NULL_TREE, range, orig_minval;
5013 rtx default_label = 0;
5014 register struct case_node *n;
5022 register struct nesting *thiscase = case_stack;
5023 tree index_expr, index_type;
5026 table_label = gen_label_rtx ();
5027 index_expr = thiscase->data.case_stmt.index_expr;
5028 index_type = TREE_TYPE (index_expr);
5029 unsignedp = TREE_UNSIGNED (index_type);
5031 do_pending_stack_adjust ();
5033 /* This might get a spurious warning in the presence of a syntax error;
5034 it could be fixed by moving the call to check_seenlabel after the
5035 check for error_mark_node, and copying the code of check_seenlabel that
5036 deals with case_stack->data.case_stmt.line_number_status /
5037 restore_line_number_status in front of the call to end_cleanup_deferral.
5038 However, this might miss some useful warnings in the presence of
5039 non-syntax errors. */
5042 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5043 if (index_type != error_mark_node)
5045 /* If the switch expression was an enumerated type, check that all
5046 enumeration literals are covered by the cases.
5047 No sense trying this if there's a default case, however. */
5049 if (!thiscase->data.case_stmt.default_label
5050 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5051 && TREE_CODE (index_expr) != INTEGER_CST)
5052 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5054 /* If we don't have a default-label, create one here,
5055 after the body of the switch. */
5056 if (thiscase->data.case_stmt.default_label == 0)
5058 thiscase->data.case_stmt.default_label
5059 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5060 expand_label (thiscase->data.case_stmt.default_label);
5062 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5064 before_case = get_last_insn ();
5066 if (thiscase->data.case_stmt.case_list
5067 && thiscase->data.case_stmt.case_list->left)
5068 thiscase->data.case_stmt.case_list
5069 = case_tree2list(thiscase->data.case_stmt.case_list, 0);
5071 /* Simplify the case-list before we count it. */
5072 group_case_nodes (thiscase->data.case_stmt.case_list);
5074 /* Get upper and lower bounds of case values.
5075 Also convert all the case values to the index expr's data type. */
5078 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5080 /* Check low and high label values are integers. */
5081 if (TREE_CODE (n->low) != INTEGER_CST)
5083 if (TREE_CODE (n->high) != INTEGER_CST)
5086 n->low = convert (index_type, n->low);
5087 n->high = convert (index_type, n->high);
5089 /* Count the elements and track the largest and smallest
5090 of them (treating them as signed even if they are not). */
5098 if (INT_CST_LT (n->low, minval))
5100 if (INT_CST_LT (maxval, n->high))
5103 /* A range counts double, since it requires two compares. */
5104 if (! tree_int_cst_equal (n->low, n->high))
5108 orig_minval = minval;
5110 /* Compute span of values. */
5112 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5114 end_cleanup_deferral ();
5118 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5120 emit_jump (default_label);
5123 /* If the range of values is much bigger than the number of values,
5124 make a sequence of conditional branches instead of a dispatch.
5125 If the switch-index is a constant, do it this way
5126 because we can optimize it. */
5128 #ifndef CASE_VALUES_THRESHOLD
5130 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5132 /* If the machine does not have a case insn that compares the
5133 bounds, this means extra overhead for dispatch tables
5134 which raises the threshold for using them. */
5135 #define CASE_VALUES_THRESHOLD 5
5136 #endif /* HAVE_casesi */
5137 #endif /* CASE_VALUES_THRESHOLD */
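/* A minimal standalone sketch of the decision applied below: fall back
   to a chain of compares when there are few labels, or when the value
   range is so much larger than the label count that a dispatch table
   would be mostly default entries.  The factor of 10 mirrors the test
   below; the function itself is invented for the illustration.  */
#if 0
static int
sketch_use_compare_chain (unsigned long long range, unsigned int count,
			  unsigned int threshold)
{
  return count < threshold
	 || range > 10 * (unsigned long long) count;
}
#endif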
5139 else if (TREE_INT_CST_HIGH (range) != 0
5140 || count < (unsigned int) CASE_VALUES_THRESHOLD
5141 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5143 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5146 || TREE_CODE (index_expr) == INTEGER_CST
5147 /* These will reduce to a constant. */
5148 || (TREE_CODE (index_expr) == CALL_EXPR
5149 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5150 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5151 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5152 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5153 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5155 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5157 /* If the index is a short or char for which we do not have
5158 an insn to handle comparisons directly, convert it to
5159 a full integer now, rather than letting each comparison
5160 generate the conversion. */
5162 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5163 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
5164 == CODE_FOR_nothing))
5166 enum machine_mode wider_mode;
5167 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5168 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5169 if (cmp_optab->handlers[(int) wider_mode].insn_code
5170 != CODE_FOR_nothing)
5172 index = convert_to_mode (wider_mode, index, unsignedp);
5178 do_pending_stack_adjust ();
5180 index = protect_from_queue (index, 0);
5181 if (GET_CODE (index) == MEM)
5182 index = copy_to_reg (index);
5183 if (GET_CODE (index) == CONST_INT
5184 || TREE_CODE (index_expr) == INTEGER_CST)
5186 /* Make a tree node with the proper constant value
5187 if we don't already have one. */
5188 if (TREE_CODE (index_expr) != INTEGER_CST)
5191 = build_int_2 (INTVAL (index),
5192 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5193 index_expr = convert (index_type, index_expr);
5196 /* For constant index expressions we need only
5197 issue an unconditional branch to the appropriate
5198 target code. The job of removing any unreachable
5199 code is left to the optimisation phase if the
5200 "-O" option is specified. */
5201 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5202 if (! tree_int_cst_lt (index_expr, n->low)
5203 && ! tree_int_cst_lt (n->high, index_expr))
5207 emit_jump (label_rtx (n->code_label));
5209 emit_jump (default_label);
5213 /* If the index expression is not constant we generate
5214 a binary decision tree to select the appropriate
5215 target code. This is done as follows:
5217 The list of cases is rearranged into a binary tree,
5218 nearly optimal assuming equal probability for each case.
5220 The tree is transformed into RTL, eliminating
5221 redundant test conditions at the same time.
5223 If program flow could reach the end of the
5224 decision tree an unconditional jump to the
5225 default code is emitted. */
5228 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5229 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5230 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5232 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5233 default_label, index_type);
5234 emit_jump_if_reachable (default_label);
5243 enum machine_mode index_mode = SImode;
5244 int index_bits = GET_MODE_BITSIZE (index_mode);
5246 enum machine_mode op_mode;
5248 /* Convert the index to SImode. */
5249 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5250 > GET_MODE_BITSIZE (index_mode))
5252 enum machine_mode omode = TYPE_MODE (index_type);
5253 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5255 /* We must handle the endpoints in the original mode. */
5256 index_expr = build (MINUS_EXPR, index_type,
5257 index_expr, minval);
5258 minval = integer_zero_node;
5259 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5260 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5261 omode, 1, 0, default_label);
5262 /* Now we can safely truncate. */
5263 index = convert_to_mode (index_mode, index, 0);
5267 if (TYPE_MODE (index_type) != index_mode)
5269 index_expr = convert (type_for_size (index_bits, 0),
5271 index_type = TREE_TYPE (index_expr);
5274 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5277 index = protect_from_queue (index, 0);
5278 do_pending_stack_adjust ();
5280 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5281 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5283 index = copy_to_mode_reg (op_mode, index);
5285 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5287 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5288 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5290 op1 = copy_to_mode_reg (op_mode, op1);
5292 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5294 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5295 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5297 op2 = copy_to_mode_reg (op_mode, op2);
5299 emit_jump_insn (gen_casesi (index, op1, op2,
5300 table_label, default_label));
5304 #ifdef HAVE_tablejump
5305 if (! win && HAVE_tablejump)
5307 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5308 fold (build (MINUS_EXPR, index_type,
5309 index_expr, minval)));
5310 index_type = TREE_TYPE (index_expr);
5311 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5313 index = protect_from_queue (index, 0);
5314 do_pending_stack_adjust ();
5316 do_tablejump (index, TYPE_MODE (index_type),
5317 expand_expr (range, NULL_RTX, VOIDmode, 0),
5318 table_label, default_label);
5325 /* Get table of labels to jump to, in order of case index. */
5327 ncases = TREE_INT_CST_LOW (range) + 1;
5328 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5329 bzero ((char *) labelvec, ncases * sizeof (rtx));
5331 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5333 register HOST_WIDE_INT i
5334 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5339 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5340 if (i + TREE_INT_CST_LOW (orig_minval)
5341 == TREE_INT_CST_LOW (n->high))
5347 /* Fill in the gaps with the default. */
5348 for (i = 0; i < ncases; i++)
5349 if (labelvec[i] == 0)
5350 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
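/* A minimal standalone sketch of the table just filled in: one slot
   per value between the smallest and largest case value, indexed by
   value - minval, with unclaimed slots pointing at the default.  Small
   integers stand in for the label references; all names are invented
   for the illustration.  */
#if 0
#include <stdlib.h>

static int *
sketch_build_table (const long *lows, const long *highs, const int *targets,
		    int n_cases, long minval, long n_slots, int default_target)
{
  int *table = (int *) malloc (n_slots * sizeof *table);
  long i;
  int n;

  if (table == 0)
    return 0;
  for (i = 0; i < n_slots; i++)
    table[i] = default_target;		/* fill the gaps with the default */
  for (n = 0; n < n_cases; n++)
    for (i = lows[n]; i <= highs[n]; i++)
      table[i - minval] = targets[n];	/* one slot per value in the range */
  return table;
}
#endif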
5352 /* Output the table */
5353 emit_label (table_label);
5355 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5356 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5357 gen_rtx_LABEL_REF (Pmode, table_label),
5358 gen_rtvec_v (ncases, labelvec),
5359 const0_rtx, const0_rtx, 0));
5361 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5362 gen_rtvec_v (ncases, labelvec)));
5364 /* If the case insn drops through the table,
5365 after the table we must jump to the default-label.
5366 Otherwise record no drop-through after the table. */
5367 #ifdef CASE_DROPS_THROUGH
5368 emit_jump (default_label);
5374 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5375 reorder_insns (before_case, get_last_insn (),
5376 thiscase->data.case_stmt.start);
5379 end_cleanup_deferral ();
5381 if (thiscase->exit_label)
5382 emit_label (thiscase->exit_label);
5384 POPSTACK (case_stack);
5389 /* Convert the tree NODE into a list linked by the right field, with the left
5390 field zeroed. RIGHT is used for recursion; it is a list to be placed
5391 rightmost in the resulting list. */
5393 static struct case_node *
5394 case_tree2list (node, right)
5395 struct case_node *node, *right;
5397 struct case_node *left;
5400 right = case_tree2list (node->right, right);
5402 node->right = right;
5403 if ((left = node->left))
5406 return case_tree2list (left, node);
5412 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5415 do_jump_if_equal (op1, op2, label, unsignedp)
5416 rtx op1, op2, label;
5419 if (GET_CODE (op1) == CONST_INT
5420 && GET_CODE (op2) == CONST_INT)
5422 if (INTVAL (op1) == INTVAL (op2))
5427 enum machine_mode mode = GET_MODE (op1);
5428 if (mode == VOIDmode)
5429 mode = GET_MODE (op2);
5430 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5435 /* Not all case values are encountered equally. This function
5436 uses a heuristic to weight case labels, in cases where that
5437 looks like a reasonable thing to do.
5439 Right now, all we try to guess is text, and we establish the following weights:
5442 chars above space: 16
5451 If we find any cases in the switch that are not either -1 or in the range
5452 of valid ASCII characters, or are control characters other than those
5453 commonly used with "\", don't treat this switch as scanning text.
5455 Return 1 if these nodes are suitable for cost estimation, otherwise return 0. */
5459 estimate_case_costs (node)
5462 tree min_ascii = build_int_2 (-1, -1);
5463 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5467 /* If we haven't already made the cost table, make it now. Note that the
5468 lower bound of the table is -1, not zero. */
5470 if (cost_table == NULL)
5472 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5473 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5475 for (i = 0; i < 128; i++)
5479 else if (ISPUNCT (i))
5481 else if (ISCNTRL (i))
5485 cost_table[' '] = 8;
5486 cost_table['\t'] = 4;
5487 cost_table['\0'] = 4;
5488 cost_table['\n'] = 2;
5489 cost_table['\f'] = 1;
5490 cost_table['\v'] = 1;
5491 cost_table['\b'] = 1;
5494 /* See if all the case expressions look like text. It is text if the
5495 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5496 as signed arithmetic since we don't want to ever access cost_table with a
5497 value less than -1. Also check that none of the constants in a range
5498 are strange control characters. */
5500 for (n = node; n; n = n->right)
5502 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5505 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5506 if (cost_table[i] < 0)
5510 /* All interesting values are within the range of interesting
5511 ASCII characters. */
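/* A minimal standalone sketch of the weighting idea used above:
   characters common in text get large weights, rare control
   characters mark the switch as "not text", and the table is shifted
   by one so that a case value of -1 has an entry.  The exact weights
   here are illustrative, not the ones this file installs.  */
#if 0
#include <ctype.h>

static short sketch_cost[129];		/* sketch_cost[v + 1] is the weight of v */

static void
sketch_init_cost (void)
{
  int i;
  for (i = 0; i < 128; i++)
    {
      if (isalnum (i))
	sketch_cost[i + 1] = 16;	/* letters and digits: common in text */
      else if (ispunct (i))
	sketch_cost[i + 1] = 8;
      else if (iscntrl (i))
	sketch_cost[i + 1] = -1;	/* unusual control char: not text */
    }
  sketch_cost[' ' + 1] = 8;
  sketch_cost['\t' + 1] = 4;
  sketch_cost['\n' + 1] = 2;
  sketch_cost[0] = 1;			/* the entry for the value -1 */
}
#endif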
5515 /* Scan an ordered list of case nodes
5516 combining those with consecutive values or ranges.
5518 E.g. three separate entries 1: 2: 3: become one entry 1..3:
5521 group_case_nodes (head)
5524 case_node_ptr node = head;
5528 rtx lb = next_real_insn (label_rtx (node->code_label));
5530 case_node_ptr np = node;
5532 /* Try to group the successors of NODE with NODE. */
5533 while (((np = np->right) != 0)
5534 /* Do they jump to the same place? */
5535 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5536 || (lb != 0 && lb2 != 0
5537 && simplejump_p (lb)
5538 && simplejump_p (lb2)
5539 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5540 SET_SRC (PATTERN (lb2)))))
5541 /* Are their ranges consecutive? */
5542 && tree_int_cst_equal (np->low,
5543 fold (build (PLUS_EXPR,
5544 TREE_TYPE (node->high),
5547 /* An overflow is not consecutive. */
5548 && tree_int_cst_lt (node->high,
5549 fold (build (PLUS_EXPR,
5550 TREE_TYPE (node->high),
5552 integer_one_node))))
5554 node->high = np->high;
5556 /* NP is the first node after NODE which can't be grouped with it.
5557 Delete the nodes in between, and move on to that node. */
5563 /* Take an ordered list of case nodes
5564 and transform them into a near optimal binary tree,
5565 on the assumption that any target code selection value is as
5566 likely as any other.
5568 The transformation is performed by splitting the ordered
5569 list into two equal sections plus a pivot. The parts are
5570 then attached to the pivot as left and right branches. Each
5571 branch is then transformed recursively. */
5574 balance_case_nodes (head, parent)
5575 case_node_ptr *head;
5576 case_node_ptr parent;
5578 register case_node_ptr np;
5586 register case_node_ptr *npp;
5589 /* Count the number of entries on the branch. Also count the ranges. */
5593 if (!tree_int_cst_equal (np->low, np->high))
5597 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5601 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5609 /* Split this list if it is long enough for that to help. */
5614 /* Find the place in the list that bisects the list's total cost.
5615 Here I gets half the total cost. */
5620 /* Skip nodes while their cost does not reach that amount. */
5621 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5622 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5623 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5626 npp = &(*npp)->right;
5631 /* Leave this branch lopsided, but optimize left-hand
5632 side and fill in `parent' fields for right-hand side. */
5634 np->parent = parent;
5635 balance_case_nodes (&np->left, np);
5636 for (; np->right; np = np->right)
5637 np->right->parent = np;
5641 /* If there are just three nodes, split at the middle one. */
5643 npp = &(*npp)->right;
5646 /* Find the place in the list that bisects the list's total cost,
5647 where ranges count as 2.
5648 Here I gets half the total cost. */
5649 i = (i + ranges + 1) / 2;
5652 /* Skip nodes while their cost does not reach that amount. */
5653 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5658 npp = &(*npp)->right;
5663 np->parent = parent;
5666 /* Optimize each of the two split parts. */
5667 balance_case_nodes (&np->left, np);
5668 balance_case_nodes (&np->right, np);
5672 /* Else leave this branch as one level,
5673 but fill in `parent' fields. */
5675 np->parent = parent;
5676 for (; np->right; np = np->right)
5677 np->right->parent = np;
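/* A minimal standalone sketch of the splitting step described above:
   walk to the midpoint of a sorted list linked through `right', make
   that node the pivot, and recurse on the two halves.  Equal weights
   are assumed; the real code also weights ranges and, when available,
   the character costs.  All names here are invented.  */
#if 0
struct sketch_node
{
  long low, high;
  struct sketch_node *left, *right, *parent;	/* left is 0 in a plain list */
};

static void
sketch_balance (struct sketch_node **head, struct sketch_node *parent)
{
  struct sketch_node *np, **npp;
  int count = 0, i;

  for (np = *head; np; np = np->right)
    count++;
  if (count <= 2)
    {
      for (np = *head; np; np = np->right)
	np->parent = parent;		/* too short to split: leave linear */
      return;
    }

  npp = head;
  for (i = 0; i < count / 2; i++)
    npp = &(*npp)->right;		/* advance to the midpoint */
  np = *npp;				/* np becomes the pivot */
  *npp = 0;				/* terminate the left half */
  np->left = *head;
  *head = np;				/* pivot is the new subtree root */
  np->parent = parent;
  sketch_balance (&np->left, np);
  sketch_balance (&np->right, np);
}
#endif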
5682 /* Search the parent sections of the case node tree
5683 to see if a test for the lower bound of NODE would be redundant.
5684 INDEX_TYPE is the type of the index expression.
5686 The instructions to generate the case decision tree are
5687 output in the same order as nodes are processed so it is
5688 known that if a parent node checks the range of the current
5689 node minus one, the current node is bounded at its lower
5690 span. Thus the test would be redundant. */
5693 node_has_low_bound (node, index_type)
5698 case_node_ptr pnode;
5700 /* If the lower bound of this node is the lowest value in the index type,
5701 we need not test it. */
5703 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5706 /* If this node has a left branch, the value at the left must be less
5707 than that at this node, so it cannot be bounded at the bottom and
5708 we need not bother testing any further. */
5713 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5714 node->low, integer_one_node));
5716 /* If the subtraction above overflowed, we can't verify anything.
5717 Otherwise, look for a parent that tests our value - 1. */
5719 if (! tree_int_cst_lt (low_minus_one, node->low))
5722 for (pnode = node->parent; pnode; pnode = pnode->parent)
5723 if (tree_int_cst_equal (low_minus_one, pnode->high))
5729 /* Search the parent sections of the case node tree
5730 to see if a test for the upper bound of NODE would be redundant.
5731 INDEX_TYPE is the type of the index expression.
5733 The instructions to generate the case decision tree are
5734 output in the same order as nodes are processed so it is
5735 known that if a parent node checks the range of the current
5736 node plus one, the current node is bounded at its upper
5737 span. Thus the test would be redundant. */
5740 node_has_high_bound (node, index_type)
5745 case_node_ptr pnode;
5747 /* If there is no upper bound, obviously no test is needed. */
5749 if (TYPE_MAX_VALUE (index_type) == NULL)
5752 /* If the upper bound of this node is the highest value in the type
5753 of the index expression, we need not test against it. */
5755 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5758 /* If this node has a right branch, the value at the right must be greater
5759 than that at this node, so it cannot be bounded at the top and
5760 we need not bother testing any further. */
5765 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5766 node->high, integer_one_node));
5768 /* If the addition above overflowed, we can't verify anything.
5769 Otherwise, look for a parent that tests our value + 1. */
5771 if (! tree_int_cst_lt (node->high, high_plus_one))
5774 for (pnode = node->parent; pnode; pnode = pnode->parent)
5775 if (tree_int_cst_equal (high_plus_one, pnode->low))
5781 /* Search the parent sections of the
5782 case node tree to see if both tests for the upper and lower
5783 bounds of NODE would be redundant. */
5786 node_is_bounded (node, index_type)
5790 return (node_has_low_bound (node, index_type)
5791 && node_has_high_bound (node, index_type));
5794 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5797 emit_jump_if_reachable (label)
5800 if (GET_CODE (get_last_insn ()) != BARRIER)
5804 /* Emit step-by-step code to select a case for the value of INDEX.
5805 The thus generated decision tree follows the form of the
5806 case-node binary tree NODE, whose nodes represent test conditions.
5807 INDEX_TYPE is the type of the index of the switch.
5809 Care is taken to prune redundant tests from the decision tree
5810 by detecting any boundary conditions already checked by
5811 emitted rtx. (See node_has_high_bound, node_has_low_bound
5812 and node_is_bounded, above.)
5814 Where the test conditions can be shown to be redundant we emit
5815 an unconditional jump to the target code. As a further
5816 optimization, the subordinates of a tree node are examined to
5817 check for bounded nodes. In this case conditional and/or
5818 unconditional jumps as a result of the boundary check for the
5819 current node are arranged to target the subordinates' associated
5820 code for out of bound conditions on the current node.
5822 We can assume that when control reaches the code generated here,
5823 the index value has already been compared with the parents
5824 of this node, and determined to be on the same side of each parent
5825 as this node is. Thus, if this node tests for the value 51,
5826 and a parent tested for 52, we don't need to consider
5827 the possibility of a value greater than 51. If another parent
5828 tests for the value 50, then this node need not test anything. */
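/* A minimal standalone sketch of the dispatch order described above,
   with the RTL emission replaced by printf: test the pivot's own
   range, then let each recursive call assume the comparisons its
   ancestors have already made.  The bound-pruning done by
   node_has_low_bound/node_has_high_bound is omitted to keep the
   example short; all names here are invented.  */
#if 0
#include <stdio.h>

struct sketch_case
{
  long low, high;
  const char *label;
  struct sketch_case *left, *right;
};

static void
sketch_emit (struct sketch_case *node)
{
  if (node == 0)
    {
      printf ("  goto default_label;\n");
      return;
    }
  if (node->low == node->high)
    printf ("  if (index == %ld) goto %s;\n", node->low, node->label);
  else
    printf ("  if (index >= %ld && index <= %ld) goto %s;\n",
	    node->low, node->high, node->label);
  printf ("  if (index < %ld)\n    {\n", node->low);
  sketch_emit (node->left);		/* only values below this node remain */
  printf ("    }\n  else\n    {\n");
  sketch_emit (node->right);		/* only values above this node remain */
  printf ("    }\n");
}
#endif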
5831 emit_case_nodes (index, node, default_label, index_type)
5837 /* If INDEX has an unsigned type, we must make unsigned branches. */
5838 int unsignedp = TREE_UNSIGNED (index_type);
5839 typedef rtx rtx_fn ();
5840 enum machine_mode mode = GET_MODE (index);
5842 /* See if our parents have already tested everything for us.
5843 If they have, emit an unconditional jump for this node. */
5844 if (node_is_bounded (node, index_type))
5845 emit_jump (label_rtx (node->code_label));
5847 else if (tree_int_cst_equal (node->low, node->high))
5849 /* Node is single valued. First see if the index expression matches
5850 this node and then check our children, if any. */
5852 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5853 label_rtx (node->code_label), unsignedp);
5855 if (node->right != 0 && node->left != 0)
5857 /* This node has children on both sides.
5858 Dispatch to one side or the other
5859 by comparing the index value with this node's value.
5860 If one subtree is bounded, check that one first,
5861 so we can avoid real branches in the tree. */
5863 if (node_is_bounded (node->right, index_type))
5865 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5867 GT, NULL_RTX, mode, unsignedp, 0,
5868 label_rtx (node->right->code_label));
5869 emit_case_nodes (index, node->left, default_label, index_type);
5872 else if (node_is_bounded (node->left, index_type))
5874 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5876 LT, NULL_RTX, mode, unsignedp, 0,
5877 label_rtx (node->left->code_label));
5878 emit_case_nodes (index, node->right, default_label, index_type);
5883 /* Neither node is bounded. First distinguish the two sides;
5884 then emit the code for one side at a time. */
5887 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5889 /* See if the value is on the right. */
5890 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5892 GT, NULL_RTX, mode, unsignedp, 0,
5893 label_rtx (test_label));
5895 /* Value must be on the left.
5896 Handle the left-hand subtree. */
5897 emit_case_nodes (index, node->left, default_label, index_type);
5898 /* If the left-hand subtree does nothing, fall through to the default. */
5900 emit_jump_if_reachable (default_label);
5902 /* Code branches here for the right-hand subtree. */
5903 expand_label (test_label);
5904 emit_case_nodes (index, node->right, default_label, index_type);
5908 else if (node->right != 0 && node->left == 0)
5910 /* Here we have a right child but no left, so we issue a conditional
5911 branch to default and process the right child.
5913 Omit the conditional branch to default if it avoids only one
5914 right child; it costs too much space to save so little time. */
5916 if (node->right->right || node->right->left
5917 || !tree_int_cst_equal (node->right->low, node->right->high))
5919 if (!node_has_low_bound (node, index_type))
5921 emit_cmp_and_jump_insns (index, expand_expr (node->high,
5924 LT, NULL_RTX, mode, unsignedp, 0,
5928 emit_case_nodes (index, node->right, default_label, index_type);
5931 /* We cannot process node->right normally
5932 since we haven't ruled out the numbers less than
5933 this node's value. So handle node->right explicitly. */
5934 do_jump_if_equal (index,
5935 expand_expr (node->right->low, NULL_RTX,
5937 label_rtx (node->right->code_label), unsignedp);
5940 else if (node->right == 0 && node->left != 0)
5942 /* Just one subtree, on the left. */
5944 #if 0 /* The following code and comment were formerly part
5945 of the condition here, but they didn't work
5946 and I don't understand what the idea was. -- rms. */
5947 /* If our "most probable entry" is less probable
5948 than the default label, emit a jump to
5949 the default label using condition codes
5950 already lying around. With no right branch,
5951 a branch-greater-than will get us to the default
5954 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5957 if (node->left->left || node->left->right
5958 || !tree_int_cst_equal (node->left->low, node->left->high))
5960 if (!node_has_high_bound (node, index_type))
5962 emit_cmp_and_jump_insns (index, expand_expr (node->high,
5965 GT, NULL_RTX, mode, unsignedp, 0,
5969 emit_case_nodes (index, node->left, default_label, index_type);
5972 /* We cannot process node->left normally
5973 since we haven't ruled out the numbers greater than
5974 this node's value. So handle node->left explicitly. */
5975 do_jump_if_equal (index,
5976 expand_expr (node->left->low, NULL_RTX,
5978 label_rtx (node->left->code_label), unsignedp);
5983 /* Node is a range. These cases are very similar to those for a single
5984 value, except that we do not start by testing whether this node
5985 is the one to branch to. */
5987 if (node->right != 0 && node->left != 0)
5989 /* Node has subtrees on both sides.
5990 If the right-hand subtree is bounded,
5991 test for it first, since we can go straight there.
5992 Otherwise, we need to make a branch in the control structure,
5993 then handle the two subtrees. */
5994 tree test_label = 0;
5997 if (node_is_bounded (node->right, index_type))
5998 /* Right hand node is fully bounded so we can eliminate any
5999 testing and branch directly to the target code. */
6000 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6002 GT, NULL_RTX, mode, unsignedp, 0,
6003 label_rtx (node->right->code_label));
6006 /* Right hand node requires testing.
6007 Branch to a label where we will handle it later. */
6009 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6010 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6012 GT, NULL_RTX, mode, unsignedp, 0,
6013 label_rtx (test_label));
6016 /* Value belongs to this node or to the left-hand subtree. */
6018 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6020 GE, NULL_RTX, mode, unsignedp, 0,
6021 label_rtx (node->code_label));
6023 /* Handle the left-hand subtree. */
6024 emit_case_nodes (index, node->left, default_label, index_type);
6026 /* If right node had to be handled later, do that now. */
6030 /* If the left-hand subtree fell through,
6031 don't let it fall into the right-hand subtree. */
6032 emit_jump_if_reachable (default_label);
6034 expand_label (test_label);
6035 emit_case_nodes (index, node->right, default_label, index_type);
6039 else if (node->right != 0 && node->left == 0)
6041 /* Deal with values to the left of this node,
6042 if they are possible. */
6043 if (!node_has_low_bound (node, index_type))
6045 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6047 LT, NULL_RTX, mode, unsignedp, 0,
6051 /* Value belongs to this node or to the right-hand subtree. */
6053 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6055 LE, NULL_RTX, mode, unsignedp, 0,
6056 label_rtx (node->code_label));
6058 emit_case_nodes (index, node->right, default_label, index_type);
6061 else if (node->right == 0 && node->left != 0)
6063 /* Deal with values to the right of this node,
6064 if they are possible. */
6065 if (!node_has_high_bound (node, index_type))
6067 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6069 GT, NULL_RTX, mode, unsignedp, 0,
6073 /* Value belongs to this node or to the left-hand subtree. */
6075 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6077 GE, NULL_RTX, mode, unsignedp, 0,
6078 label_rtx (node->code_label));
6080 emit_case_nodes (index, node->left, default_label, index_type);
6085 /* Node has no children so we check low and high bounds to remove
6086 redundant tests. Only one of the bounds can exist,
6087 since otherwise this node is bounded--a case tested already. */
6089 if (!node_has_high_bound (node, index_type))
6091 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6093 GT, NULL_RTX, mode, unsignedp, 0,
6097 if (!node_has_low_bound (node, index_type))
6099 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6101 LT, NULL_RTX, mode, unsignedp, 0,
6105 emit_jump (label_rtx (node->code_label));
6110 /* These routines are used by the loop unrolling code. They copy BLOCK trees
6111 so that the debugging info will be correct for the unrolled loop. */
6113 /* Indexed by block number, contains a pointer to the N'th block node.
6115 Allocated by the call to identify_blocks, then released after the call
6116 to reorder_blocks in the function unroll_block_trees. */
6118 static tree *block_vector;
6121 find_loop_tree_blocks ()
6123 tree block = DECL_INITIAL (current_function_decl);
6125 block_vector = identify_blocks (block, get_insns ());
6129 unroll_block_trees ()
6131 tree block = DECL_INITIAL (current_function_decl);
6133 reorder_blocks (block_vector, block, get_insns ());
6135 /* Release any memory allocated by identify_blocks. */
6137 free (block_vector);