1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 The functions whose names start with `expand_' are called by the
25 expander to generate RTL instructions for various kinds of constructs. */
29 #include "coretypes.h"
33 #include "hard-reg-set.h"
39 #include "insn-config.h"
47 #include "langhooks.h"
52 #include "alloc-pool.h"
54 /* Functions and data structures for expanding case statements. */
56 /* Case label structure, used to hold info on labels within case
57 statements. We handle "range" labels; for a single-value label
58 as in C, the high and low limits are the same.
60 We start with a vector of case nodes sorted in ascending order, and
61 the default label as the last element in the vector. Before expanding
62 to RTL, we transform this vector into a list linked via the RIGHT
63 fields in the case_node struct. Nodes with higher case values are
66 Switch statements can be output in three forms. A branch table is
67 used if there are more than a few labels and the labels are dense
68 within the range between the smallest and largest case value. If a
69 branch table is used, no further manipulations are done with the case
72 The alternative to the use of a branch table is to generate a series
73 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
74 and PARENT fields to hold a binary tree. Initially the tree is
75 totally unbalanced, with everything on the right. We balance the tree
76 with nodes on the left having lower case values than the parent
77 and nodes on the right having higher values. We then output the tree
80 For very small, suitable switch statements, we can generate a series
81 of simple bit test and branches instead. */
/* Members of struct case_node (the struct head is not visible in this
   excerpt).  Each node covers a contiguous [low, high] range of case
   values and carries the label to branch to on a match.  */
85 struct case_node *left; /* Left son in binary tree */
86 struct case_node *right; /* Right son in binary tree; also node chain */
87 struct case_node *parent; /* Parent of node in binary tree */
88 tree low; /* Lowest index value for this label */
89 tree high; /* Highest index value for this label */
90 tree code_label; /* Label to jump to when node matches */
/* Shorthand typedefs used throughout the case-expansion code.  */
93 typedef struct case_node case_node;
94 typedef struct case_node *case_node_ptr;
96 /* These are used by estimate_case_costs and balance_case_nodes. */
98 /* This must be a signed type, and non-ANSI compilers lack signed char. */
/* Indexed by case value + 1 so that -1 maps to slot 0; see COST_TABLE.  */
static short cost_table_[129];
100 static int use_cost_table;
101 static int cost_table_initialized;
103 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
/* The +1 bias makes index -1 legal; the cast avoids signed-index UB.  */
105 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
/* Forward declarations of file-local helpers (asm expansion, return
   expansion, and case/switch expansion).  */
107 static int n_occurrences (int, const char *);
108 static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
109 static void expand_nl_goto_receiver (void);
110 static bool check_operand_nalternatives (tree, tree);
111 static bool check_unique_operand_names (tree, tree);
112 static char *resolve_operand_name_1 (char *, tree, tree);
113 static void expand_null_return_1 (void);
114 static void expand_value_return (rtx);
115 static int estimate_case_costs (case_node_ptr);
116 static bool lshift_cheap_p (void);
117 static int case_bit_test_cmp (const void *, const void *);
118 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
119 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
120 static int node_has_low_bound (case_node_ptr, tree);
121 static int node_has_high_bound (case_node_ptr, tree);
122 static int node_is_bounded (case_node_ptr, tree);
123 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
124 static struct case_node *add_case_node (struct case_node *, tree,
125 tree, tree, tree, alloc_pool);
128 /* Return the rtx-label that corresponds to a LABEL_DECL,
129 creating it if necessary. */
132 label_rtx (tree label)
134 gcc_assert (TREE_CODE (label) == LABEL_DECL);
/* Lazily create the CODE_LABEL rtx the first time it is requested.  */
136 if (!DECL_RTL_SET_P (label))
138 rtx r = gen_label_rtx ();
139 SET_DECL_RTL (label, r);
/* Forced and nonlocal labels must survive optimization; mark them
   so they are never deleted as unreferenced.  */
140 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
141 LABEL_PRESERVE_P (r) = 1;
144 return DECL_RTL (label);
147 /* As above, but also put it on the forced-reference list of the
148 function that contains it. */
150 force_label_rtx (tree label)
152 rtx ref = label_rtx (label);
153 tree function = decl_function_context (label);
/* A label with no containing function would be meaningless here.  */
155 gcc_assert (function);
/* Chain the reference onto forced_labels so the label is kept alive.  */
157 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
161 /* Add an unconditional jump to LABEL as the next sequential instruction. */
164 emit_jump (rtx label)
/* Flush any deferred stack adjustment before transferring control.  */
166 do_pending_stack_adjust ();
167 emit_jump_insn (gen_jump (label));
171 /* Emit code to jump to the address
172 specified by the pointer expression EXP. */
175 expand_computed_goto (tree exp)
177 rtx x = expand_normal (exp);
/* The target address must be in Pmode for emit_indirect_jump.  */
179 x = convert_memory_address (Pmode, x);
181 do_pending_stack_adjust ();
182 emit_indirect_jump (x);
185 /* Handle goto statements and the labels that they can go to. */
187 /* Specify the location in the RTL code of a label LABEL,
188 which is a LABEL_DECL tree node.
190 This is used for the kind of label that the user can jump to with a
191 goto statement, and for alternatives of a switch or case statement.
192 RTL labels generated for loops and conditionals don't go through here;
193 they are generated directly at the RTL level, by other functions below.
195 Note that this has nothing to do with defining label *names*.
196 Languages vary in how they do that and what that even means. */
199 expand_label (tree label)
201 rtx label_r = label_rtx (label);
203 do_pending_stack_adjust ();
204 emit_label (label_r);
/* Record the source-level name on the rtl label, for debug output.  */
205 if (DECL_NAME (label))
206 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
/* A nonlocal label needs receiver code and must be registered so
   nonlocal gotos from other functions can find it.  */
208 if (DECL_NONLOCAL (label))
210 expand_nl_goto_receiver ();
211 nonlocal_goto_handler_labels
212 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
213 nonlocal_goto_handler_labels);
216 if (FORCED_LABEL (label))
217 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
/* Labels reachable in unusual ways bound the range of label numbers.  */
219 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
220 maybe_set_first_label_num (label_r);
223 /* Generate RTL code for a `goto' statement with target label LABEL.
224 LABEL should be a LABEL_DECL tree node that was or will later be
225 defined with `expand_label'. */
228 expand_goto (tree label)
230 #ifdef ENABLE_CHECKING
231 /* Check for a nonlocal goto to a containing function. Should have
232 gotten translated to __builtin_nonlocal_goto. */
233 tree context = decl_function_context (label);
234 gcc_assert (!context || context == current_function_decl);
237 emit_jump (label_rtx (label));
240 /* Return the number of times character C occurs in string S. */
/* NOTE(review): the body of this function is not visible in this excerpt.  */
242 n_occurrences (int c, const char *s)
250 /* Generate RTL for an asm statement (explicit assembler code).
251 STRING is a STRING_CST node containing the assembler code text,
252 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
253 insn is volatile; don't optimize it. */
256 expand_asm_loc (tree string, int vol, location_t locus)
/* Strip an ADDR_EXPR wrapper to reach the underlying STRING_CST.  */
260 if (TREE_CODE (string) == ADDR_EXPR)
261 string = TREE_OPERAND (string, 0);
/* Build an ASM_INPUT carrying a GC-copied template and its location.  */
263 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
264 ggc_strdup (TREE_STRING_POINTER (string)),
267 MEM_VOLATILE_P (body) = vol;
272 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
273 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
274 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
275 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
276 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
277 constraint allows the use of a register operand. And, *IS_INOUT
278 will be true if the operand is read-write, i.e., if it is used as
279 an input as well as an output. If *CONSTRAINT_P is not in
280 canonical form, it will be made canonical. (Note that `+' will be
281 replaced with `=' as part of this process.)
283 Returns TRUE if all went well; FALSE if an error occurred. */
286 parse_output_constraint (const char **constraint_p, int operand_num,
287 int ninputs, int noutputs, bool *allows_mem,
288 bool *allows_reg, bool *is_inout)
290 const char *constraint = *constraint_p;
293 /* Assume the constraint doesn't allow the use of either a register
298 /* Allow the `=' or `+' to not be at the beginning of the string,
299 since it wasn't explicitly documented that way, and there is a
300 large body of code that puts it last. Swap the character to
301 the front, so as not to uglify any place else. */
302 p = strchr (constraint, '=');
304 p = strchr (constraint, '+');
306 /* If the string doesn't contain an `=', issue an error
310 error ("output operand constraint lacks %<=%>");
314 /* If the constraint begins with `+', then the operand is both read
315 from and written to. */
316 *is_inout = (*p == '+');
318 /* Canonicalize the output constraint so that it begins with `='. */
319 if (p != constraint || *is_inout)
322 size_t c_len = strlen (constraint);
325 warning (0, "output constraint %qc for operand %d "
326 "is not at the beginning",
329 /* Make a copy of the constraint. */
330 buf = XALLOCAVEC (char, c_len + 1);
331 strcpy (buf, constraint);
332 /* Swap the first character and the `=' or `+'. */
333 buf[p - constraint] = buf[0];
334 /* Make sure the first character is an `='. (Until we do this,
335 it might be a `+'.) */
337 /* Replace the constraint with the canonicalized string. */
338 *constraint_p = ggc_alloc_string (buf, c_len);
339 constraint = *constraint_p;
342 /* Loop through the constraint string. */
/* Classify each remaining constraint letter; CONSTRAINT_LEN handles
   multi-character constraints.  */
343 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
348 error ("operand constraint contains incorrectly positioned "
/* '%' commutes an operand pair, so it cannot apply to the last one.  */
353 if (operand_num + 1 == ninputs + noutputs)
355 error ("%<%%%> constraint used with last operand")
360 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
364 case '?': case '!': case '*': case '&': case '#':
365 case 'E': case 'F': case 'G': case 'H':
366 case 's': case 'i': case 'n':
367 case 'I': case 'J': case 'K': case 'L': case 'M':
368 case 'N': case 'O': case 'P': case ',':
/* Digits are matching constraints, which only make sense on inputs.  */
371 case '0': case '1': case '2': case '3': case '4':
372 case '5': case '6': case '7': case '8': case '9':
374 error ("matching constraint not valid in output operand");
378 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
379 excepting those that expand_call created. So match memory
/* Machine-specific register-class and extra constraints.  */
396 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
398 #ifdef EXTRA_CONSTRAINT_STR
399 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
401 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
405 /* Otherwise we can't assume anything about the nature of
406 the constraint except that it isn't purely registers.
407 Treat it like "g" and hope for the best. */
418 /* Similar, but for input constraints. */
421 parse_input_constraint (const char **constraint_p, int input_num,
422 int ninputs, int noutputs, int ninout,
423 const char * const * constraints,
424 bool *allows_mem, bool *allows_reg)
426 const char *constraint = *constraint_p;
427 const char *orig_constraint = constraint;
428 size_t c_len = strlen (constraint);
430 bool saw_match = false;
432 /* Assume the constraint doesn't allow the use of either
433 a register or memory. */
437 /* Make sure constraint has neither `=', `+', nor '&'. */
439 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
440 switch (constraint[j])
/* Output-only modifiers are invalid in an input constraint --
   unless we are scanning a substituted matching constraint.  */
442 case '+': case '=': case '&':
443 if (constraint == orig_constraint)
445 error ("input operand constraint contains %qc", constraint[j]);
/* '%' commutes with the following operand, so it is meaningless
   on the last real input.  */
451 if (constraint == orig_constraint
452 && input_num + 1 == ninputs - ninout)
454 error ("%<%%%> constraint used with last operand");
459 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
464 case '?': case '!': case '*': case '#':
465 case 'E': case 'F': case 'G': case 'H':
466 case 's': case 'i': case 'n':
467 case 'I': case 'J': case 'K': case 'L': case 'M':
468 case 'N': case 'O': case 'P': case ',':
471 /* Whether or not a numeric constraint allows a register is
472 decided by the matching constraint, and so there is no need
473 to do anything special with them. We must handle them in
474 the default case, so that we don't unnecessarily force
475 operands to memory. */
476 case '0': case '1': case '2': case '3': case '4':
477 case '5': case '6': case '7': case '8': case '9':
/* Decode the matched output number and validate its range.  */
484 match = strtoul (constraint + j, &end, 10);
485 if (match >= (unsigned long) noutputs)
487 error ("matching constraint references invalid operand number");
491 /* Try and find the real constraint for this dup. Only do this
492 if the matching constraint is the only alternative. */
494 && (j == 0 || (j == 1 && constraint[0] == '%')))
496 constraint = constraints[match];
497 *constraint_p = constraint;
498 c_len = strlen (constraint);
500 /* ??? At the end of the loop, we will skip the first part of
501 the matched constraint. This assumes not only that the
502 other constraint is an output constraint, but also that
503 the '=' or '+' come first. */
507 j = end - constraint;
508 /* Anticipate increment at end of loop. */
523 if (! ISALPHA (constraint[j]))
525 error ("invalid punctuation %qc in constraint", constraint[j]);
/* Machine-specific register-class and extra constraints.  */
528 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
531 #ifdef EXTRA_CONSTRAINT_STR
532 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
534 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
538 /* Otherwise we can't assume anything about the nature of
539 the constraint except that it isn't purely registers.
540 Treat it like "g" and hope for the best. */
/* A pure matching constraint that ends up register-less is suspicious.  */
548 if (saw_match && !*allows_reg)
549 warning (0, "matching constraint does not allow a register");
554 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
555 can be an asm-declared register. Called via walk_tree. */
558 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
562 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
564 if (TREE_CODE (decl) == VAR_DECL)
/* Only user-declared hard-register variables can conflict.  */
566 if (DECL_HARD_REGISTER (decl)
567 && REG_P (DECL_RTL (decl))
568 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
570 rtx reg = DECL_RTL (decl);
572 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
/* Types and PARM_DECLs cannot contain asm-declared registers below them.  */
577 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
582 /* If there is an overlap between *REGS and DECL, return the first overlap
/* Walk DECL looking for an asm-declared register variable in *REGS.  */
585 tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
587 return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
590 /* Check for overlap between registers marked in CLOBBERED_REGS and
591 anything inappropriate in T. Emit error and return the register
592 variable definition for error, NULL_TREE for ok. */
595 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
597 /* Conflicts between asm-declared register variables and the clobber
598 list are not allowed. */
599 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
603 error ("asm-specifier for variable %qs conflicts with asm clobber list",
604 IDENTIFIER_POINTER (DECL_NAME (overlap)));
606 /* Reset registerness to stop multiple errors emitted for a single
608 DECL_REGISTER (overlap) = 0;
615 /* Generate RTL for an asm statement with arguments.
616 STRING is the instruction template.
617 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
618 Each output or input has an expression in the TREE_VALUE and
619 a tree list in TREE_PURPOSE which in turn contains a constraint
620 name in TREE_VALUE (or NULL_TREE) and a constraint string
622 CLOBBERS is a list of STRING_CST nodes each naming a hard register
623 that is clobbered by this insn.
625 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
626 Some elements of OUTPUTS may be replaced with trees representing temporary
627 values. The caller should copy those temporary values to the originally
630 VOL nonzero means the insn is volatile; don't optimize it. */
633 expand_asm_operands (tree string, tree outputs, tree inputs,
634 tree clobbers, int vol, location_t locus)
636 rtvec argvec, constraintvec;
638 int ninputs = list_length (inputs);
639 int noutputs = list_length (outputs);
642 HARD_REG_SET clobbered_regs;
643 int clobber_conflict_found = 0;
647 /* Vector of RTX's of evaluated output operands. */
648 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
649 int *inout_opnum = XALLOCAVEC (int, noutputs);
650 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
651 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
652 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
653 int old_generating_concat_p = generating_concat_p;
655 /* An ASM with no outputs needs to be treated as volatile, for now. */
659 if (! check_operand_nalternatives (outputs, inputs))
662 string = resolve_asm_operand_names (string, outputs, inputs);
664 /* Collect constraints. */
/* Outputs occupy constraints[0..noutputs), inputs follow.  */
666 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
667 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
668 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
669 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
671 /* Sometimes we wish to automatically clobber registers across an asm.
672 Case in point is when the i386 backend moved from cc0 to a hard reg --
673 maintaining source-level compatibility means automatically clobbering
674 the flags register. */
675 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
677 /* Count the number of meaningful clobbered registers, ignoring what
678 we would ignore later. */
680 CLEAR_HARD_REG_SET (clobbered_regs);
681 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
685 if (TREE_VALUE (tail) == error_mark_node)
687 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
/* decode_reg_name: >= 0 is a register number, negative values are
   special codes (e.g. -4 appears to mean "memory" -- see below).  */
689 i = decode_reg_name (regname);
690 if (i >= 0 || i == -4)
693 error ("unknown register name %qs in %<asm%>", regname);
695 /* Mark clobbered registers. */
698 /* Clobbering the PIC register is an error. */
699 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
701 error ("PIC register %qs clobbered in %<asm%>", regname);
705 SET_HARD_REG_BIT (clobbered_regs, i);
709 /* First pass over inputs and outputs checks validity and sets
710 mark_addressable if needed. */
713 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
715 tree val = TREE_VALUE (tail);
716 tree type = TREE_TYPE (val);
717 const char *constraint;
722 /* If there's an erroneous arg, emit no insn. */
723 if (type == error_mark_node)
726 /* Try to parse the output constraint. If that fails, there's
727 no point in going further. */
728 constraint = constraints[i];
729 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
730 &allows_mem, &allows_reg, &is_inout))
/* A decl that must live in memory (or whose register mode mismatches
   its type mode) needs to be addressable for a "m" constraint.  */
737 && REG_P (DECL_RTL (val))
738 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
739 lang_hooks.mark_addressable (val);
746 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
748 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
752 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
754 bool allows_reg, allows_mem;
755 const char *constraint;
757 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
758 would get VOIDmode and that could cause a crash in reload. */
759 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
762 constraint = constraints[i + noutputs];
763 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
764 constraints, &allows_mem, &allows_reg))
767 if (! allows_reg && allows_mem)
768 lang_hooks.mark_addressable (TREE_VALUE (tail));
771 /* Second pass evaluates arguments. */
774 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
776 tree val = TREE_VALUE (tail);
777 tree type = TREE_TYPE (val);
/* Re-parse; the first pass already diagnosed failures, so OK holds.  */
784 ok = parse_output_constraint (&constraints[i], i, ninputs,
785 noutputs, &allows_mem, &allows_reg,
789 /* If an output operand is not a decl or indirect ref and our constraint
790 allows a register, make a temporary to act as an intermediate.
791 Make the asm insn write into that, then our caller will copy it to
792 the real output operand. Likewise for promoted variables. */
794 generating_concat_p = 0;
796 real_output_rtx[i] = NULL_RTX;
797 if ((TREE_CODE (val) == INDIRECT_REF
800 && (allows_mem || REG_P (DECL_RTL (val)))
801 && ! (REG_P (DECL_RTL (val))
802 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
806 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
808 op = validize_mem (op);
810 if (! allows_reg && !MEM_P (op))
811 error ("output number %d not directly addressable", i);
/* The asm cannot write this operand directly; stage the result in a
   fresh register and copy it out afterwards (see real_output_rtx).  */
812 if ((! allows_mem && MEM_P (op))
813 || GET_CODE (op) == CONCAT)
815 real_output_rtx[i] = op;
816 op = gen_reg_rtx (GET_MODE (op));
818 emit_move_insn (op, real_output_rtx[i]);
/* Otherwise write into a temporary; caller copies it back via the
   rewritten TREE_VALUE.  */
823 op = assign_temp (type, 0, 0, 1);
824 op = validize_mem (op);
825 TREE_VALUE (tail) = make_tree (type, op);
829 generating_concat_p = old_generating_concat_p;
/* Record in-out ("+") operands so matching inputs can be synthesized.  */
833 inout_mode[ninout] = TYPE_MODE (type);
834 inout_opnum[ninout++] = i;
837 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
838 clobber_conflict_found = 1;
841 /* Make vectors for the expression-rtx, constraint strings,
842 and named operands. */
844 argvec = rtvec_alloc (ninputs);
845 constraintvec = rtvec_alloc (ninputs);
847 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
848 : GET_MODE (output_rtx[0])),
849 ggc_strdup (TREE_STRING_POINTER (string)),
850 empty_string, 0, argvec, constraintvec,
853 MEM_VOLATILE_P (body) = vol;
855 /* Eval the inputs and put them into ARGVEC.
856 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
858 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
860 bool allows_reg, allows_mem;
861 const char *constraint;
866 constraint = constraints[i + noutputs];
867 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
868 constraints, &allows_mem, &allows_reg);
871 generating_concat_p = 0;
873 val = TREE_VALUE (tail);
874 type = TREE_TYPE (val);
875 /* EXPAND_INITIALIZER will not generate code for valid initializer
876 constants, but will still generate code for other types of operand.
877 This is the behavior we want for constant constraints. */
878 op = expand_expr (val, NULL_RTX, VOIDmode,
879 allows_reg ? EXPAND_NORMAL
880 : allows_mem ? EXPAND_MEMORY
881 : EXPAND_INITIALIZER);
883 /* Never pass a CONCAT to an ASM. */
884 if (GET_CODE (op) == CONCAT)
885 op = force_reg (GET_MODE (op), op);
887 op = validize_mem (op);
/* If the operand still doesn't satisfy its constraint, coerce it.  */
889 if (asm_operand_ok (op, constraint, NULL) <= 0)
891 if (allows_reg && TYPE_MODE (type) != BLKmode)
892 op = force_reg (TYPE_MODE (type), op);
893 else if (!allows_mem)
894 warning (0, "asm operand %d probably doesn%'t match constraints",
898 /* We won't recognize either volatile memory or memory
899 with a queued address as available a memory_operand
900 at this point. Ignore it: clearly this *is* a memory. */
904 warning (0, "use of memory input without lvalue in "
905 "asm operand %d is deprecated", i + noutputs);
909 rtx mem = force_const_mem (TYPE_MODE (type), op);
911 op = validize_mem (mem);
913 op = force_reg (TYPE_MODE (type), op);
916 || GET_CODE (op) == SUBREG
917 || GET_CODE (op) == CONCAT)
/* Spill the value to a memory temporary with the type's qualifiers.  */
919 tree qual_type = build_qualified_type (type,
922 rtx memloc = assign_temp (qual_type, 1, 1, 1);
923 memloc = validize_mem (memloc);
924 emit_move_insn (memloc, op);
930 generating_concat_p = old_generating_concat_p;
931 ASM_OPERANDS_INPUT (body, i) = op;
933 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
934 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
935 ggc_strdup (constraints[i + noutputs]));
937 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
938 clobber_conflict_found = 1;
941 /* Protect all the operands from the queue now that they have all been
944 generating_concat_p = 0;
946 /* For in-out operands, copy output rtx to input rtx. */
/* Each "+" output gets a synthesized trailing input whose constraint
   is the output's operand number, making them a matched pair.  */
947 for (i = 0; i < ninout; i++)
949 int j = inout_opnum[i];
952 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
955 sprintf (buffer, "%d", j);
956 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
957 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
960 generating_concat_p = old_generating_concat_p;
962 /* Now, for each output, construct an rtx
963 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
964 ARGVEC CONSTRAINTS OPNAMES))
965 If there is more than one, put them inside a PARALLEL. */
967 if (noutputs == 1 && nclobbers == 0)
969 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
970 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
973 else if (noutputs == 0 && nclobbers == 0)
975 /* No output operands: put in a raw ASM_OPERANDS rtx. */
/* General case: a PARALLEL of one SET per output plus the clobbers.  */
987 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
989 /* For each output operand, store a SET. */
990 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
993 = gen_rtx_SET (VOIDmode,
996 (GET_MODE (output_rtx[i]),
997 ggc_strdup (TREE_STRING_POINTER (string)),
998 ggc_strdup (constraints[i]),
999 i, argvec, constraintvec, locus));
1001 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1004 /* If there are no outputs (but there are some clobbers)
1005 store the bare ASM_OPERANDS into the PARALLEL. */
1008 XVECEXP (body, 0, i++) = obody;
1010 /* Store (clobber REG) for each clobbered register specified. */
1012 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1014 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1015 int j = decode_reg_name (regname);
1020 if (j == -3) /* `cc', which is not a register */
1023 if (j == -4) /* `memory', don't cache memory across asm */
1025 XVECEXP (body, 0, i++)
1026 = gen_rtx_CLOBBER (VOIDmode,
1029 gen_rtx_SCRATCH (VOIDmode)));
1033 /* Ignore unknown register, error already signaled. */
1037 /* Use QImode since that's guaranteed to clobber just one reg. */
1038 clobbered_reg = gen_rtx_REG (QImode, j);
1040 /* Do sanity check for overlap between clobbers and respectively
1041 input and outputs that hasn't been handled. Such overlap
1042 should have been detected and reported above. */
1043 if (!clobber_conflict_found)
1047 /* We test the old body (obody) contents to avoid tripping
1048 over the under-construction body. */
1049 for (opno = 0; opno < noutputs; opno++)
1050 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1051 internal_error ("asm clobber conflict with output operand");
1053 for (opno = 0; opno < ninputs - ninout; opno++)
1054 if (reg_overlap_mentioned_p (clobbered_reg,
1055 ASM_OPERANDS_INPUT (obody, opno)))
1056 internal_error ("asm clobber conflict with input operand");
1059 XVECEXP (body, 0, i++)
1060 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1066 /* For any outputs that needed reloading into registers, spill them
1067 back to where they belong. */
1068 for (i = 0; i < noutputs; ++i)
1069 if (real_output_rtx[i])
1070 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1072 crtl->has_asm_statement = 1;
/* Expand a GIMPLE/tree ASM_EXPR: dispatch simple asms to expand_asm_loc,
   otherwise expand the operands and copy any temporaries back into the
   caller-visible output lvalues.  */
1077 expand_asm_expr (tree exp)
/* A "simple" asm (no operands) needs no operand processing.  */
1083 if (ASM_INPUT_P (exp))
1085 expand_asm_loc (ASM_STRING (exp), ASM_VOLATILE_P (exp), input_location);
1089 outputs = ASM_OUTPUTS (exp);
1090 noutputs = list_length (outputs);
1091 /* o[I] is the place that output number I should be written. */
1092 o = (tree *) alloca (noutputs * sizeof (tree));
1094 /* Record the contents of OUTPUTS before it is modified. */
1095 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1096 o[i] = TREE_VALUE (tail);
1098 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1099 OUTPUTS some trees for where the values were actually stored. */
1100 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1101 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1104 /* Copy all the intermediate outputs into the specified outputs. */
1105 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1107 if (o[i] != TREE_VALUE (tail))
1109 expand_assignment (o[i], TREE_VALUE (tail), false);
1112 /* Restore the original value so that it's correct the next
1113 time we expand this function. */
1114 TREE_VALUE (tail) = o[i];
1119 /* A subroutine of expand_asm_operands. Check that all operands have
1120 the same number of alternatives. Return true if so. */
1123 check_operand_nalternatives (tree outputs, tree inputs)
1125 if (outputs || inputs)
/* Use the first operand's constraint to fix the expected count;
   alternatives are comma-separated, so count commas.  */
1127 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1129 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1132 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1134 error ("too many alternatives in %<asm%>");
1141 const char *constraint
1142 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1144 if (n_occurrences (',', constraint) != nalternatives)
1146 error ("operand constraints for %<asm%> differ "
1147 "in number of alternatives");
/* Advance through the outputs, then continue into the inputs.  */
1151 if (TREE_CHAIN (tmp))
1152 tmp = TREE_CHAIN (tmp);
1154 tmp = next, next = 0;
1161 /* A subroutine of expand_asm_operands. Check that all operand names
1162 are unique. Return true if so. We rely on the fact that these names
1163 are identifiers, and so have been canonicalized by get_identifier,
1164 so all we need are pointer comparisons. */
1167 check_unique_operand_names (tree outputs, tree inputs)
/* Each output name must be unique among the outputs that follow it.  */
1171 for (i = outputs; i ; i = TREE_CHAIN (i))
1173 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1177 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1178 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
/* Each input name must be unique among later inputs AND all outputs.  */
1182 for (i = inputs; i ; i = TREE_CHAIN (i))
1184 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1188 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1189 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1191 for (j = outputs; j ; j = TREE_CHAIN (j))
1192 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1199 error ("duplicate asm operand name %qs",
1200 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1204 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1205 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1206 STRING and in the constraints to those numbers. */
1209 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1216 check_unique_operand_names (outputs, inputs);
1218 /* Substitute [<name>] in input constraint strings. There should be no
1219 named operands in output constraints. */
1220 for (t = inputs; t ; t = TREE_CHAIN (t))
1222 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1223 if (strchr (c, '[') != NULL)
/* Rewrite each [<name>] in a scratch copy, then rebuild the string.  */
1225 p = buffer = xstrdup (c);
1226 while ((p = strchr (p, '[')) != NULL)
1227 p = resolve_operand_name_1 (p, outputs, inputs);
1228 TREE_VALUE (TREE_PURPOSE (t))
1229 = build_string (strlen (buffer), buffer);
1234 /* Now check for any needed substitutions in the template. */
/* First scan: decide whether the template mentions %[name] at all.  */
1235 c = TREE_STRING_POINTER (string);
1236 while ((c = strchr (c, '%')) != NULL)
1240 else if (ISALPHA (c[1]) && c[2] == '[')
1251 /* OK, we need to make a copy so we can perform the substitutions.
1252 Assume that we will not need extra space--we get to remove '['
1253 and ']', which means we cannot have a problem until we have more
1254 than 999 operands. */
1255 buffer = xstrdup (TREE_STRING_POINTER (string));
1256 p = buffer + (c - TREE_STRING_POINTER (string));
1258 while ((p = strchr (p, '%')) != NULL)
1262 else if (ISALPHA (p[1]) && p[2] == '[')
1270 p = resolve_operand_name_1 (p, outputs, inputs);
1273 string = build_string (strlen (buffer), buffer);
1280 /* A subroutine of resolve_operand_names. P points to the '[' for a
1281 potential named operand of the form [<name>]. In place, replace
1282 the name and brackets with a number. Return a pointer to the
1283 balance of the string after substitution. */
1286 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1293 /* Collect the operand name. */
1294 q = strchr (p, ']');
1297 error ("missing close brace for named operand")
1298 return strchr (p, '\0');
1302 /* Resolve the name to a number. */
/* Operands are numbered with outputs first, then inputs, so OP keeps
   counting across both loops.  */
1303 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1305 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1308 const char *c = TREE_STRING_POINTER (name);
1309 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1313 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1315 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1318 const char *c = TREE_STRING_POINTER (name);
1319 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1325 error ("undefined named operand %qs", p + 1);
1329 /* Replace the name with the number. Unfortunately, not all libraries
1330 get the return value of sprintf correct, so search for the end of the
1331 generated string by hand. */
1332 sprintf (p, "%d", op);
1333 p = strchr (p, '\0');
1335 /* Verify the no extra buffer space assumption. */
1336 gcc_assert (p <= q);
1338 /* Shift the rest of the buffer down to fill the gap. */
1339 memmove (p, q + 1, strlen (q + 1) + 1);
1344 /* Generate RTL to evaluate the expression EXP. */
/* EXP's value is discarded, but a volatile memory reference is forced
   to be actually read so the access is not optimized away.  */
1347 expand_expr_stmt (tree exp)
1352 value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
1353 type = TREE_TYPE (exp);
1355 /* If all we do is reference a volatile value in memory,
1356 copy it to a register to be sure it is actually touched. */
1357 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1359 if (TYPE_MODE (type) == VOIDmode)
1361 else if (TYPE_MODE (type) != BLKmode)
1362 value = copy_to_reg (value);
/* BLKmode values cannot be copied to a register, so reference the
   memory with a self-comparison instead.  */
1365 rtx lab = gen_label_rtx ();
1367 /* Compare the value with itself to reference it. */
1368 emit_cmp_and_jump_insns (value, value, EQ,
1369 expand_normal (TYPE_SIZE (type)),
1375 /* Free any temporaries used to evaluate this expression. */
1379 /* Warn if EXP contains any computations whose results are not used.
1380 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1381 (potential) location of the expression. */
/* Walks down through wrapper expressions (binds, casts, comma
   operators, &&/||) to the subexpression whose value is dropped.  */
1384 warn_if_unused_value (const_tree exp, location_t locus)
1387 if (TREE_USED (exp) || TREE_NO_WARNING (exp))
1390 /* Don't warn about void constructs. This includes casting to void,
1391 void function calls, and statement expressions with a final cast
1393 if (VOID_TYPE_P (TREE_TYPE (exp)))
/* Prefer the expression's own location over the caller-supplied one.  */
1396 if (EXPR_HAS_LOCATION (exp))
1397 locus = EXPR_LOCATION (exp);
1399 switch (TREE_CODE (exp))
1401 case PREINCREMENT_EXPR:
1402 case POSTINCREMENT_EXPR:
1403 case PREDECREMENT_EXPR:
1404 case POSTDECREMENT_EXPR:
1409 case TRY_CATCH_EXPR:
1410 case WITH_CLEANUP_EXPR:
1416 /* For a binding, warn if no side effect within it. */
1417 exp = BIND_EXPR_BODY (exp);
1421 case NON_LVALUE_EXPR:
1422 exp = TREE_OPERAND (exp, 0);
1425 case TRUTH_ORIF_EXPR:
1426 case TRUTH_ANDIF_EXPR:
1427 /* In && or ||, warn if 2nd operand has no side effect. */
1428 exp = TREE_OPERAND (exp, 1);
1432 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1434 /* Let people do `(foo (), 0)' without a warning. */
1435 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1437 exp = TREE_OPERAND (exp, 1);
1441 /* If this is an expression with side effects, don't warn; this
1442 case commonly appears in macro expansions. */
1443 if (TREE_SIDE_EFFECTS (exp))
1448 /* Don't warn about automatic dereferencing of references, since
1449 the user cannot control it. */
1450 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1452 exp = TREE_OPERAND (exp, 0);
1458 /* Referencing a volatile value is a side effect, so don't warn. */
1459 if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
1460 && TREE_THIS_VOLATILE (exp))
1463 /* If this is an expression which has no operands, there is no value
1464 to be unused. There are no such language-independent codes,
1465 but front ends may define such. */
1466 if (EXPRESSION_CLASS_P (exp) && TREE_OPERAND_LENGTH (exp) == 0)
1470 warning (OPT_Wunused_value, "%Hvalue computed is not used", &locus);
1476 /* Generate RTL to return from the current function, with no value.
1477 (That is, we do not do anything about returning any value.) */
1480 expand_null_return (void)
1482 /* If this function was declared to return a value, but we
1483 didn't, clobber the return registers so that they are not
1484 propagated live to the rest of the function. */
1485 clobber_return_register ();
/* expand_null_return_1 flushes pending stack adjustments and emits
   the jump to return_label.  */
1487 expand_null_return_1 ();
1490 /* Generate RTL to return directly from the current function.
1491 (That is, we bypass any return value.) */
1494 expand_naked_return (void)
1498 clear_pending_stack_adjust ();
1499 do_pending_stack_adjust ();
/* Reuse the shared naked-return label if one exists; otherwise
   create it lazily on first use.  */
1501 end_label = naked_return_label;
1503 end_label = naked_return_label = gen_label_rtx ();
1505 emit_jump (end_label);
1508 /* Generate RTL to return from the current function, with value VAL. */
1511 expand_value_return (rtx val)
1513 /* Copy the value to the return location
1514 unless it's already there. */
1516 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1517 if (return_reg != val)
1519 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
/* Honor the target's return-value promotion: widen VAL to the mode
   the ABI says the value is returned in.  */
1520 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1522 int unsignedp = TYPE_UNSIGNED (type);
1523 enum machine_mode old_mode
1524 = DECL_MODE (DECL_RESULT (current_function_decl));
1525 enum machine_mode mode
1526 = promote_mode (type, old_mode, &unsignedp, 1);
1528 if (mode != old_mode)
1529 val = convert_modes (mode, old_mode, val, unsignedp);
/* A PARALLEL return location means the value lives in several
   registers; emit_group_load scatters VAL across them.  */
1531 if (GET_CODE (return_reg) == PARALLEL)
1532 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1534 emit_move_insn (return_reg, val);
1537 expand_null_return_1 ();
1540 /* Output a return with no value. */
/* Common tail for all the expand_*return routines: flush pending
   stack adjustments, then jump to the function's single return label. */
1543 expand_null_return_1 (void)
1545 clear_pending_stack_adjust ();
1546 do_pending_stack_adjust ();
1547 emit_jump (return_label);
1550 /* Generate RTL to evaluate the expression RETVAL and return it
1551 from the current function. */
1554 expand_return (tree retval)
1560 /* If function wants no value, give it none. */
1561 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
1563 expand_normal (retval);
1564 expand_null_return ();
1568 if (retval == error_mark_node)
1570 /* Treat this like a return of no value from a function that
1572 expand_null_return ();
/* A gimplified `return expr' arrives as RESULT_DECL = expr; peel off
   the assignment and keep only the right-hand side.  */
1575 else if ((TREE_CODE (retval) == MODIFY_EXPR
1576 || TREE_CODE (retval) == INIT_EXPR)
1577 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1578 retval_rhs = TREE_OPERAND (retval, 1);
1580 retval_rhs = retval;
1582 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1584 /* If we are returning the RESULT_DECL, then the value has already
1585 been stored into it, so we don't have to do anything special. */
1586 if (TREE_CODE (retval_rhs) == RESULT_DECL)
1587 expand_value_return (result_rtl);
1589 /* If the result is an aggregate that is being returned in one (or more)
1590 registers, load the registers here. The compiler currently can't handle
1591 copying a BLKmode value into registers. We could put this code in a
1592 more general area (for use by everyone instead of just function
1593 call/return), but until this feature is generally usable it is kept here
1594 (and in expand_call). */
1596 else if (retval_rhs != 0
1597 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1598 && REG_P (result_rtl))
1601 unsigned HOST_WIDE_INT bitpos, xbitpos;
1602 unsigned HOST_WIDE_INT padding_correction = 0;
1603 unsigned HOST_WIDE_INT bytes
1604 = int_size_in_bytes (TREE_TYPE (retval_rhs));
1605 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1606 unsigned int bitsize
1607 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1608 rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
1609 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1610 rtx result_val = expand_normal (retval_rhs);
1611 enum machine_mode tmpmode, result_reg_mode;
1615 expand_null_return ();
1619 /* If the structure doesn't take up a whole number of words, see
1620 whether the register value should be padded on the left or on
1621 the right. Set PADDING_CORRECTION to the number of padding
1622 bits needed on the left side.
1624 In most ABIs, the structure will be returned at the least end of
1625 the register, which translates to right padding on little-endian
1626 targets and left padding on big-endian targets. The opposite
1627 holds if the structure is returned at the most significant
1628 end of the register. */
1629 if (bytes % UNITS_PER_WORD != 0
1630 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
1632 : BYTES_BIG_ENDIAN))
1633 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
1636 /* Copy the structure BITSIZE bits at a time. */
1637 for (bitpos = 0, xbitpos = padding_correction;
1638 bitpos < bytes * BITS_PER_UNIT;
1639 bitpos += bitsize, xbitpos += bitsize)
1641 /* We need a new destination pseudo each time xbitpos is
1642 on a word boundary and when xbitpos == padding_correction
1643 (the first time through). */
1644 if (xbitpos % BITS_PER_WORD == 0
1645 || xbitpos == padding_correction)
1647 /* Generate an appropriate register. */
1648 dst = gen_reg_rtx (word_mode);
1649 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
1651 /* Clear the destination before we move anything into it. */
1652 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
1655 /* We need a new source operand each time bitpos is on a word
1657 if (bitpos % BITS_PER_WORD == 0)
1658 src = operand_subword_force (result_val,
1659 bitpos / BITS_PER_WORD,
1662 /* Use bitpos for the source extraction (left justified) and
1663 xbitpos for the destination store (right justified). */
1664 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
1665 extract_bit_field (src, bitsize,
1666 bitpos % BITS_PER_WORD, 1,
1667 NULL_RTX, word_mode, word_mode));
1670 tmpmode = GET_MODE (result_rtl);
1671 if (tmpmode == BLKmode)
1673 /* Find the smallest integer mode large enough to hold the
1674 entire structure and use that mode instead of BLKmode
1675 on the USE insn for the return register. */
1676 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1677 tmpmode != VOIDmode;
1678 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1679 /* Have we found a large enough mode? */
1680 if (GET_MODE_SIZE (tmpmode) >= bytes)
1683 /* A suitable mode should have been found. */
1684 gcc_assert (tmpmode != VOIDmode);
1686 PUT_MODE (result_rtl, tmpmode);
/* operand_subword below needs a mode at least word-sized.  */
1689 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
1690 result_reg_mode = word_mode;
1692 result_reg_mode = tmpmode;
1693 result_reg = gen_reg_rtx (result_reg_mode);
1695 for (i = 0; i < n_regs; i++)
1696 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
1699 if (tmpmode != result_reg_mode)
1700 result_reg = gen_lowpart (tmpmode, result_reg);
1702 expand_value_return (result_reg);
1704 else if (retval_rhs != 0
1705 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1706 && (REG_P (result_rtl)
1707 || (GET_CODE (result_rtl) == PARALLEL)))
1709 /* Calculate the return value into a temporary (usually a pseudo
1711 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
1712 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1714 val = assign_temp (nt, 0, 0, 1);
1715 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
1716 val = force_not_mem (val);
1717 /* Return the calculated value. */
1718 expand_value_return (val);
1722 /* No hard reg used; calculate value into hard return reg. */
1723 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
1724 expand_value_return (result_rtl);
1728 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1731 expand_nl_goto_receiver (void)
1733 /* Clobber the FP when we get here, so we have to make sure it's
1734 marked as used by this function. */
1735 emit_use (hard_frame_pointer_rtx);
1737 /* Mark the static chain as clobbered here so life information
1738 doesn't get messed up for it. */
1739 emit_clobber (static_chain_rtx);
1741 #ifdef HAVE_nonlocal_goto
1742 if (! HAVE_nonlocal_goto)
1744 /* First adjust our frame pointer to its actual value. It was
1745 previously set to the start of the virtual area corresponding to
1746 the stacked variables when we branched here and now needs to be
1747 adjusted to the actual hardware fp value.
1749 Assignments to virtual registers are converted by
1750 instantiate_virtual_regs into the corresponding assignment
1751 to the underlying register (fp in this case) that makes
1752 the original assignment true.
1753 So the following insn will actually be
1754 decrementing fp by STARTING_FRAME_OFFSET. */
1755 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1757 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1758 if (fixed_regs[ARG_POINTER_REGNUM])
1760 #ifdef ELIMINABLE_REGS
1761 /* If the argument pointer can be eliminated in favor of the
1762 frame pointer, we don't need to restore it. We assume here
1763 that if such an elimination is present, it can always be used.
1764 This is the case on all known machines; if we don't make this
1765 assumption, we do unnecessary saving on many machines. */
1766 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1769 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1770 if (elim_regs[i].from == ARG_POINTER_REGNUM
1771 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
/* Fell off the end of the elimination table: no AP->FP elimination,
   so the argument pointer must be reloaded explicitly.  */
1774 if (i == ARRAY_SIZE (elim_regs))
1777 /* Now restore our arg pointer from the address at which it
1778 was saved in our stack frame. */
1779 emit_move_insn (crtl->args.internal_arg_pointer,
1780 copy_to_reg (get_arg_pointer_save_area ()));
/* Let the target emit any extra receiver code it needs.  */
1785 #ifdef HAVE_nonlocal_goto_receiver
1786 if (HAVE_nonlocal_goto_receiver)
1787 emit_insn (gen_nonlocal_goto_receiver ())
1790 /* We must not allow the code we just generated to be reordered by
1791 scheduling. Specifically, the update of the frame pointer must
1792 happen immediately, not later. */
1793 emit_insn (gen_blockage ());
1796 /* Generate RTL for the automatic variable declaration DECL.
1797 (Other kinds of declarations are simply ignored if seen here.) */
1800 expand_decl (tree decl)
1804 type = TREE_TYPE (decl);
1806 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
1807 type in case this node is used in a reference. */
1808 if (TREE_CODE (decl) == CONST_DECL)
1810 DECL_MODE (decl) = TYPE_MODE (type);
1811 DECL_ALIGN (decl) = TYPE_ALIGN (type);
1812 DECL_SIZE (decl) = TYPE_SIZE (type);
1813 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
1817 /* Otherwise, only automatic variables need any expansion done. Static and
1818 external variables, and external functions, will be handled by
1819 `assemble_variable' (called from finish_decl). TYPE_DECL requires
1820 nothing. PARM_DECLs are handled in `assign_parms'. */
1821 if (TREE_CODE (decl) != VAR_DECL)
1824 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1827 /* Create the RTL representation for the variable. */
1829 if (type == error_mark_node)
/* Error recovery: give the decl a harmless dummy MEM so later code
   that reads DECL_RTL does not crash.  */
1830 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1832 else if (DECL_SIZE (decl) == 0)
1834 /* Variable with incomplete type. */
1836 if (DECL_INITIAL (decl) == 0)
1837 /* Error message was already done; now avoid a crash. */
1838 x = gen_rtx_MEM (BLKmode, const0_rtx);
1840 /* An initializer is going to decide the size of this array.
1841 Until we know the size, represent its address with a reg. */
1842 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
1844 set_mem_attributes (x, decl, 1);
1845 SET_DECL_RTL (decl, x);
1847 else if (use_register_for_decl (decl))
1849 /* Automatic variable that can go in a register. */
1850 int unsignedp = TYPE_UNSIGNED (type);
1851 enum machine_mode reg_mode
1852 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
1854 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
1856 /* Note if the object is a user variable. */
1857 if (!DECL_ARTIFICIAL (decl))
1858 mark_user_reg (DECL_RTL (decl));
/* Record pointer-ness and pointee alignment for alias analysis.  */
1860 if (POINTER_TYPE_P (type))
1861 mark_reg_pointer (DECL_RTL (decl),
1862 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
1871 /* Variable-sized decls are dealt with in the gimplifier. */
1872 gcc_assert (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST);
1874 /* If we previously made RTL for this decl, it must be an array
1875 whose size was determined by the initializer.
1876 The old address was a register; set that register now
1877 to the proper address. */
1878 if (DECL_RTL_SET_P (decl))
1880 gcc_assert (MEM_P (DECL_RTL (decl)));
1881 gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
1882 oldaddr = XEXP (DECL_RTL (decl), 0);
1885 /* Set alignment we actually gave this decl. */
1886 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
1887 : GET_MODE_BITSIZE (DECL_MODE (decl)));
1888 DECL_USER_ALIGN (decl) = 0;
1890 x = assign_temp (decl, 1, 1, 1);
1891 set_mem_attributes (x, decl, 1);
1892 SET_DECL_RTL (decl, x);
/* Patch the placeholder address register created at line 1842 to
   point at the stack slot just assigned.  */
1896 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
1897 if (addr != oldaddr)
1898 emit_move_insn (oldaddr, addr);
1903 /* Emit code to save the current value of stack. */
/* Returns the saved stack pointer (stored into RET by emit_stack_save;
   RET's declaration is elided in this excerpt).  */
1905 expand_stack_save (void)
1909 do_pending_stack_adjust ();
1910 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
1914 /* Emit code to restore the current value of stack. */
/* VAR holds the value produced by a matching stack save; convert it
   to the pointer mode before handing it to emit_stack_restore.  */
1916 expand_stack_restore (tree var)
1918 rtx sa = expand_normal (var);
1920 sa = convert_memory_address (Pmode, sa);
1921 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
1924 /* Do the insertion of a case label into case_list. The labels are
1925 fed to us in descending order from the sorted vector of case labels used
1926 in the tree part of the middle end. So the list we construct is
1927 sorted in ascending order. The bounds on the case range, LOW and HIGH,
1928 are converted to case's index type TYPE. */
/* Returns the new list head; cases that cannot match TYPE's value
   range are dropped, partially-reachable ranges are truncated.  */
1930 static struct case_node *
1931 add_case_node (struct case_node *head, tree type, tree low, tree high,
1932 tree label, alloc_pool case_node_pool)
1934 tree min_value, max_value;
1935 struct case_node *r;
1937 gcc_assert (TREE_CODE (low) == INTEGER_CST);
1938 gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);
/* TYPE_MIN/MAX_VALUE need not be INTEGER_CSTs, hence the checks below.  */
1940 min_value = TYPE_MIN_VALUE (type);
1941 max_value = TYPE_MAX_VALUE (type);
1943 /* If there's no HIGH value, then this is not a case range; it's
1944 just a simple case label. But that's just a degenerate case
1946 If the bounds are equal, turn this into the one-value case. */
1947 if (!high || tree_int_cst_equal (low, high))
1949 /* If the simple case value is unreachable, ignore it. */
1950 if ((TREE_CODE (min_value) == INTEGER_CST
1951 && tree_int_cst_compare (low, min_value) < 0)
1952 || (TREE_CODE (max_value) == INTEGER_CST
1953 && tree_int_cst_compare (low, max_value) > 0))
1955 low = fold_convert (type, low);
1960 /* If the entire case range is unreachable, ignore it. */
1961 if ((TREE_CODE (min_value) == INTEGER_CST
1962 && tree_int_cst_compare (high, min_value) < 0)
1963 || (TREE_CODE (max_value) == INTEGER_CST
1964 && tree_int_cst_compare (low, max_value) > 0))
1967 /* If the lower bound is less than the index type's minimum
1968 value, truncate the range bounds. */
1969 if (TREE_CODE (min_value) == INTEGER_CST
1970 && tree_int_cst_compare (low, min_value) < 0)
1972 low = fold_convert (type, low);
1974 /* If the upper bound is greater than the index type's maximum
1975 value, truncate the range bounds. */
1976 if (TREE_CODE (max_value) == INTEGER_CST
1977 && tree_int_cst_compare (high, max_value) > 0)
1979 high = fold_convert (type, high);
1983 /* Add this label to the chain. Make sure to drop overflow flags. */
1984 r = (struct case_node *) pool_alloc (case_node_pool);
1985 r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
1986 TREE_INT_CST_HIGH (low));
1987 r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
1988 TREE_INT_CST_HIGH (high));
1989 r->code_label = label;
1990 r->parent = r->left = NULL;
1995 /* Maximum number of case bit tests. */
1996 #define MAX_CASE_BIT_TESTS 3
1998 /* By default, enable case bit tests on targets with ashlsi3. */
1999 #ifndef CASE_USE_BIT_TESTS
2000 #define CASE_USE_BIT_TESTS (optab_handler (ashl_optab, word_mode)->insn_code \
2001 != CODE_FOR_nothing)
/* i.e. only when the target can left-shift in word_mode.  */
2005 /* A case_bit_test represents a set of case nodes that may be
2006 selected from using a bit-wise comparison. HI and LO hold
2007 the integer to be tested against, LABEL contains the label
2008 to jump to upon success and BITS counts the number of case
2009 nodes handled by this test, typically the number of bits
2012 struct case_bit_test
2020 /* Determine whether "1 << x" is relatively cheap in word_mode. */
/* The answer is cached in CHEAP after the first query; rtx_cost is
   target-dependent but stable for the compilation.  */
2023 bool lshift_cheap_p (void)
2025 static bool init = false;
2026 static bool cheap = true;
/* 10000 looks like an arbitrary register number used only for
   costing -- presumably high enough to be a pseudo, not a hard
   register; TODO confirm.  */
2030 rtx reg = gen_rtx_REG (word_mode, 10000);
2031 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET,
2032 optimize_insn_for_speed_p ());
2033 cheap = cost < COSTS_N_INSNS (3);
2040 /* Comparison function for qsort to order bit tests by decreasing
2041 number of case nodes, i.e. the node with the most cases gets
2045 case_bit_test_cmp (const void *p1, const void *p2)
2047 const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
2048 const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
/* Sort by descending BITS count first.  */
2050 if (d2->bits != d1->bits)
2051 return d2->bits - d1->bits;
2053 /* Stabilize the sort. */
/* qsort makes no stability guarantee, so break ties deterministically
   by label number.  */
2054 return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
2057 /* Expand a switch statement by a short sequence of bit-wise
2058 comparisons. "switch(x)" is effectively converted into
2059 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2062 INDEX_EXPR is the value being switched on, which is of
2063 type INDEX_TYPE. MINVAL is the lowest case value of in
2064 the case nodes, of INDEX_TYPE type, and RANGE is highest
2065 value minus MINVAL, also of type INDEX_TYPE. NODES is
2066 the set of case nodes, and DEFAULT_LABEL is the label to
2067 branch to should none of the cases match.
2069 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2073 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2074 tree range, case_node_ptr nodes, rtx default_label)
2076 struct case_bit_test test[MAX_CASE_BIT_TESTS];
2077 enum machine_mode mode;
2078 rtx expr, index, label;
2079 unsigned int i,j,lo,hi;
2080 struct case_node *n;
2084 for (n = nodes; n; n = n->right)
2086 label = label_rtx (n->code_label);
2087 for (i = 0; i < count; i++)
2088 if (label == test[i].label)
2093 gcc_assert (count < MAX_CASE_BIT_TESTS);
2096 test[i].label = label;
2103 lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2104 n->low, minval), 1);
2105 hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2106 n->high, minval), 1);
2107 for (j = lo; j <= hi; j++)
2108 if (j >= HOST_BITS_PER_WIDE_INT)
2109 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2111 test[i].lo |= (HOST_WIDE_INT) 1 << j;
2114 qsort (test, count, sizeof(*test), case_bit_test_cmp);
2116 index_expr = fold_build2 (MINUS_EXPR, index_type,
2117 fold_convert (index_type, index_expr),
2118 fold_convert (index_type, minval));
2119 index = expand_normal (index_expr);
2120 do_pending_stack_adjust ();
2122 mode = TYPE_MODE (index_type);
2123 expr = expand_normal (range);
2125 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2128 index = convert_to_mode (word_mode, index, 0);
2129 index = expand_binop (word_mode, ashl_optab, const1_rtx,
2130 index, NULL_RTX, 1, OPTAB_WIDEN);
2132 for (i = 0; i < count; i++)
2134 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2135 expr = expand_binop (word_mode, and_optab, index, expr,
2136 NULL_RTX, 1, OPTAB_WIDEN);
2137 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2138 word_mode, 1, test[i].label);
2142 emit_jump (default_label);
/* Fallback definitions so the availability tests in expand_case
   compile on targets that do not provide these patterns.  */
2146 #define HAVE_casesi 0
2149 #ifndef HAVE_tablejump
2150 #define HAVE_tablejump 0
2153 /* Terminate a case (Pascal/Ada) or switch (C) statement
2154 in which ORIG_INDEX is the expression to be tested.
2155 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2156 type as given in the source before any compiler conversions.
2157 Generate the code to test it and jump to the right place. */
/* Dispatch strategies, chosen below in order: bit tests, a binary
   decision tree of compare-and-branch, casesi, or a jump table.  */
2160 expand_case (tree exp)
2162 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2163 rtx default_label = 0;
2164 struct case_node *n;
2165 unsigned int count, uniq;
2171 rtx before_case, end, lab;
2173 tree vec = SWITCH_LABELS (exp);
2174 tree orig_type = TREE_TYPE (exp);
2175 tree index_expr = SWITCH_COND (exp);
2176 tree index_type = TREE_TYPE (index_expr);
2177 int unsignedp = TYPE_UNSIGNED (index_type);
2179 /* The insn after which the case dispatch should finally
2180 be emitted. Zero for a dummy. */
2183 /* A list of case labels; it is first built as a list and it may then
2184 be rearranged into a nearly balanced binary tree. */
2185 struct case_node *case_list = 0;
2187 /* Label to jump to if no case matches. */
2188 tree default_label_decl = NULL_TREE;
2190 alloc_pool case_node_pool = create_alloc_pool ("struct case_node pool",
2191 sizeof (struct case_node),
2194 /* The switch body is lowered in gimplify.c, we should never have
2195 switches with a non-NULL SWITCH_BODY here. */
2196 gcc_assert (!SWITCH_BODY (exp));
2197 gcc_assert (SWITCH_LABELS (exp));
2199 do_pending_stack_adjust ();
2201 /* An ERROR_MARK occurs for various reasons including invalid data type. */
2202 if (index_type != error_mark_node)
2205 bitmap label_bitmap;
2206 int vl = TREE_VEC_LENGTH (vec);
2208 /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
2209 expressions being INTEGER_CST. */
2210 gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
2212 /* The default case, if ever taken, is at the end of TREE_VEC. */
2213 elt = TREE_VEC_ELT (vec, vl - 1);
2214 if (!CASE_LOW (elt) && !CASE_HIGH (elt))
2216 default_label_decl = CASE_LABEL (elt);
/* Walk the label vector backwards so the built list ends up in
   ascending order (see add_case_node).  */
2220 for (i = vl - 1; i >= 0; --i)
2223 elt = TREE_VEC_ELT (vec, i);
2225 low = CASE_LOW (elt);
2227 high = CASE_HIGH (elt);
2229 /* Discard empty ranges. */
2230 if (high && tree_int_cst_lt (high, low))
2233 case_list = add_case_node (case_list, index_type, low, high,
2234 CASE_LABEL (elt), case_node_pool);
2238 before_case = start = get_last_insn ();
2239 if (default_label_decl)
2240 default_label = label_rtx (default_label_decl);
2242 /* Get upper and lower bounds of case values. */
2246 label_bitmap = BITMAP_ALLOC (NULL);
2247 for (n = case_list; n; n = n->right)
2249 /* Count the elements and track the largest and smallest
2250 of them (treating them as signed even if they are not). */
2258 if (tree_int_cst_lt (n->low, minval))
2260 if (tree_int_cst_lt (maxval, n->high))
2263 /* A range counts double, since it requires two compares. */
2264 if (! tree_int_cst_equal (n->low, n->high))
2267 /* If we have not seen this label yet, then increase the
2268 number of unique case node targets seen. */
2269 lab = label_rtx (n->code_label);
2270 if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
2272 bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
2277 BITMAP_FREE (label_bitmap);
2279 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
2280 destination, such as one with a default case only. However,
2281 it doesn't remove cases that are out of range for the switch
2282 type, so we may still get a zero here. */
2286 emit_jump (default_label);
2287 free_alloc_pool (case_node_pool);
2291 /* Compute span of values. */
2292 range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);
2294 /* Try implementing this switch statement by a short sequence of
2295 bit-wise comparisons. However, we let the binary-tree case
2296 below handle constant index expressions. */
2297 if (CASE_USE_BIT_TESTS
2298 && ! TREE_CONSTANT (index_expr)
2299 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
2300 && compare_tree_int (range, 0) > 0
2301 && lshift_cheap_p ()
2302 && ((uniq == 1 && count >= 3)
2303 || (uniq == 2 && count >= 5)
2304 || (uniq == 3 && count >= 6)))
2306 /* Optimize the case where all the case values fit in a
2307 word without having to subtract MINVAL. In this case,
2308 we can optimize away the subtraction. */
2309 if (compare_tree_int (minval, 0) > 0
2310 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
2312 minval = build_int_cst (index_type, 0);
2315 emit_case_bit_tests (index_type, index_expr, minval, range,
2316 case_list, default_label);
2319 /* If range of values is much bigger than number of values,
2320 make a sequence of conditional branches instead of a dispatch.
2321 If the switch-index is a constant, do it this way
2322 because we can optimize it. */
2324 else if (count < case_values_threshold ()
2325 || compare_tree_int (range,
2326 (optimize_insn_for_size_p () ? 3 : 10) * count) > 0
2327 /* RANGE may be signed, and really large ranges will show up
2328 as negative numbers. */
2329 || compare_tree_int (range, 0) < 0
2330 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
2333 || !flag_jump_tables
2334 || TREE_CONSTANT (index_expr)
2335 /* If neither casesi or tablejump is available, we can
2336 only go this way. */
2337 || (!HAVE_casesi && !HAVE_tablejump))
2339 index = expand_normal (index_expr);
2341 /* If the index is a short or char that we do not have
2342 an insn to handle comparisons directly, convert it to
2343 a full integer now, rather than letting each comparison
2344 generate the conversion. */
2346 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
2347 && ! have_insn_for (COMPARE, GET_MODE (index)))
2349 enum machine_mode wider_mode;
2350 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
2351 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
2352 if (have_insn_for (COMPARE, wider_mode))
2354 index = convert_to_mode (wider_mode, index, unsignedp);
2359 do_pending_stack_adjust ();
/* The decision tree compares INDEX repeatedly, so pin it in a reg.  */
2362 index = copy_to_reg (index);
2364 /* We generate a binary decision tree to select the
2365 appropriate target code. This is done as follows:
2367 The list of cases is rearranged into a binary tree,
2368 nearly optimal assuming equal probability for each case.
2370 The tree is transformed into RTL, eliminating
2371 redundant test conditions at the same time.
2373 If program flow could reach the end of the
2374 decision tree an unconditional jump to the
2375 default code is emitted. */
2378 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
2379 && estimate_case_costs (case_list));
2380 balance_case_nodes (&case_list, NULL);
2381 emit_case_nodes (index, case_list, default_label, index_type);
2383 emit_jump (default_label);
/* Otherwise, emit a dispatch: casesi if available, else tablejump.  */
2387 rtx fallback_label = label_rtx (case_list->code_label);
2388 table_label = gen_label_rtx ();
2389 if (! try_casesi (index_type, index_expr, minval, range,
2390 table_label, default_label, fallback_label))
2394 /* Index jumptables from zero for suitable values of
2395 minval to avoid a subtraction. */
2396 if (optimize_insn_for_speed_p ()
2397 && compare_tree_int (minval, 0) > 0
2398 && compare_tree_int (minval, 3) < 0)
2400 minval = build_int_cst (index_type, 0);
2404 ok = try_tablejump (index_type, index_expr, minval, range,
2405 table_label, default_label);
2409 /* Get table of labels to jump to, in order of case index. */
2411 ncases = tree_low_cst (range, 0) + 1;
2412 labelvec = XALLOCAVEC (rtx, ncases);
2413 memset (labelvec, 0, ncases * sizeof (rtx));
2415 for (n = case_list; n; n = n->right)
2417 /* Compute the low and high bounds relative to the minimum
2418 value since that should fit in a HOST_WIDE_INT while the
2419 actual values may not. */
2421 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2422 n->low, minval), 1);
2423 HOST_WIDE_INT i_high
2424 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2425 n->high, minval), 1);
2428 for (i = i_low; i <= i_high; i ++)
2430 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
2433 /* Fill in the gaps with the default. We may have gaps at
2434 the beginning if we tried to avoid the minval subtraction,
2435 so substitute some label even if the default label was
2436 deemed unreachable. */
2438 default_label = fallback_label;
2439 for (i = 0; i < ncases; i++)
2440 if (labelvec[i] == 0)
2441 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
2443 /* Output the table. */
2444 emit_label (table_label);
/* Use a PC-relative table when required or when generating PIC.  */
2446 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
2447 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
2448 gen_rtx_LABEL_REF (Pmode, table_label),
2449 gen_rtvec_v (ncases, labelvec),
2450 const0_rtx, const0_rtx));
2452 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
2453 gen_rtvec_v (ncases, labelvec)));
2455 /* Record no drop-through after the table. */
/* Move the dispatch code emitted above back to where it belongs.  */
2459 before_case = NEXT_INSN (before_case);
2460 end = get_last_insn ();
2461 reorder_insns (before_case, end, start);
2465 free_alloc_pool (case_node_pool);
2468 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. */
2471 do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
2474 do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
2475 NULL_RTX, NULL_RTX, label, -1);
2478 /* Not all case values are encountered equally. This function
2479 uses a heuristic to weight case labels, in cases where that
2480 looks like a reasonable thing to do.
2482 Right now, all we try to guess is text, and we establish the
following weights (see the cost-table initialization below):
2485 	chars above space:	16
2494 If we find any cases in the switch that are not either -1 or in the range
2495 of valid ASCII characters, or are control characters other than those
2496 commonly used with "\", don't treat this switch scanning text.
2498 Return 1 if these nodes are suitable for cost estimation, otherwise
/* Decide whether the case values hanging off NODE look like ASCII text,
   and if so make the shared character-frequency cost table usable by
   balance_case_nodes.  Returns nonzero when the cost table may be used
   for these nodes, zero otherwise (any value below -1 or above 127, or a
   range crossing a "strange" control character, disqualifies the list).

   NOTE(review): physical lines appear to have been dropped from this
   region during extraction (the embedded sequence numbers jump; the
   `static int' line, the declarations of `n' and `i', braces, the
   ISALNUM branch of the classification chain, and the return statements
   are missing) -- confirm against a pristine copy of stmt.c before
   relying on the text below.  */
2502 estimate_case_costs (case_node_ptr node)
2504 tree min_ascii = integer_minus_one_node;
2505 tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
2509 /* If we haven't already made the cost table, make it now. Note that the
2510 lower bound of the table is -1, not zero. */
2512 if (! cost_table_initialized)
2514 cost_table_initialized = 1;
/* Classify each of the 128 ASCII codes once; the first initialization is
   shared by every switch statement expanded afterwards.  */
2516 for (i = 0; i < 128; i++)
2519 COST_TABLE (i) = 16;
2520 else if (ISPUNCT (i))
2522 else if (ISCNTRL (i))
2523 COST_TABLE (i) = -1;
/* Common escape characters get explicit (lower) weights, overriding the
   ISCNTRL classification above.  */
2526 COST_TABLE (' ') = 8;
2527 COST_TABLE ('\t') = 4;
2528 COST_TABLE ('\0') = 4;
2529 COST_TABLE ('\n') = 2;
2530 COST_TABLE ('\f') = 1;
2531 COST_TABLE ('\v') = 1;
2532 COST_TABLE ('\b') = 1;
2535 /* See if all the case expressions look like text. It is text if the
2536 constant is >= -1 and the highest constant is <= 127. Do all comparisons
2537 as signed arithmetic since we don't want to ever access cost_table with a
2538 value less than -1. Also check that none of the constants in a range
2539 are strange control characters. */
2541 for (n = node; n; n = n->right)
2543 if (tree_int_cst_lt (n->low, min_ascii)
2544 || tree_int_cst_lt (max_ascii, n->high))
2547 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
2548 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2549 if (COST_TABLE (i) < 0)
2553 /* All interesting values are within the range of interesting
2554 ASCII characters. */
2558 /* Take an ordered list of case nodes
2559 and transform them into a near optimal binary tree,
2560 on the assumption that any target code selection value is as
2561 likely as any other.
2563 The transformation is performed by splitting the ordered
2564 list into two equal sections plus a pivot. The parts are
2565 then attached to the pivot as left and right branches. Each
2566 branch is then transformed recursively. */
/* Transform the ordered, right-linked list at *HEAD into a near-optimal
   binary search tree, attaching PARENT as the parent of the resulting
   root.  When the character cost table is usable the split point bisects
   total cost; otherwise ranges count as 2 and single values as 1.  The
   function recurses on both halves and fills in the `parent' back-links
   that node_has_low_bound/node_has_high_bound later walk.

   NOTE(review): physical lines appear to have been dropped from this
   function during extraction (the embedded sequence numbers jump; the
   `static void' line, local declarations of np/npp/i/ranges/cost, braces
   and several conditions are missing) -- confirm against a pristine copy
   of stmt.c before relying on the text below.  */
2569 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
2582 /* Count the number of entries on branch. Also count the ranges. */
2586 if (!tree_int_cst_equal (np->low, np->high))
2590 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
2594 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
2602 /* Split this list if it is long enough for that to help. */
2607 /* Find the place in the list that bisects the list's total cost,
2608 Here I gets half the total cost. */
2613 /* Skip nodes while their cost does not reach that amount. */
2614 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2615 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
2616 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
2619 npp = &(*npp)->right;
2624 /* Leave this branch lopsided, but optimize left-hand
2625 side and fill in `parent' fields for right-hand side. */
2627 np->parent = parent;
2628 balance_case_nodes (&np->left, np);
2629 for (; np->right; np = np->right)
2630 np->right->parent = np;
2634 /* If there are just three nodes, split at the middle one. */
2636 npp = &(*npp)->right;
2639 /* Find the place in the list that bisects the list's total cost,
2640 where ranges count as 2.
2641 Here I gets half the total cost. */
2642 i = (i + ranges + 1) / 2;
2645 /* Skip nodes while their cost does not reach that amount. */
2646 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2651 npp = &(*npp)->right;
2656 np->parent = parent;
2659 /* Optimize each of the two split parts.  Both recursive calls see NP
   as the parent of their respective subtree roots. */
2660 balance_case_nodes (&np->left, np);
2661 balance_case_nodes (&np->right, np);
2665 /* Else leave this branch as one level,
2666 but fill in `parent' fields. */
2668 np->parent = parent;
2669 for (; np->right; np = np->right)
2670 np->right->parent = np;
2675 /* Search the parent sections of the case node tree
2676 to see if a test for the lower bound of NODE would be redundant.
2677 INDEX_TYPE is the type of the index expression.
2679 The instructions to generate the case decision tree are
2680 output in the same order as nodes are processed so it is
2681 known that if a parent node checks the range of the current
2682 node minus one that the current node is bounded at its lower
2683 span. Thus the test would be redundant. */
/* Return nonzero if a test for the lower bound of NODE would be
   redundant: either NODE->low already equals the minimum value of
   INDEX_TYPE, or some ancestor in the balanced case tree already tested
   exactly NODE->low - 1 as its high bound (so control reaching NODE
   implies the index is >= NODE->low).

   NOTE(review): physical lines appear to have been dropped during
   extraction (the `static int' line, the `tree low_minus_one;'
   declaration, the `if (node->left)' early exit, braces and the return
   statements are missing; sequence numbers jump) -- confirm against a
   pristine copy of stmt.c.  */
2686 node_has_low_bound (case_node_ptr node, tree index_type)
2689 case_node_ptr pnode;
2691 /* If the lower bound of this node is the lowest value in the index type,
2692 we need not test it. */
2694 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2697 /* If this node has a left branch, the value at the left must be less
2698 than that at this node, so it cannot be bounded at the bottom and
2699 we need not bother testing any further. */
2704 low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2706 build_int_cst (TREE_TYPE (node->low), 1));
2708 /* If the subtraction above overflowed, we can't verify anything.
2709 Otherwise, look for a parent that tests our value - 1. */
2711 if (! tree_int_cst_lt (low_minus_one, node->low))
/* Walk the `parent' links filled in by balance_case_nodes. */
2714 for (pnode = node->parent; pnode; pnode = pnode->parent)
2715 if (tree_int_cst_equal (low_minus_one, pnode->high))
2721 /* Search the parent sections of the case node tree
2722 to see if a test for the upper bound of NODE would be redundant.
2723 INDEX_TYPE is the type of the index expression.
2725 The instructions to generate the case decision tree are
2726 output in the same order as nodes are processed so it is
2727 known that if a parent node checks the range of the current
2728 node plus one that the current node is bounded at its upper
2729 span. Thus the test would be redundant. */
/* Return nonzero if a test for the upper bound of NODE would be
   redundant: INDEX_TYPE has no maximum, NODE->high already equals the
   type's maximum, or some ancestor in the case tree already tested
   exactly NODE->high + 1 as its low bound (mirror image of
   node_has_low_bound).

   NOTE(review): physical lines appear to have been dropped during
   extraction (the `static int' line, the `tree high_plus_one;'
   declaration, the `if (node->right)' early exit, braces and the return
   statements are missing; sequence numbers jump) -- confirm against a
   pristine copy of stmt.c.  */
2732 node_has_high_bound (case_node_ptr node, tree index_type)
2735 case_node_ptr pnode;
2737 /* If there is no upper bound, obviously no test is needed. */
2739 if (TYPE_MAX_VALUE (index_type) == NULL)
2742 /* If the upper bound of this node is the highest value in the type
2743 of the index expression, we need not test against it. */
2745 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2748 /* If this node has a right branch, the value at the right must be greater
2749 than that at this node, so it cannot be bounded at the top and
2750 we need not bother testing any further. */
2755 high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2757 build_int_cst (TREE_TYPE (node->high), 1));
2759 /* If the addition above overflowed, we can't verify anything.
2760 Otherwise, look for a parent that tests our value + 1. */
2762 if (! tree_int_cst_lt (node->high, high_plus_one))
/* Walk the `parent' links filled in by balance_case_nodes. */
2765 for (pnode = node->parent; pnode; pnode = pnode->parent)
2766 if (tree_int_cst_equal (high_plus_one, pnode->low))
2772 /* Search the parent sections of the
2773 case node tree to see if both tests for the upper and lower
2774 bounds of NODE would be redundant. */
2777 node_is_bounded (case_node_ptr node, tree index_type)
2779 return (node_has_low_bound (node, index_type)
2780 && node_has_high_bound (node, index_type));
2783 /* Emit step-by-step code to select a case for the value of INDEX.
2784 The thus generated decision tree follows the form of the
2785 case-node binary tree NODE, whose nodes represent test conditions.
2786 INDEX_TYPE is the type of the index of the switch.
2788 Care is taken to prune redundant tests from the decision tree
2789 by detecting any boundary conditions already checked by
2790 emitted rtx. (See node_has_high_bound, node_has_low_bound
2791 and node_is_bounded, above.)
2793 Where the test conditions can be shown to be redundant we emit
2794 an unconditional jump to the target code. As a further
2795 optimization, the subordinates of a tree node are examined to
2796 check for bounded nodes. In this case conditional and/or
2797 unconditional jumps as a result of the boundary check for the
2798 current node are arranged to target the subordinates associated
2799 code for out of bound conditions on the current node.
2801 We can assume that when control reaches the code generated here,
2802 the index value has already been compared with the parents
2803 of this node, and determined to be on the same side of each parent
2804 as this node is. Thus, if this node tests for the value 51,
2805 and a parent tested for 52, we don't need to consider
2806 the possibility of a value greater than 51. If another parent
2807 tests for the value 50, then this node need not test anything. */
/* Recursively emit the compare-and-branch decision tree for the balanced
   case tree NODE, dispatching on the run-time value of INDEX (an rtx of
   INDEX_TYPE's mode, or VOIDmode if it folded to a constant).  Each
   single-valued node emits an equality jump; each range node emits
   ordered compares; node_has_low_bound / node_has_high_bound /
   node_is_bounded prune tests that ancestors already performed, and
   anything not matched falls through toward DEFAULT_LABEL.

   NOTE(review): many physical lines appear to have been dropped from
   this function during extraction (the embedded sequence numbers jump;
   the `static void' line, the trailing `tree index_type' parameter,
   braces, several convert_modes arguments and else-branches are
   missing) -- confirm every claim below against a pristine stmt.c.  */
2810 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
2813 /* If INDEX has an unsigned type, we must make unsigned branches. */
2814 int unsignedp = TYPE_UNSIGNED (index_type);
2815 enum machine_mode mode = GET_MODE (index);
2816 enum machine_mode imode = TYPE_MODE (index_type);
2818 /* Handle indices detected as constant during RTL expansion. */
2819 if (mode == VOIDmode)
2822 /* See if our parents have already tested everything for us.
2823 If they have, emit an unconditional jump for this node. */
2824 if (node_is_bounded (node, index_type))
2825 emit_jump (label_rtx (node->code_label));
2827 else if (tree_int_cst_equal (node->low, node->high))
2829 /* Node is single valued. First see if the index expression matches
2830 this node and then check our children, if any. */
2832 do_jump_if_equal (mode, index,
2833 convert_modes (mode, imode,
2834 expand_normal (node->low),
2836 label_rtx (node->code_label), unsignedp);
2838 if (node->right != 0 && node->left != 0)
2840 /* This node has children on both sides.
2841 Dispatch to one side or the other
2842 by comparing the index value with this node's value.
2843 If one subtree is bounded, check that one first,
2844 so we can avoid real branches in the tree. */
2846 if (node_is_bounded (node->right, index_type))
2848 emit_cmp_and_jump_insns (index,
2851 expand_normal (node->high),
2853 GT, NULL_RTX, mode, unsignedp,
2854 label_rtx (node->right->code_label));
2855 emit_case_nodes (index, node->left, default_label, index_type);
2858 else if (node_is_bounded (node->left, index_type))
2860 emit_cmp_and_jump_insns (index,
2863 expand_normal (node->high),
2865 LT, NULL_RTX, mode, unsignedp,
2866 label_rtx (node->left->code_label));
2867 emit_case_nodes (index, node->right, default_label, index_type);
2870 /* If both children are single-valued cases with no
2871 children, finish up all the work. This way, we can save
2872 one ordered comparison. */
2873 else if (tree_int_cst_equal (node->right->low, node->right->high)
2874 && node->right->left == 0
2875 && node->right->right == 0
2876 && tree_int_cst_equal (node->left->low, node->left->high)
2877 && node->left->left == 0
2878 && node->left->right == 0)
2880 /* Neither node is bounded. First distinguish the two sides;
2881 then emit the code for one side at a time. */
2883 /* See if the value matches what the right hand side
2885 do_jump_if_equal (mode, index,
2886 convert_modes (mode, imode,
2887 expand_normal (node->right->low),
2889 label_rtx (node->right->code_label),
2892 /* See if the value matches what the left hand side
2894 do_jump_if_equal (mode, index,
2895 convert_modes (mode, imode,
2896 expand_normal (node->left->low),
2898 label_rtx (node->left->code_label),
2904 /* Neither node is bounded. First distinguish the two sides;
2905 then emit the code for one side at a time. */
2907 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
2909 /* See if the value is on the right. */
2910 emit_cmp_and_jump_insns (index,
2913 expand_normal (node->high),
2915 GT, NULL_RTX, mode, unsignedp,
2916 label_rtx (test_label));
2918 /* Value must be on the left.
2919 Handle the left-hand subtree. */
2920 emit_case_nodes (index, node->left, default_label, index_type);
2921 /* If left-hand subtree does nothing,
2924 emit_jump (default_label);
2926 /* Code branches here for the right-hand subtree. */
2927 expand_label (test_label);
2928 emit_case_nodes (index, node->right, default_label, index_type);
2932 else if (node->right != 0 && node->left == 0)
2934 /* Here we have a right child but no left so we issue a conditional
2935 branch to default and process the right child.
2937 Omit the conditional branch to default if the right child
2938 does not have any children and is single valued; it would
2939 cost too much space to save so little time. */
2941 if (node->right->right || node->right->left
2942 || !tree_int_cst_equal (node->right->low, node->right->high))
2944 if (!node_has_low_bound (node, index_type))
2946 emit_cmp_and_jump_insns (index,
2949 expand_normal (node->high),
2951 LT, NULL_RTX, mode, unsignedp,
2955 emit_case_nodes (index, node->right, default_label, index_type);
2958 /* We cannot process node->right normally
2959 since we haven't ruled out the numbers less than
2960 this node's value. So handle node->right explicitly. */
2961 do_jump_if_equal (mode, index,
2964 expand_normal (node->right->low),
2966 label_rtx (node->right->code_label), unsignedp);
2969 else if (node->right == 0 && node->left != 0)
2971 /* Just one subtree, on the left. */
2972 if (node->left->left || node->left->right
2973 || !tree_int_cst_equal (node->left->low, node->left->high))
2975 if (!node_has_high_bound (node, index_type))
2977 emit_cmp_and_jump_insns (index,
2980 expand_normal (node->high),
2982 GT, NULL_RTX, mode, unsignedp,
2986 emit_case_nodes (index, node->left, default_label, index_type);
2989 /* We cannot process node->left normally
2990 since we haven't ruled out the numbers less than
2991 this node's value. So handle node->left explicitly. */
2992 do_jump_if_equal (mode, index,
2995 expand_normal (node->left->low),
2997 label_rtx (node->left->code_label), unsignedp);
3002 /* Node is a range. These cases are very similar to those for a single
3003 value, except that we do not start by testing whether this node
3004 is the one to branch to. */
3006 if (node->right != 0 && node->left != 0)
3008 /* Node has subtrees on both sides.
3009 If the right-hand subtree is bounded,
3010 test for it first, since we can go straight there.
3011 Otherwise, we need to make a branch in the control structure,
3012 then handle the two subtrees. */
3013 tree test_label = 0;
3015 if (node_is_bounded (node->right, index_type))
3016 /* Right hand node is fully bounded so we can eliminate any
3017 testing and branch directly to the target code. */
3018 emit_cmp_and_jump_insns (index,
3021 expand_normal (node->high),
3023 GT, NULL_RTX, mode, unsignedp,
3024 label_rtx (node->right->code_label));
3027 /* Right hand node requires testing.
3028 Branch to a label where we will handle it later. */
3030 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3031 emit_cmp_and_jump_insns (index,
3034 expand_normal (node->high),
3036 GT, NULL_RTX, mode, unsignedp,
3037 label_rtx (test_label));
3040 /* Value belongs to this node or to the left-hand subtree. */
3042 emit_cmp_and_jump_insns (index,
3045 expand_normal (node->low),
3047 GE, NULL_RTX, mode, unsignedp,
3048 label_rtx (node->code_label));
3050 /* Handle the left-hand subtree. */
3051 emit_case_nodes (index, node->left, default_label, index_type);
3053 /* If right node had to be handled later, do that now. */
3057 /* If the left-hand subtree fell through,
3058 don't let it fall into the right-hand subtree. */
3060 emit_jump (default_label);
3062 expand_label (test_label);
3063 emit_case_nodes (index, node->right, default_label, index_type);
3067 else if (node->right != 0 && node->left == 0)
3069 /* Deal with values to the left of this node,
3070 if they are possible. */
3071 if (!node_has_low_bound (node, index_type))
3073 emit_cmp_and_jump_insns (index,
3076 expand_normal (node->low),
3078 LT, NULL_RTX, mode, unsignedp,
3082 /* Value belongs to this node or to the right-hand subtree. */
3084 emit_cmp_and_jump_insns (index,
3087 expand_normal (node->high),
3089 LE, NULL_RTX, mode, unsignedp,
3090 label_rtx (node->code_label));
3092 emit_case_nodes (index, node->right, default_label, index_type);
3095 else if (node->right == 0 && node->left != 0)
3097 /* Deal with values to the right of this node,
3098 if they are possible. */
3099 if (!node_has_high_bound (node, index_type))
3101 emit_cmp_and_jump_insns (index,
3104 expand_normal (node->high),
3106 GT, NULL_RTX, mode, unsignedp,
3110 /* Value belongs to this node or to the left-hand subtree. */
3112 emit_cmp_and_jump_insns (index,
3115 expand_normal (node->low),
3117 GE, NULL_RTX, mode, unsignedp,
3118 label_rtx (node->code_label));
3120 emit_case_nodes (index, node->left, default_label, index_type);
3125 /* Node has no children so we check low and high bounds to remove
3126 redundant tests. Only one of the bounds can exist,
3127 since otherwise this node is bounded--a case tested already. */
3128 int high_bound = node_has_high_bound (node, index_type);
3129 int low_bound = node_has_low_bound (node, index_type);
3131 if (!high_bound && low_bound)
3133 emit_cmp_and_jump_insns (index,
3136 expand_normal (node->high),
3138 GT, NULL_RTX, mode, unsignedp,
3142 else if (!low_bound && high_bound)
3144 emit_cmp_and_jump_insns (index,
3147 expand_normal (node->low),
3149 LT, NULL_RTX, mode, unsignedp,
3152 else if (!low_bound && !high_bound)
3154 /* Widen LOW and HIGH to the same width as INDEX. */
3155 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3156 tree low = build1 (CONVERT_EXPR, type, node->low);
3157 tree high = build1 (CONVERT_EXPR, type, node->high);
3158 rtx low_rtx, new_index, new_bound;
3160 /* Instead of doing two branches, emit one unsigned branch for
3161 (index-low) > (high-low). */
3162 low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
3163 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
3164 NULL_RTX, unsignedp,
3166 new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
3168 NULL_RTX, mode, EXPAND_NORMAL);
3170 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
3171 mode, 1, default_label);
3174 emit_jump (label_rtx (node->code_label));