1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
31 #include "hard-reg-set.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
41 #include "typeclass.h"
44 #include "protector.h"
46 #define CEIL(x,y) (((x) + (y) - 1) / (y))
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first */
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
66 #define STACK_PUSH_CODE PRE_INC
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
75 /* If this is nonzero, we do not bother generating VOLATILE
76 around volatile memory references, and we are willing to
77 output indirect addresses. If cse is to follow, we reject
78 indirect addresses so a useful potential cse is generated;
79 if it is used only once, instruction combination will produce
80 the same indirect address eventually. */
83 /* Nonzero to generate code for all the subroutines within an
84 expression before generating the upper levels of the expression.
85 Nowadays this is never zero. */
86 int do_preexpand_calls = 1;
88 /* Number of units that we should eventually pop off the stack.
89 These are the arguments to function calls that have already returned. */
90 int pending_stack_adjust;
92 /* Under some ABIs, it is the caller's responsibility to pop arguments
93 pushed for function calls. A naive implementation would simply pop
94 the arguments immediately after each call. However, if several
95 function calls are made in a row, it is typically cheaper to pop
96 all the arguments after all of the calls are complete since a
97 single pop instruction can be used. Therefore, GCC attempts to
98 defer popping the arguments until absolutely necessary. (For
99 example, at the end of a conditional, the arguments must be popped,
100 since code outside the conditional won't know whether or not the
101 arguments need to be popped.)
103 When INHIBIT_DEFER_POP is non-zero, however, the compiler does not
104 attempt to defer pops. Instead, the stack is popped immediately
105 after each call. Rather then setting this variable directly, use
106 NO_DEFER_POP and OK_DEFER_POP. */
107 int inhibit_defer_pop;
109 /* Nonzero means __builtin_saveregs has already been done in this function.
110 The value is the pseudoreg containing the value __builtin_saveregs
112 static rtx saveregs_value;
114 /* Similarly for __builtin_apply_args. */
115 static rtx apply_args_value;
117 /* Don't check memory usage, since code is being emitted to check a memory
118 usage. Used when current_function_check_memory_usage is true, to avoid
119 infinite recursion. */
120 static int in_check_memory_usage;
122 /* Postincrements that still need to be expanded. */
123 static rtx pending_chain;
125 /* This structure is used by move_by_pieces to describe the move to
127 struct move_by_pieces
137 int explicit_inc_from;
144 /* This structure is used by clear_by_pieces to describe the clear to
147 struct clear_by_pieces
159 extern struct obstack permanent_obstack;
160 extern rtx arg_pointer_save_area;
162 static rtx get_push_address PROTO ((int));
164 static rtx enqueue_insn PROTO((rtx, rtx));
165 static void init_queue PROTO((void));
166 static int move_by_pieces_ninsns PROTO((unsigned int, int));
167 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
168 struct move_by_pieces *));
169 static void clear_by_pieces PROTO((rtx, int, int));
170 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
171 struct clear_by_pieces *));
172 static int is_zeros_p PROTO((tree));
173 static int mostly_zeros_p PROTO((tree));
174 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
176 static void store_constructor PROTO((tree, rtx, int));
177 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
178 enum machine_mode, int, int,
180 static enum memory_use_mode
181 get_memory_usage_from_modifier PROTO((enum expand_modifier));
182 static tree save_noncopied_parts PROTO((tree, tree));
183 static tree init_noncopied_parts PROTO((tree, tree));
184 static int safe_from_p PROTO((rtx, tree, int));
185 static int fixed_type_p PROTO((tree));
186 static rtx var_rtx PROTO((tree));
187 static int get_pointer_alignment PROTO((tree, unsigned));
188 static tree string_constant PROTO((tree, tree *));
189 static tree c_strlen PROTO((tree));
190 static rtx get_memory_rtx PROTO((tree));
191 static rtx expand_builtin PROTO((tree, rtx, rtx,
192 enum machine_mode, int));
193 static int apply_args_size PROTO((void));
194 static int apply_result_size PROTO((void));
195 static rtx result_vector PROTO((int, rtx));
196 static rtx expand_builtin_setjmp PROTO((tree, rtx));
197 static rtx expand_builtin_apply_args PROTO((void));
198 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
199 static void expand_builtin_return PROTO((rtx));
200 static rtx expand_increment PROTO((tree, int, int));
201 static void preexpand_calls PROTO((tree));
202 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
203 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
204 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
205 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
206 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
208 /* Record for each mode whether we can move a register directly to or
209 from an object of that mode in memory. If we can't, we won't try
210 to use that mode directly when accessing a field of that mode. */
212 static char direct_load[NUM_MACHINE_MODES];
213 static char direct_store[NUM_MACHINE_MODES];
215 /* If a memory-to-memory move would take MOVE_RATIO or more simple
216 move-instruction sequences, we will do a movstr or libcall instead. */
219 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
222 /* If we are optimizing for space (-Os), cut down the default move ratio */
223 #define MOVE_RATIO (optimize_size ? 3 : 15)
227 /* This macro is used to determine whether move_by_pieces should be called
228 to perform a structure copy. */
229 #ifndef MOVE_BY_PIECES_P
230 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
231 (SIZE, ALIGN) < MOVE_RATIO)
234 /* This array records the insn_code of insns to perform block moves. */
235 enum insn_code movstr_optab[NUM_MACHINE_MODES];
237 /* This array records the insn_code of insns to perform block clears. */
238 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
240 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
242 #ifndef SLOW_UNALIGNED_ACCESS
243 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
246 /* Register mappings for target machines without register windows. */
247 #ifndef INCOMING_REGNO
248 #define INCOMING_REGNO(OUT) (OUT)
250 #ifndef OUTGOING_REGNO
251 #define OUTGOING_REGNO(IN) (IN)
254 /* This is run once per compilation to set up which modes can be used
255 directly in memory and to initialize the block move optab. */
261 enum machine_mode mode;
268 /* Since we are on the permanent obstack, we must be sure we save this
269 spot AFTER we call start_sequence, since it will reuse the rtl it
271 free_point = (char *) oballoc (0);
273 /* Try indexing by frame ptr and try by stack ptr.
274 It is known that on the Convex the stack ptr isn't a valid index.
275 With luck, one or the other is valid on any machine. */
276 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
277 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
279 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
280 pat = PATTERN (insn);
282 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
283 mode = (enum machine_mode) ((int) mode + 1))
288 direct_load[(int) mode] = direct_store[(int) mode] = 0;
289 PUT_MODE (mem, mode);
290 PUT_MODE (mem1, mode);
292 /* See if there is some register that can be used in this mode and
293 directly loaded or stored from memory. */
295 if (mode != VOIDmode && mode != BLKmode)
296 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
297 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
300 if (! HARD_REGNO_MODE_OK (regno, mode))
303 reg = gen_rtx_REG (mode, regno);
306 SET_DEST (pat) = reg;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_load[(int) mode] = 1;
310 SET_SRC (pat) = mem1;
311 SET_DEST (pat) = reg;
312 if (recog (pat, insn, &num_clobbers) >= 0)
313 direct_load[(int) mode] = 1;
316 SET_DEST (pat) = mem;
317 if (recog (pat, insn, &num_clobbers) >= 0)
318 direct_store[(int) mode] = 1;
321 SET_DEST (pat) = mem1;
322 if (recog (pat, insn, &num_clobbers) >= 0)
323 direct_store[(int) mode] = 1;
331 /* This is run at the start of compiling a function. */
338 pending_stack_adjust = 0;
339 inhibit_defer_pop = 0;
341 apply_args_value = 0;
345 /* Save all variables describing the current status into the structure *P.
346 This is used before starting a nested function. */
352 p->pending_chain = pending_chain;
353 p->pending_stack_adjust = pending_stack_adjust;
354 p->inhibit_defer_pop = inhibit_defer_pop;
355 p->saveregs_value = saveregs_value;
356 p->apply_args_value = apply_args_value;
357 p->forced_labels = forced_labels;
359 pending_chain = NULL_RTX;
360 pending_stack_adjust = 0;
361 inhibit_defer_pop = 0;
363 apply_args_value = 0;
367 /* Restore all variables describing the current status from the structure *P.
368 This is used after a nested function. */
371 restore_expr_status (p)
374 pending_chain = p->pending_chain;
375 pending_stack_adjust = p->pending_stack_adjust;
376 inhibit_defer_pop = p->inhibit_defer_pop;
377 saveregs_value = p->saveregs_value;
378 apply_args_value = p->apply_args_value;
379 forced_labels = p->forced_labels;
382 /* Manage the queue of increment instructions to be output
383 for POSTINCREMENT_EXPR expressions, etc. */
385 /* Queue up to increment (or change) VAR later. BODY says how:
386 BODY should be the same thing you would pass to emit_insn
387 to increment right away. It will go to emit_insn later on.
389 The value is a QUEUED expression to be used in place of VAR
390 where you want to guarantee the pre-incrementation value of VAR. */
393 enqueue_insn (var, body)
396 pending_chain = gen_rtx_QUEUED (GET_MODE (var),
397 var, NULL_RTX, NULL_RTX, body,
399 return pending_chain;
402 /* Use protect_from_queue to convert a QUEUED expression
403 into something that you can put immediately into an instruction.
404 If the queued incrementation has not happened yet,
405 protect_from_queue returns the variable itself.
406 If the incrementation has happened, protect_from_queue returns a temp
407 that contains a copy of the old value of the variable.
409 Any time an rtx which might possibly be a QUEUED is to be put
410 into an instruction, it must be passed through protect_from_queue first.
411 QUEUED expressions are not meaningful in instructions.
413 Do not pass a value through protect_from_queue and then hold
414 on to it for a while before putting it in an instruction!
415 If the queue is flushed in between, incorrect code will result. */
418 protect_from_queue (x, modify)
422 register RTX_CODE code = GET_CODE (x);
424 #if 0 /* A QUEUED can hang around after the queue is forced out. */
425 /* Shortcut for most common case. */
426 if (pending_chain == 0)
432 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
433 use of autoincrement. Make a copy of the contents of the memory
434 location rather than a copy of the address, but not if the value is
435 of mode BLKmode. Don't modify X in place since it might be
437 if (code == MEM && GET_MODE (x) != BLKmode
438 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
440 register rtx y = XEXP (x, 0);
441 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
443 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
444 MEM_COPY_ATTRIBUTES (new, x);
445 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
449 register rtx temp = gen_reg_rtx (GET_MODE (new));
450 emit_insn_before (gen_move_insn (temp, new),
454 /* Copy the address into a pseudo, so that the returned value
455 remains correct across calls to emit_queue. */
456 XEXP (new, 0) = copy_to_reg (XEXP (new, 0));
459 /* Otherwise, recursively protect the subexpressions of all
460 the kinds of rtx's that can contain a QUEUED. */
463 rtx tem = protect_from_queue (XEXP (x, 0), 0);
464 if (tem != XEXP (x, 0))
470 else if (code == PLUS || code == MULT)
472 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
473 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
474 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
483 /* If the increment has not happened, use the variable itself. Copy it
484 into a new pseudo so that the value remains correct across calls to
486 if (QUEUED_INSN (x) == 0)
487 return copy_to_reg (QUEUED_VAR (x));
488 /* If the increment has happened and a pre-increment copy exists,
490 if (QUEUED_COPY (x) != 0)
491 return QUEUED_COPY (x);
492 /* The increment has happened but we haven't set up a pre-increment copy.
493 Set one up now, and use it. */
494 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
495 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
497 return QUEUED_COPY (x);
500 /* Return nonzero if X contains a QUEUED expression:
501 if it contains anything that will be altered by a queued increment.
502 We handle only combinations of MEM, PLUS, MINUS and MULT operators
503 since memory addresses generally contain only those. */
509 register enum rtx_code code = GET_CODE (x);
515 return queued_subexp_p (XEXP (x, 0));
519 return (queued_subexp_p (XEXP (x, 0))
520 || queued_subexp_p (XEXP (x, 1)));
526 /* Perform all the pending incrementations. */
532 while ((p = pending_chain))
534 rtx body = QUEUED_BODY (p);
536 if (GET_CODE (body) == SEQUENCE)
538 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
539 emit_insn (QUEUED_BODY (p));
542 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
543 pending_chain = QUEUED_NEXT (p);
554 /* Copy data from FROM to TO, where the machine modes are not the same.
555 Both modes may be integer, or both may be floating.
556 UNSIGNEDP should be nonzero if FROM is an unsigned type.
557 This causes zero-extension instead of sign-extension. */
560 convert_move (to, from, unsignedp)
561 register rtx to, from;
564 enum machine_mode to_mode = GET_MODE (to);
565 enum machine_mode from_mode = GET_MODE (from);
566 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
567 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
571 /* rtx code for making an equivalent value. */
572 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
574 to = protect_from_queue (to, 1);
575 from = protect_from_queue (from, 0);
577 if (to_real != from_real)
580 /* If FROM is a SUBREG that indicates that we have already done at least
581 the required extension, strip it. We don't handle such SUBREGs as
584 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
585 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
586 >= GET_MODE_SIZE (to_mode))
587 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
588 from = gen_lowpart (to_mode, from), from_mode = to_mode;
590 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
593 if (to_mode == from_mode
594 || (from_mode == VOIDmode && CONSTANT_P (from)))
596 emit_move_insn (to, from);
604 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
606 /* Try converting directly if the insn is supported. */
607 if ((code = can_extend_p (to_mode, from_mode, 0))
610 emit_unop_insn (code, to, from, UNKNOWN);
615 #ifdef HAVE_trunchfqf2
616 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
618 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
622 #ifdef HAVE_trunctqfqf2
623 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
625 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
629 #ifdef HAVE_truncsfqf2
630 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
632 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
636 #ifdef HAVE_truncdfqf2
637 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
639 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
643 #ifdef HAVE_truncxfqf2
644 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
646 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
650 #ifdef HAVE_trunctfqf2
651 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
653 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
658 #ifdef HAVE_trunctqfhf2
659 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
661 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
665 #ifdef HAVE_truncsfhf2
666 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
668 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
672 #ifdef HAVE_truncdfhf2
673 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
675 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
679 #ifdef HAVE_truncxfhf2
680 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
682 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
686 #ifdef HAVE_trunctfhf2
687 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
689 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
694 #ifdef HAVE_truncsftqf2
695 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
697 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
701 #ifdef HAVE_truncdftqf2
702 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
704 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
708 #ifdef HAVE_truncxftqf2
709 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
711 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
715 #ifdef HAVE_trunctftqf2
716 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
718 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
723 #ifdef HAVE_truncdfsf2
724 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
726 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
730 #ifdef HAVE_truncxfsf2
731 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
733 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
737 #ifdef HAVE_trunctfsf2
738 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
740 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
744 #ifdef HAVE_truncxfdf2
745 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
747 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
751 #ifdef HAVE_trunctfdf2
752 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
754 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
766 libcall = extendsfdf2_libfunc;
770 libcall = extendsfxf2_libfunc;
774 libcall = extendsftf2_libfunc;
786 libcall = truncdfsf2_libfunc;
790 libcall = extenddfxf2_libfunc;
794 libcall = extenddftf2_libfunc;
806 libcall = truncxfsf2_libfunc;
810 libcall = truncxfdf2_libfunc;
822 libcall = trunctfsf2_libfunc;
826 libcall = trunctfdf2_libfunc;
838 if (libcall == (rtx) 0)
839 /* This conversion is not implemented yet. */
842 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
844 emit_move_insn (to, value);
848 /* Now both modes are integers. */
850 /* Handle expanding beyond a word. */
851 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
852 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
859 enum machine_mode lowpart_mode;
860 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
862 /* Try converting directly if the insn is supported. */
863 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
866 /* If FROM is a SUBREG, put it into a register. Do this
867 so that we always generate the same set of insns for
868 better cse'ing; if an intermediate assignment occurred,
869 we won't be doing the operation directly on the SUBREG. */
870 if (optimize > 0 && GET_CODE (from) == SUBREG)
871 from = force_reg (from_mode, from);
872 emit_unop_insn (code, to, from, equiv_code);
875 /* Next, try converting via full word. */
876 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
877 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
878 != CODE_FOR_nothing))
880 if (GET_CODE (to) == REG)
881 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
882 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
883 emit_unop_insn (code, to,
884 gen_lowpart (word_mode, to), equiv_code);
888 /* No special multiword conversion insn; do it by hand. */
891 /* Since we will turn this into a no conflict block, we must ensure
892 that the source does not overlap the target. */
894 if (reg_overlap_mentioned_p (to, from))
895 from = force_reg (from_mode, from);
897 /* Get a copy of FROM widened to a word, if necessary. */
898 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
899 lowpart_mode = word_mode;
901 lowpart_mode = from_mode;
903 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
905 lowpart = gen_lowpart (lowpart_mode, to);
906 emit_move_insn (lowpart, lowfrom);
908 /* Compute the value to put in each remaining word. */
910 fill_value = const0_rtx;
915 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
916 && STORE_FLAG_VALUE == -1)
918 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
920 fill_value = gen_reg_rtx (word_mode);
921 emit_insn (gen_slt (fill_value));
927 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
928 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
930 fill_value = convert_to_mode (word_mode, fill_value, 1);
934 /* Fill the remaining words. */
935 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
937 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
938 rtx subword = operand_subword (to, index, 1, to_mode);
943 if (fill_value != subword)
944 emit_move_insn (subword, fill_value);
947 insns = get_insns ();
950 emit_no_conflict_block (insns, to, from, NULL_RTX,
951 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
955 /* Truncating multi-word to a word or less. */
956 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
957 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
959 if (!((GET_CODE (from) == MEM
960 && ! MEM_VOLATILE_P (from)
961 && direct_load[(int) to_mode]
962 && ! mode_dependent_address_p (XEXP (from, 0)))
963 || GET_CODE (from) == REG
964 || GET_CODE (from) == SUBREG))
965 from = force_reg (from_mode, from);
966 convert_move (to, gen_lowpart (word_mode, from), 0);
970 /* Handle pointer conversion */ /* SPEE 900220 */
971 if (to_mode == PQImode)
973 if (from_mode != QImode)
974 from = convert_to_mode (QImode, from, unsignedp);
976 #ifdef HAVE_truncqipqi2
977 if (HAVE_truncqipqi2)
979 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
982 #endif /* HAVE_truncqipqi2 */
986 if (from_mode == PQImode)
988 if (to_mode != QImode)
990 from = convert_to_mode (QImode, from, unsignedp);
995 #ifdef HAVE_extendpqiqi2
996 if (HAVE_extendpqiqi2)
998 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1001 #endif /* HAVE_extendpqiqi2 */
1006 if (to_mode == PSImode)
1008 if (from_mode != SImode)
1009 from = convert_to_mode (SImode, from, unsignedp);
1011 #ifdef HAVE_truncsipsi2
1012 if (HAVE_truncsipsi2)
1014 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1017 #endif /* HAVE_truncsipsi2 */
1021 if (from_mode == PSImode)
1023 if (to_mode != SImode)
1025 from = convert_to_mode (SImode, from, unsignedp);
1030 #ifdef HAVE_extendpsisi2
1031 if (HAVE_extendpsisi2)
1033 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1036 #endif /* HAVE_extendpsisi2 */
1041 if (to_mode == PDImode)
1043 if (from_mode != DImode)
1044 from = convert_to_mode (DImode, from, unsignedp);
1046 #ifdef HAVE_truncdipdi2
1047 if (HAVE_truncdipdi2)
1049 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1052 #endif /* HAVE_truncdipdi2 */
1056 if (from_mode == PDImode)
1058 if (to_mode != DImode)
1060 from = convert_to_mode (DImode, from, unsignedp);
1065 #ifdef HAVE_extendpdidi2
1066 if (HAVE_extendpdidi2)
1068 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1071 #endif /* HAVE_extendpdidi2 */
1076 /* Now follow all the conversions between integers
1077 no more than a word long. */
1079 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1080 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1081 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1082 GET_MODE_BITSIZE (from_mode)))
1084 if (!((GET_CODE (from) == MEM
1085 && ! MEM_VOLATILE_P (from)
1086 && direct_load[(int) to_mode]
1087 && ! mode_dependent_address_p (XEXP (from, 0)))
1088 || GET_CODE (from) == REG
1089 || GET_CODE (from) == SUBREG))
1090 from = force_reg (from_mode, from);
1091 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1092 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1093 from = copy_to_reg (from);
1094 emit_move_insn (to, gen_lowpart (to_mode, from));
1098 /* Handle extension. */
1099 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1101 /* Convert directly if that works. */
1102 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1103 != CODE_FOR_nothing)
1105 emit_unop_insn (code, to, from, equiv_code);
1110 enum machine_mode intermediate;
1114 /* Search for a mode to convert via. */
1115 for (intermediate = from_mode; intermediate != VOIDmode;
1116 intermediate = GET_MODE_WIDER_MODE (intermediate))
1117 if (((can_extend_p (to_mode, intermediate, unsignedp)
1118 != CODE_FOR_nothing)
1119 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1120 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1121 && (can_extend_p (intermediate, from_mode, unsignedp)
1122 != CODE_FOR_nothing))
1124 convert_move (to, convert_to_mode (intermediate, from,
1125 unsignedp), unsignedp);
1129 /* No suitable intermediate mode.
1130 Generate what we need with shifts. */
1131 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1132 - GET_MODE_BITSIZE (from_mode), 0);
1133 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1134 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1136 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1139 emit_move_insn (to, tmp);
1144 /* Support special truncate insns for certain modes. */
1146 if (from_mode == DImode && to_mode == SImode)
1148 #ifdef HAVE_truncdisi2
1149 if (HAVE_truncdisi2)
1151 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1155 convert_move (to, force_reg (from_mode, from), unsignedp);
1159 if (from_mode == DImode && to_mode == HImode)
1161 #ifdef HAVE_truncdihi2
1162 if (HAVE_truncdihi2)
1164 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1168 convert_move (to, force_reg (from_mode, from), unsignedp);
1172 if (from_mode == DImode && to_mode == QImode)
1174 #ifdef HAVE_truncdiqi2
1175 if (HAVE_truncdiqi2)
1177 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1181 convert_move (to, force_reg (from_mode, from), unsignedp);
1185 if (from_mode == SImode && to_mode == HImode)
1187 #ifdef HAVE_truncsihi2
1188 if (HAVE_truncsihi2)
1190 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1194 convert_move (to, force_reg (from_mode, from), unsignedp);
1198 if (from_mode == SImode && to_mode == QImode)
1200 #ifdef HAVE_truncsiqi2
1201 if (HAVE_truncsiqi2)
1203 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1207 convert_move (to, force_reg (from_mode, from), unsignedp);
1211 if (from_mode == HImode && to_mode == QImode)
1213 #ifdef HAVE_trunchiqi2
1214 if (HAVE_trunchiqi2)
1216 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1220 convert_move (to, force_reg (from_mode, from), unsignedp);
1224 if (from_mode == TImode && to_mode == DImode)
1226 #ifdef HAVE_trunctidi2
1227 if (HAVE_trunctidi2)
1229 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1233 convert_move (to, force_reg (from_mode, from), unsignedp);
1237 if (from_mode == TImode && to_mode == SImode)
1239 #ifdef HAVE_trunctisi2
1240 if (HAVE_trunctisi2)
1242 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1246 convert_move (to, force_reg (from_mode, from), unsignedp);
1250 if (from_mode == TImode && to_mode == HImode)
1252 #ifdef HAVE_trunctihi2
1253 if (HAVE_trunctihi2)
1255 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1259 convert_move (to, force_reg (from_mode, from), unsignedp);
1263 if (from_mode == TImode && to_mode == QImode)
1265 #ifdef HAVE_trunctiqi2
1266 if (HAVE_trunctiqi2)
1268 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1272 convert_move (to, force_reg (from_mode, from), unsignedp);
1276 /* Handle truncation of volatile memrefs, and so on;
1277 the things that couldn't be truncated directly,
1278 and for which there was no special instruction. */
1279 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1281 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1282 emit_move_insn (to, temp);
1286 /* Mode combination is not recognized. */
1290 /* Return an rtx for a value that would result
1291 from converting X to mode MODE.
1292 Both X and MODE may be floating, or both integer.
1293 UNSIGNEDP is nonzero if X is an unsigned value.
1294 This can be done by referring to a part of X in place
1295 or by copying to a new temporary with conversion.
1297 This function *must not* call protect_from_queue
1298 except when putting X into an insn (in which case convert_move does it). */
1301 convert_to_mode (mode, x, unsignedp)
1302 enum machine_mode mode;
1306 return convert_modes (mode, VOIDmode, x, unsignedp);
1309 /* Return an rtx for a value that would result
1310 from converting X from mode OLDMODE to mode MODE.
1311 Both modes may be floating, or both integer.
1312 UNSIGNEDP is nonzero if X is an unsigned value.
1314 This can be done by referring to a part of X in place
1315 or by copying to a new temporary with conversion.
1317 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1319 This function *must not* call protect_from_queue
1320 except when putting X into an insn (in which case convert_move does it). */
1323 convert_modes (mode, oldmode, x, unsignedp)
1324 enum machine_mode mode, oldmode;
1330 /* If FROM is a SUBREG that indicates that we have already done at least
1331 the required extension, strip it. */
1333 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1334 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1335 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1336 x = gen_lowpart (mode, x);
1338 if (GET_MODE (x) != VOIDmode)
1339 oldmode = GET_MODE (x);
1341 if (mode == oldmode)
1344 /* There is one case that we must handle specially: If we are converting
1345 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1346 we are to interpret the constant as unsigned, gen_lowpart will do
1347 the wrong if the constant appears negative. What we want to do is
1348 make the high-order word of the constant zero, not all ones. */
1350 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1351 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1352 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1354 HOST_WIDE_INT val = INTVAL (x);
1356 if (oldmode != VOIDmode
1357 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1359 int width = GET_MODE_BITSIZE (oldmode);
1361 /* We need to zero extend VAL. */
1362 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1365 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1368 /* We can do this with a gen_lowpart if both desired and current modes
1369 are integer, and this is either a constant integer, a register, or a
1370 non-volatile MEM. Except for the constant case where MODE is no
1371 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1373 if ((GET_CODE (x) == CONST_INT
1374 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1375 || (GET_MODE_CLASS (mode) == MODE_INT
1376 && GET_MODE_CLASS (oldmode) == MODE_INT
1377 && (GET_CODE (x) == CONST_DOUBLE
1378 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1379 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1380 && direct_load[(int) mode])
1381 || (GET_CODE (x) == REG
1382 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1383 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1385 /* ?? If we don't know OLDMODE, we have to assume here that
1386 X does not need sign- or zero-extension. This may not be
1387 the case, but it's the best we can do. */
1388 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1389 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1391 HOST_WIDE_INT val = INTVAL (x);
1392 int width = GET_MODE_BITSIZE (oldmode);
1394 /* We must sign or zero-extend in this case. Start by
1395 zero-extending, then sign extend if we need to. */
1396 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1398 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1399 val |= (HOST_WIDE_INT) (-1) << width;
1401 return GEN_INT (val);
1404 return gen_lowpart (mode, x);
1407 temp = gen_reg_rtx (mode);
1408 convert_move (temp, x, unsignedp);
1413 /* This macro is used to determine what the largest unit size that
1414 move_by_pieces can use is. */
1416 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1417 move efficiently, as opposed to MOVE_MAX which is the maximum
1418 number of bhytes we can move with a single instruction. */
1420 #ifndef MOVE_MAX_PIECES
1421 #define MOVE_MAX_PIECES MOVE_MAX
1424 /* Generate several move instructions to copy LEN bytes
1425 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1426 The caller must pass FROM and TO
1427 through protect_from_queue before calling.
1428 ALIGN (in bytes) is maximum alignment we can assume. */
1431 move_by_pieces (to, from, len, align)
1435 struct move_by_pieces data;
1436 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1437 int max_size = MOVE_MAX_PIECES + 1;
1438 enum machine_mode mode = VOIDmode, tmode;
1439 enum insn_code icode;
1442 data.to_addr = to_addr;
1443 data.from_addr = from_addr;
1447 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1448 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1450 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1451 || GET_CODE (from_addr) == POST_INC
1452 || GET_CODE (from_addr) == POST_DEC);
1454 data.explicit_inc_from = 0;
1455 data.explicit_inc_to = 0;
1457 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1458 if (data.reverse) data.offset = len;
1461 data.to_struct = MEM_IN_STRUCT_P (to);
1462 data.from_struct = MEM_IN_STRUCT_P (from);
1464 /* If copying requires more than two move insns,
1465 copy addresses to registers (to make displacements shorter)
1466 and use post-increment if available. */
1467 if (!(data.autinc_from && data.autinc_to)
1468 && move_by_pieces_ninsns (len, align) > 2)
1470 /* Find the mode of the largest move... */
1471 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1472 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1473 if (GET_MODE_SIZE (tmode) < max_size)
1476 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1478 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len-GET_MODE_SIZE (mode)));
1479 data.autinc_from = 1;
1480 data.explicit_inc_from = -1;
1482 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1484 data.from_addr = copy_addr_to_reg (from_addr);
1485 data.autinc_from = 1;
1486 data.explicit_inc_from = 1;
1488 if (!data.autinc_from && CONSTANT_P (from_addr))
1489 data.from_addr = copy_addr_to_reg (from_addr);
1490 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1492 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len-GET_MODE_SIZE (mode)));
1494 data.explicit_inc_to = -1;
1496 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1498 data.to_addr = copy_addr_to_reg (to_addr);
1500 data.explicit_inc_to = 1;
1502 if (!data.autinc_to && CONSTANT_P (to_addr))
1503 data.to_addr = copy_addr_to_reg (to_addr);
1506 if (! SLOW_UNALIGNED_ACCESS
1507 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1510 /* First move what we can in the largest integer mode, then go to
1511 successively smaller modes. */
1513 while (max_size > 1)
1515 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1516 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1517 if (GET_MODE_SIZE (tmode) < max_size)
1520 if (mode == VOIDmode)
1523 icode = mov_optab->handlers[(int) mode].insn_code;
1524 if (icode != CODE_FOR_nothing
1525 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1526 GET_MODE_SIZE (mode)))
1527 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1529 max_size = GET_MODE_SIZE (mode);
1532 /* The code above should have handled everything. */
1537 /* Return number of insns required to move L bytes by pieces.
1538 ALIGN (in bytes) is maximum alignment we can assume. */
1541 move_by_pieces_ninsns (l, align)
1545 register int n_insns = 0;
1546 int max_size = MOVE_MAX + 1;
1548 if (! SLOW_UNALIGNED_ACCESS
1549 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1552 while (max_size > 1)
1554 enum machine_mode mode = VOIDmode, tmode;
1555 enum insn_code icode;
1557 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1558 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1559 if (GET_MODE_SIZE (tmode) < max_size)
1562 if (mode == VOIDmode)
1565 icode = mov_optab->handlers[(int) mode].insn_code;
1566 if (icode != CODE_FOR_nothing
1567 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1568 GET_MODE_SIZE (mode)))
1569 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1571 max_size = GET_MODE_SIZE (mode);
1577 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1578 with move instructions for mode MODE. GENFUN is the gen_... function
1579 to make a move insn for that mode. DATA has all the other info. */
1582 move_by_pieces_1 (genfun, mode, data)
1583 rtx (*genfun) PROTO ((rtx, ...));
1584 enum machine_mode mode;
1585 struct move_by_pieces *data;
1587 register int size = GET_MODE_SIZE (mode);
1588 register rtx to1, from1;
1590 while (data->len >= size)
1592 if (data->reverse) data->offset -= size;
1594 to1 = (data->autinc_to
1595 ? gen_rtx_MEM (mode, data->to_addr)
1596 : copy_rtx (change_address (data->to, mode,
1597 plus_constant (data->to_addr,
1599 MEM_IN_STRUCT_P (to1) = data->to_struct;
1602 = (data->autinc_from
1603 ? gen_rtx_MEM (mode, data->from_addr)
1604 : copy_rtx (change_address (data->from, mode,
1605 plus_constant (data->from_addr,
1607 MEM_IN_STRUCT_P (from1) = data->from_struct;
1609 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1610 if (data->explicit_inc_to-- < -1) emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1611 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1612 if (data->explicit_inc_from-- < -1) emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1614 emit_insn ((*genfun) (to1, from1));
1615 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1616 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1617 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1618 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1620 if (! data->reverse) data->offset += size;
1626 /* Emit code to move a block Y to a block X.
1627 This may be done with string-move instructions,
1628 with multiple scalar move instructions, or with a library call.
1630 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1632 SIZE is an rtx that says how long they are.
1633 ALIGN is the maximum alignment we can assume they have,
1636 Return the address of the new block, if memcpy is called and returns it,
1640 emit_block_move (x, y, size, align)
1646 #ifdef TARGET_MEM_FUNCTIONS
1648 tree call_expr, arg_list;
1651 if (GET_MODE (x) != BLKmode)
1654 if (GET_MODE (y) != BLKmode)
1657 x = protect_from_queue (x, 1);
1658 y = protect_from_queue (y, 0);
1659 size = protect_from_queue (size, 0);
1661 if (GET_CODE (x) != MEM)
1663 if (GET_CODE (y) != MEM)
1668 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1669 move_by_pieces (x, y, INTVAL (size), align);
1672 /* Try the most limited insn first, because there's no point
1673 including more than one in the machine description unless
1674 the more limited one has some advantage. */
1676 rtx opalign = GEN_INT (align);
1677 enum machine_mode mode;
1679 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1680 mode = GET_MODE_WIDER_MODE (mode))
1682 enum insn_code code = movstr_optab[(int) mode];
1684 if (code != CODE_FOR_nothing
1685 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1686 here because if SIZE is less than the mode mask, as it is
1687 returned by the macro, it will definitely be less than the
1688 actual mode mask. */
1689 && ((GET_CODE (size) == CONST_INT
1690 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1691 <= (GET_MODE_MASK (mode) >> 1)))
1692 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1693 && (insn_operand_predicate[(int) code][0] == 0
1694 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1695 && (insn_operand_predicate[(int) code][1] == 0
1696 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1697 && (insn_operand_predicate[(int) code][3] == 0
1698 || (*insn_operand_predicate[(int) code][3]) (opalign,
1702 rtx last = get_last_insn ();
1705 op2 = convert_to_mode (mode, size, 1);
1706 if (insn_operand_predicate[(int) code][2] != 0
1707 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1708 op2 = copy_to_mode_reg (mode, op2);
1710 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1717 delete_insns_since (last);
1721 /* X, Y, or SIZE may have been passed through protect_from_queue.
1723 It is unsafe to save the value generated by protect_from_queue
1724 and reuse it later. Consider what happens if emit_queue is
1725 called before the return value from protect_from_queue is used.
1727 Expansion of the CALL_EXPR below will call emit_queue before
1728 we are finished emitting RTL for argument setup. So if we are
1729 not careful we could get the wrong value for an argument.
1731 To avoid this problem we go ahead and emit code to copy X, Y &
1732 SIZE into new pseudos. We can then place those new pseudos
1733 into an RTL_EXPR and use them later, even after a call to
1736 Note this is not strictly needed for library calls since they
1737 do not call emit_queue before loading their arguments. However,
1738 we may need to have library calls call emit_queue in the future
1739 since failing to do so could cause problems for targets which
1740 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1741 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1742 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1744 #ifdef TARGET_MEM_FUNCTIONS
1745 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1747 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1748 TREE_UNSIGNED (integer_type_node));
1749 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1752 #ifdef TARGET_MEM_FUNCTIONS
1753 /* It is incorrect to use the libcall calling conventions to call
1754 memcpy in this context.
1756 This could be a user call to memcpy and the user may wish to
1757 examine the return value from memcpy.
1759 For targets where libcalls and normal calls have different conventions
1760 for returning pointers, we could end up generating incorrect code.
1762 So instead of using a libcall sequence we build up a suitable
1763 CALL_EXPR and expand the call in the normal fashion. */
1764 if (fn == NULL_TREE)
1768 /* This was copied from except.c, I don't know if all this is
1769 necessary in this context or not. */
1770 fn = get_identifier ("memcpy");
1771 push_obstacks_nochange ();
1772 end_temporary_allocation ();
1773 fntype = build_pointer_type (void_type_node);
1774 fntype = build_function_type (fntype, NULL_TREE);
1775 fn = build_decl (FUNCTION_DECL, fn, fntype);
1776 DECL_EXTERNAL (fn) = 1;
1777 TREE_PUBLIC (fn) = 1;
1778 DECL_ARTIFICIAL (fn) = 1;
1779 make_decl_rtl (fn, NULL_PTR, 1);
1780 assemble_external (fn);
1784 /* We need to make an argument list for the function call.
1786 memcpy has three arguments, the first two are void * addresses and
1787 the last is a size_t byte count for the copy. */
1789 = build_tree_list (NULL_TREE,
1790 make_tree (build_pointer_type (void_type_node), x));
1791 TREE_CHAIN (arg_list)
1792 = build_tree_list (NULL_TREE,
1793 make_tree (build_pointer_type (void_type_node), y));
1794 TREE_CHAIN (TREE_CHAIN (arg_list))
1795 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1796 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1798 /* Now we have to build up the CALL_EXPR itself. */
1799 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1800 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1801 call_expr, arg_list, NULL_TREE);
1802 TREE_SIDE_EFFECTS (call_expr) = 1;
1804 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1806 emit_library_call (bcopy_libfunc, 0,
1807 VOIDmode, 3, y, Pmode, x, Pmode,
1808 convert_to_mode (TYPE_MODE (integer_type_node), size,
1809 TREE_UNSIGNED (integer_type_node)),
1810 TYPE_MODE (integer_type_node));
1817 /* Copy all or part of a value X into registers starting at REGNO.
1818 The number of registers to be filled is NREGS. */
1821 move_block_to_reg (regno, x, nregs, mode)
1825 enum machine_mode mode;
1828 #ifdef HAVE_load_multiple
1836 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1837 x = validize_mem (force_const_mem (mode, x));
1839 /* See if the machine can do this with a load multiple insn. */
1840 #ifdef HAVE_load_multiple
1841 if (HAVE_load_multiple)
1843 last = get_last_insn ();
1844 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1852 delete_insns_since (last);
1856 for (i = 0; i < nregs; i++)
1857 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1858 operand_subword_force (x, i, mode));
1861 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1862 The number of registers to be filled is NREGS. SIZE indicates the number
1863 of bytes in the object X. */
1867 move_block_from_reg (regno, x, nregs, size)
1874 #ifdef HAVE_store_multiple
1878 enum machine_mode mode;
1880 /* If SIZE is that of a mode no bigger than a word, just use that
1881 mode's store operation. */
1882 if (size <= UNITS_PER_WORD
1883 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1885 emit_move_insn (change_address (x, mode, NULL),
1886 gen_rtx_REG (mode, regno));
1890 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1891 to the left before storing to memory. Note that the previous test
1892 doesn't handle all cases (e.g. SIZE == 3). */
1893 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1895 rtx tem = operand_subword (x, 0, 1, BLKmode);
1901 shift = expand_shift (LSHIFT_EXPR, word_mode,
1902 gen_rtx_REG (word_mode, regno),
1903 build_int_2 ((UNITS_PER_WORD - size)
1904 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1905 emit_move_insn (tem, shift);
1909 /* See if the machine can do this with a store multiple insn. */
1910 #ifdef HAVE_store_multiple
1911 if (HAVE_store_multiple)
1913 last = get_last_insn ();
1914 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1922 delete_insns_since (last);
1926 for (i = 0; i < nregs; i++)
1928 rtx tem = operand_subword (x, i, 1, BLKmode);
1933 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1937 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1938 registers represented by a PARALLEL. SSIZE represents the total size of
1939 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1941 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1942 the balance will be in what would be the low-order memory addresses, i.e.
1943 left justified for big endian, right justified for little endian. This
1944 happens to be true for the targets currently using this support. If this
1945 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1949 emit_group_load (dst, orig_src, ssize, align)
1956 if (GET_CODE (dst) != PARALLEL)
1959 /* Check for a NULL entry, used to indicate that the parameter goes
1960 both on the stack and in registers. */
1961 if (XEXP (XVECEXP (dst, 0, 0), 0))
1966 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1968 /* If we won't be loading directly from memory, protect the real source
1969 from strange tricks we might play. */
1971 if (GET_CODE (src) != MEM)
1973 src = gen_reg_rtx (GET_MODE (orig_src));
1974 emit_move_insn (src, orig_src);
1977 /* Process the pieces. */
1978 for (i = start; i < XVECLEN (dst, 0); i++)
1980 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1981 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1982 int bytelen = GET_MODE_SIZE (mode);
1985 /* Handle trailing fragments that run over the size of the struct. */
1986 if (ssize >= 0 && bytepos + bytelen > ssize)
1988 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1989 bytelen = ssize - bytepos;
1994 /* Optimize the access just a bit. */
1995 if (GET_CODE (src) == MEM
1996 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1997 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1998 && bytelen == GET_MODE_SIZE (mode))
2000 tmps[i] = gen_reg_rtx (mode);
2001 emit_move_insn (tmps[i],
2002 change_address (src, mode,
2003 plus_constant (XEXP (src, 0),
2008 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
2009 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
2010 mode, mode, align, ssize);
2013 if (BYTES_BIG_ENDIAN && shift)
2015 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2016 tmps[i], 0, OPTAB_WIDEN);
2021 /* Copy the extracted pieces into the proper (probable) hard regs. */
2022 for (i = start; i < XVECLEN (dst, 0); i++)
2023 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2026 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2027 registers represented by a PARALLEL. SSIZE represents the total size of
2028 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2031 emit_group_store (orig_dst, src, ssize, align)
2038 if (GET_CODE (src) != PARALLEL)
2041 /* Check for a NULL entry, used to indicate that the parameter goes
2042 both on the stack and in registers. */
2043 if (XEXP (XVECEXP (src, 0, 0), 0))
2048 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2050 /* Copy the (probable) hard regs into pseudos. */
2051 for (i = start; i < XVECLEN (src, 0); i++)
2053 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2054 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2055 emit_move_insn (tmps[i], reg);
2059 /* If we won't be storing directly into memory, protect the real destination
2060 from strange tricks we might play. */
2062 if (GET_CODE (dst) == PARALLEL)
2066 /* We can get a PARALLEL dst if there is a conditional expression in
2067 a return statement. In that case, the dst and src are the same,
2068 so no action is necessary. */
2069 if (rtx_equal_p (dst, src))
2072 /* It is unclear if we can ever reach here, but we may as well handle
2073 it. Allocate a temporary, and split this into a store/load to/from
2076 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2077 emit_group_store (temp, src, ssize, align);
2078 emit_group_load (dst, temp, ssize, align);
2081 else if (GET_CODE (dst) != MEM)
2083 dst = gen_reg_rtx (GET_MODE (orig_dst));
2084 /* Make life a bit easier for combine. */
2085 emit_move_insn (dst, const0_rtx);
2087 else if (! MEM_IN_STRUCT_P (dst))
2089 /* store_bit_field requires that memory operations have
2090 mem_in_struct_p set; we might not. */
2092 dst = copy_rtx (orig_dst);
2093 MEM_SET_IN_STRUCT_P (dst, 1);
2096 /* Process the pieces. */
2097 for (i = start; i < XVECLEN (src, 0); i++)
2099 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2100 enum machine_mode mode = GET_MODE (tmps[i]);
2101 int bytelen = GET_MODE_SIZE (mode);
2103 /* Handle trailing fragments that run over the size of the struct. */
2104 if (ssize >= 0 && bytepos + bytelen > ssize)
2106 if (BYTES_BIG_ENDIAN)
2108 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2109 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2110 tmps[i], 0, OPTAB_WIDEN);
2112 bytelen = ssize - bytepos;
2115 /* Optimize the access just a bit. */
2116 if (GET_CODE (dst) == MEM
2117 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2118 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2119 && bytelen == GET_MODE_SIZE (mode))
2121 emit_move_insn (change_address (dst, mode,
2122 plus_constant (XEXP (dst, 0),
2128 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2129 mode, tmps[i], align, ssize);
2134 /* Copy from the pseudo into the (probable) hard reg. */
2135 if (GET_CODE (dst) == REG)
2136 emit_move_insn (orig_dst, dst);
2139 /* Generate code to copy a BLKmode object of TYPE out of a
2140 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2141 is null, a stack temporary is created. TGTBLK is returned.
2143 The primary purpose of this routine is to handle functions
2144 that return BLKmode structures in registers. Some machines
2145 (the PA for example) want to return all small structures
2146 in registers regardless of the structure's alignment.
2150 copy_blkmode_from_reg(tgtblk,srcreg,type)
2155 int bytes = int_size_in_bytes (type);
2156 rtx src = NULL, dst = NULL;
2157 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2158 int bitpos, xbitpos, big_endian_correction = 0;
2162 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2163 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2164 preserve_temp_slots (tgtblk);
2167 /* This code assumes srcreg is at least a full word. If it isn't,
2168 copy it into a new pseudo which is a full word. */
2169 if (GET_MODE (srcreg) != BLKmode
2170 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2171 srcreg = convert_to_mode (word_mode, srcreg,
2172 TREE_UNSIGNED (type));
2174 /* Structures whose size is not a multiple of a word are aligned
2175 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2176 machine, this means we must skip the empty high order bytes when
2177 calculating the bit offset. */
2178 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2179 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2182 /* Copy the structure BITSIZE bites at a time.
2184 We could probably emit more efficient code for machines
2185 which do not use strict alignment, but it doesn't seem
2186 worth the effort at the current time. */
2187 for (bitpos = 0, xbitpos = big_endian_correction;
2188 bitpos < bytes * BITS_PER_UNIT;
2189 bitpos += bitsize, xbitpos += bitsize)
2192 /* We need a new source operand each time xbitpos is on a
2193 word boundary and when xbitpos == big_endian_correction
2194 (the first time through). */
2195 if (xbitpos % BITS_PER_WORD == 0
2196 || xbitpos == big_endian_correction)
2197 src = operand_subword_force (srcreg,
2198 xbitpos / BITS_PER_WORD,
2201 /* We need a new destination operand each time bitpos is on
2203 if (bitpos % BITS_PER_WORD == 0)
2204 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2206 /* Use xbitpos for the source extraction (right justified) and
2207 xbitpos for the destination store (left justified). */
2208 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2209 extract_bit_field (src, bitsize,
2210 xbitpos % BITS_PER_WORD, 1,
2211 NULL_RTX, word_mode,
2213 bitsize / BITS_PER_UNIT,
2215 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2221 /* Add a USE expression for REG to the (possibly empty) list pointed
2222 to by CALL_FUSAGE. REG must denote a hard register. */
2225 use_reg (call_fusage, reg)
2226 rtx *call_fusage, reg;
2228 if (GET_CODE (reg) != REG
2229 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2233 = gen_rtx_EXPR_LIST (VOIDmode,
2234 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2237 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2238 starting at REGNO. All of these registers must be hard registers. */
2241 use_regs (call_fusage, regno, nregs)
2248 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2251 for (i = 0; i < nregs; i++)
2252 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2255 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2256 PARALLEL REGS. This is for calls that pass values in multiple
2257 non-contiguous locations. The Irix 6 ABI has examples of this. */
2260 use_group_regs (call_fusage, regs)
2266 for (i = 0; i < XVECLEN (regs, 0); i++)
2268 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2270 /* A NULL entry means the parameter goes both on the stack and in
2271 registers. This can also be a MEM for targets that pass values
2272 partially on the stack and partially in registers. */
2273 if (reg != 0 && GET_CODE (reg) == REG)
2274 use_reg (call_fusage, reg);
2278 /* Generate several move instructions to clear LEN bytes of block TO.
2279 (A MEM rtx with BLKmode). The caller must pass TO through
2280 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2284 clear_by_pieces (to, len, align)
2288 struct clear_by_pieces data;
2289 rtx to_addr = XEXP (to, 0);
2290 int max_size = MOVE_MAX_PIECES + 1;
2291 enum machine_mode mode = VOIDmode, tmode;
2292 enum insn_code icode;
2295 data.to_addr = to_addr;
2298 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2299 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2301 data.explicit_inc_to = 0;
2303 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2304 if (data.reverse) data.offset = len;
2307 data.to_struct = MEM_IN_STRUCT_P (to);
2309 /* If copying requires more than two move insns,
2310 copy addresses to registers (to make displacements shorter)
2311 and use post-increment if available. */
2313 && move_by_pieces_ninsns (len, align) > 2)
2315 /* Determine the main mode we'll be using */
2316 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2317 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2318 if (GET_MODE_SIZE (tmode) < max_size)
2321 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2323 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len-GET_MODE_SIZE (mode)));
2325 data.explicit_inc_to = -1;
2327 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2329 data.to_addr = copy_addr_to_reg (to_addr);
2331 data.explicit_inc_to = 1;
2333 if (!data.autinc_to && CONSTANT_P (to_addr))
2334 data.to_addr = copy_addr_to_reg (to_addr);
2337 if (! SLOW_UNALIGNED_ACCESS
2338 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2341 /* First move what we can in the largest integer mode, then go to
2342 successively smaller modes. */
2344 while (max_size > 1)
2346 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2347 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2348 if (GET_MODE_SIZE (tmode) < max_size)
2351 if (mode == VOIDmode)
2354 icode = mov_optab->handlers[(int) mode].insn_code;
2355 if (icode != CODE_FOR_nothing
2356 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2357 GET_MODE_SIZE (mode)))
2358 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2360 max_size = GET_MODE_SIZE (mode);
2363 /* The code above should have handled everything. */
2368 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2369 with move instructions for mode MODE. GENFUN is the gen_... function
2370 to make a move insn for that mode. DATA has all the other info. */
2373 clear_by_pieces_1 (genfun, mode, data)
2374 rtx (*genfun) PROTO ((rtx, ...));
2375 enum machine_mode mode;
2376 struct clear_by_pieces *data;
2378 register int size = GET_MODE_SIZE (mode);
2381 while (data->len >= size)
2383 if (data->reverse) data->offset -= size;
2385 to1 = (data->autinc_to
2386 ? gen_rtx_MEM (mode, data->to_addr)
2387 : copy_rtx (change_address (data->to, mode,
2388 plus_constant (data->to_addr,
2390 MEM_IN_STRUCT_P (to1) = data->to_struct;
2392 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2393 if (data->explicit_inc_to-- < -1) emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2395 emit_insn ((*genfun) (to1, const0_rtx));
2396 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2397 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2399 if (! data->reverse) data->offset += size;
2405 /* Write zeros through the storage of OBJECT.
2406 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2407 the maximum alignment we can is has, measured in bytes.
2409 If we call a function that returns the length of the block, return it. */
2412 clear_storage (object, size, align)
2417 #ifdef TARGET_MEM_FUNCTIONS
2419 tree call_expr, arg_list;
2423 if (GET_MODE (object) == BLKmode)
2425 object = protect_from_queue (object, 1);
2426 size = protect_from_queue (size, 0);
2428 if (GET_CODE (size) == CONST_INT
2429 && MOVE_BY_PIECES_P (INTVAL (size), align))
2430 clear_by_pieces (object, INTVAL (size), align);
2434 /* Try the most limited insn first, because there's no point
2435 including more than one in the machine description unless
2436 the more limited one has some advantage. */
2438 rtx opalign = GEN_INT (align);
2439 enum machine_mode mode;
2441 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2442 mode = GET_MODE_WIDER_MODE (mode))
2444 enum insn_code code = clrstr_optab[(int) mode];
2446 if (code != CODE_FOR_nothing
2447 /* We don't need MODE to be narrower than
2448 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2449 the mode mask, as it is returned by the macro, it will
2450 definitely be less than the actual mode mask. */
2451 && ((GET_CODE (size) == CONST_INT
2452 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2453 <= (GET_MODE_MASK (mode) >> 1)))
2454 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2455 && (insn_operand_predicate[(int) code][0] == 0
2456 || (*insn_operand_predicate[(int) code][0]) (object,
2458 && (insn_operand_predicate[(int) code][2] == 0
2459 || (*insn_operand_predicate[(int) code][2]) (opalign,
2463 rtx last = get_last_insn ();
2466 op1 = convert_to_mode (mode, size, 1);
2467 if (insn_operand_predicate[(int) code][1] != 0
2468 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2470 op1 = copy_to_mode_reg (mode, op1);
2472 pat = GEN_FCN ((int) code) (object, op1, opalign);
2479 delete_insns_since (last);
2483 /* OBJECT or SIZE may have been passed through protect_from_queue.
2485 It is unsafe to save the value generated by protect_from_queue
2486 and reuse it later. Consider what happens if emit_queue is
2487 called before the return value from protect_from_queue is used.
2489 Expansion of the CALL_EXPR below will call emit_queue before
2490 we are finished emitting RTL for argument setup. So if we are
2491 not careful we could get the wrong value for an argument.
2493 To avoid this problem we go ahead and emit code to copy OBJECT
2494 and SIZE into new pseudos. We can then place those new pseudos
2495 into an RTL_EXPR and use them later, even after a call to
2498 Note this is not strictly needed for library calls since they
2499 do not call emit_queue before loading their arguments. However,
2500 we may need to have library calls call emit_queue in the future
2501 since failing to do so could cause problems for targets which
2502 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2503 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2505 #ifdef TARGET_MEM_FUNCTIONS
2506 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2508 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2509 TREE_UNSIGNED (integer_type_node));
2510 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2514 #ifdef TARGET_MEM_FUNCTIONS
2515 /* It is incorrect to use the libcall calling conventions to call
2516 memset in this context.
2518 This could be a user call to memset and the user may wish to
2519 examine the return value from memset.
2521 For targets where libcalls and normal calls have different
2522 conventions for returning pointers, we could end up generating
2525 So instead of using a libcall sequence we build up a suitable
2526 CALL_EXPR and expand the call in the normal fashion. */
2527 if (fn == NULL_TREE)
2531 /* This was copied from except.c, I don't know if all this is
2532 necessary in this context or not. */
2533 fn = get_identifier ("memset");
2534 push_obstacks_nochange ();
2535 end_temporary_allocation ();
2536 fntype = build_pointer_type (void_type_node);
2537 fntype = build_function_type (fntype, NULL_TREE);
2538 fn = build_decl (FUNCTION_DECL, fn, fntype);
2539 DECL_EXTERNAL (fn) = 1;
2540 TREE_PUBLIC (fn) = 1;
2541 DECL_ARTIFICIAL (fn) = 1;
2542 make_decl_rtl (fn, NULL_PTR, 1);
2543 assemble_external (fn);
2547 /* We need to make an argument list for the function call.
2549 memset has three arguments, the first is a void * addresses, the
2550 second a integer with the initialization value, the last is a
2551 size_t byte count for the copy. */
2553 = build_tree_list (NULL_TREE,
2554 make_tree (build_pointer_type (void_type_node),
2556 TREE_CHAIN (arg_list)
2557 = build_tree_list (NULL_TREE,
2558 make_tree (integer_type_node, const0_rtx));
2559 TREE_CHAIN (TREE_CHAIN (arg_list))
2560 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2561 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2563 /* Now we have to build up the CALL_EXPR itself. */
2564 call_expr = build1 (ADDR_EXPR,
2565 build_pointer_type (TREE_TYPE (fn)), fn);
2566 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2567 call_expr, arg_list, NULL_TREE);
2568 TREE_SIDE_EFFECTS (call_expr) = 1;
2570 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2572 emit_library_call (bzero_libfunc, 0,
2573 VOIDmode, 2, object, Pmode, size,
2574 TYPE_MODE (integer_type_node));
2579 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2584 /* Generate code to copy Y into X.
2585 Both Y and X must have the same mode, except that
2586 Y can be a constant with VOIDmode.
2587 This mode cannot be BLKmode; use emit_block_move for that.
2589 Return the last instruction emitted. */
2592 emit_move_insn (x, y)
2595 enum machine_mode mode = GET_MODE (x);
2597 x = protect_from_queue (x, 1);
2598 y = protect_from_queue (y, 0);
2600 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2603 /* Never force constant_p_rtx to memory. */
2604 if (GET_CODE (y) == CONSTANT_P_RTX)
2606 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2607 y = force_const_mem (mode, y);
2609 /* If X or Y are memory references, verify that their addresses are valid
2611 if (GET_CODE (x) == MEM
2612 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2613 && ! push_operand (x, GET_MODE (x)))
2615 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2616 x = change_address (x, VOIDmode, XEXP (x, 0));
2618 if (GET_CODE (y) == MEM
2619 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2621 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2622 y = change_address (y, VOIDmode, XEXP (y, 0));
2624 if (mode == BLKmode)
2627 return emit_move_insn_1 (x, y);
2630 /* Low level part of emit_move_insn.
2631 Called just like emit_move_insn, but assumes X and Y
2632 are basically valid. */
2635 emit_move_insn_1 (x, y)
2638 enum machine_mode mode = GET_MODE (x);
2639 enum machine_mode submode;
2640 enum mode_class class = GET_MODE_CLASS (mode);
2643 if (mode >= MAX_MACHINE_MODE)
2646 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2648 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2650 /* Expand complex moves by moving real part and imag part, if possible. */
2651 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2652 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2654 (class == MODE_COMPLEX_INT
2655 ? MODE_INT : MODE_FLOAT),
2657 && (mov_optab->handlers[(int) submode].insn_code
2658 != CODE_FOR_nothing))
2660 /* Don't split destination if it is a stack push. */
2661 int stack = push_operand (x, GET_MODE (x));
2663 /* If this is a stack, push the highpart first, so it
2664 will be in the argument order.
2666 In that case, change_address is used only to convert
2667 the mode, not to change the address. */
2670 /* Note that the real part always precedes the imag part in memory
2671 regardless of machine's endianness. */
2672 #ifdef STACK_GROWS_DOWNWARD
2673 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2674 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2675 gen_imagpart (submode, y)));
2676 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2677 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2678 gen_realpart (submode, y)));
2680 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2681 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2682 gen_realpart (submode, y)));
2683 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2684 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2685 gen_imagpart (submode, y)));
2690 rtx realpart_x, realpart_y;
2691 rtx imagpart_x, imagpart_y;
2693 /* If this is a complex value with each part being smaller than a
2694 word, the usual calling sequence will likely pack the pieces into
2695 a single register. Unfortunately, SUBREG of hard registers only
2696 deals in terms of words, so we have a problem converting input
2697 arguments to the CONCAT of two registers that is used elsewhere
2698 for complex values. If this is before reload, we can copy it into
2699 memory and reload. FIXME, we should see about using extract and
2700 insert on integer registers, but complex short and complex char
2701 variables should be rarely used. */
2702 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2703 && (reload_in_progress | reload_completed) == 0)
2705 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2706 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2708 if (packed_dest_p || packed_src_p)
2710 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2711 ? MODE_FLOAT : MODE_INT);
2713 enum machine_mode reg_mode =
2714 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2716 if (reg_mode != BLKmode)
2718 rtx mem = assign_stack_temp (reg_mode,
2719 GET_MODE_SIZE (mode), 0);
2721 rtx cmem = change_address (mem, mode, NULL_RTX);
2723 current_function_cannot_inline
2724 = "function using short complex types cannot be inline";
2728 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2729 emit_move_insn_1 (cmem, y);
2730 return emit_move_insn_1 (sreg, mem);
2734 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2735 emit_move_insn_1 (mem, sreg);
2736 return emit_move_insn_1 (x, cmem);
2742 realpart_x = gen_realpart (submode, x);
2743 realpart_y = gen_realpart (submode, y);
2744 imagpart_x = gen_imagpart (submode, x);
2745 imagpart_y = gen_imagpart (submode, y);
2747 /* Show the output dies here. This is necessary for SUBREGs
2748 of pseudos since we cannot track their lifetimes correctly;
2749 hard regs shouldn't appear here except as return values.
2750 We never want to emit such a clobber after reload. */
2752 && ! (reload_in_progress || reload_completed)
2753 && (GET_CODE (realpart_x) == SUBREG
2754 || GET_CODE (imagpart_x) == SUBREG))
2756 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2759 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2760 (realpart_x, realpart_y));
2761 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2762 (imagpart_x, imagpart_y));
2765 return get_last_insn ();
2768 /* This will handle any multi-word mode that lacks a move_insn pattern.
2769 However, you will get better code if you define such patterns,
2770 even if they must turn into multiple assembler instructions. */
2771 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2777 #ifdef PUSH_ROUNDING
2779 /* If X is a push on the stack, do the push now and replace
2780 X with a reference to the stack pointer. */
2781 if (push_operand (x, GET_MODE (x)))
2783 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2784 x = change_address (x, VOIDmode, stack_pointer_rtx);
2792 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2795 rtx xpart = operand_subword (x, i, 1, mode);
2796 rtx ypart = operand_subword (y, i, 1, mode);
2798 /* If we can't get a part of Y, put Y into memory if it is a
2799 constant. Otherwise, force it into a register. If we still
2800 can't get a part of Y, abort. */
2801 if (ypart == 0 && CONSTANT_P (y))
2803 y = force_const_mem (mode, y);
2804 ypart = operand_subword (y, i, 1, mode);
2806 else if (ypart == 0)
2807 ypart = operand_subword_force (y, i, mode);
2809 if (xpart == 0 || ypart == 0)
2812 need_clobber |= (GET_CODE (xpart) == SUBREG);
2814 last_insn = emit_move_insn (xpart, ypart);
2817 seq = gen_sequence ();
2820 /* Show the output dies here. This is necessary for SUBREGs
2821 of pseudos since we cannot track their lifetimes correctly;
2822 hard regs shouldn't appear here except as return values.
2823 We never want to emit such a clobber after reload. */
2825 && ! (reload_in_progress || reload_completed)
2826 && need_clobber != 0)
2828 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2839 /* Pushing data onto the stack. */
2841 /* Push a block of length SIZE (perhaps variable)
2842 and return an rtx to address the beginning of the block.
2843 Note that it is not possible for the value returned to be a QUEUED.
2844 The value may be virtual_outgoing_args_rtx.
2846 EXTRA is the number of bytes of padding to push in addition to SIZE.
2847 BELOW nonzero means this padding comes at low addresses;
2848 otherwise, the padding comes at high addresses. */
2851 push_block (size, extra, below)
2857 size = convert_modes (Pmode, ptr_mode, size, 1);
2858 if (CONSTANT_P (size))
2859 anti_adjust_stack (plus_constant (size, extra));
2860 else if (GET_CODE (size) == REG && extra == 0)
2861 anti_adjust_stack (size);
2864 rtx temp = copy_to_mode_reg (Pmode, size);
2866 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2867 temp, 0, OPTAB_LIB_WIDEN);
2868 anti_adjust_stack (temp);
2871 #if defined (STACK_GROWS_DOWNWARD) \
2872 || (defined (ARGS_GROW_DOWNWARD) \
2873 && !defined (ACCUMULATE_OUTGOING_ARGS))
2875 /* Return the lowest stack address when STACK or ARGS grow downward and
2876 we are not aaccumulating outgoing arguments (the c4x port uses such
2878 temp = virtual_outgoing_args_rtx;
2879 if (extra != 0 && below)
2880 temp = plus_constant (temp, extra);
2882 if (GET_CODE (size) == CONST_INT)
2883 temp = plus_constant (virtual_outgoing_args_rtx,
2884 - INTVAL (size) - (below ? 0 : extra));
2885 else if (extra != 0 && !below)
2886 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2887 negate_rtx (Pmode, plus_constant (size, extra)));
2889 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2890 negate_rtx (Pmode, size));
2893 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2899 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2902 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2903 block of SIZE bytes. */
2906 get_push_address (size)
2911 if (STACK_PUSH_CODE == POST_DEC)
2912 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2913 else if (STACK_PUSH_CODE == POST_INC)
2914 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2916 temp = stack_pointer_rtx;
2918 return copy_to_reg (temp);
2921 /* Generate code to push X onto the stack, assuming it has mode MODE and
2923 MODE is redundant except when X is a CONST_INT (since they don't
2925 SIZE is an rtx for the size of data to be copied (in bytes),
2926 needed only if X is BLKmode.
2928 ALIGN (in bytes) is maximum alignment we can assume.
2930 If PARTIAL and REG are both nonzero, then copy that many of the first
2931 words of X into registers starting with REG, and push the rest of X.
2932 The amount of space pushed is decreased by PARTIAL words,
2933 rounded *down* to a multiple of PARM_BOUNDARY.
2934 REG must be a hard register in this case.
2935 If REG is zero but PARTIAL is not, take any all others actions for an
2936 argument partially in registers, but do not actually load any
2939 EXTRA is the amount in bytes of extra space to leave next to this arg.
2940 This is ignored if an argument block has already been allocated.
2942 On a machine that lacks real push insns, ARGS_ADDR is the address of
2943 the bottom of the argument block for this call. We use indexing off there
2944 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2945 argument block has not been preallocated.
2947 ARGS_SO_FAR is the size of args previously pushed for this call.
2949 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2950 for arguments passed in registers. If nonzero, it will be the number
2951 of bytes required. */
2954 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2955 args_addr, args_so_far, reg_parm_stack_space)
2957 enum machine_mode mode;
2966 int reg_parm_stack_space;
2969 enum direction stack_direction
2970 #ifdef STACK_GROWS_DOWNWARD
2976 /* Decide where to pad the argument: `downward' for below,
2977 `upward' for above, or `none' for don't pad it.
2978 Default is below for small data on big-endian machines; else above. */
2979 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2981 /* Invert direction if stack is post-update. */
2982 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2983 if (where_pad != none)
2984 where_pad = (where_pad == downward ? upward : downward);
2986 xinner = x = protect_from_queue (x, 0);
2988 if (mode == BLKmode)
2990 /* Copy a block into the stack, entirely or partially. */
2993 int used = partial * UNITS_PER_WORD;
2994 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3002 /* USED is now the # of bytes we need not copy to the stack
3003 because registers will take care of them. */
3006 xinner = change_address (xinner, BLKmode,
3007 plus_constant (XEXP (xinner, 0), used));
3009 /* If the partial register-part of the arg counts in its stack size,
3010 skip the part of stack space corresponding to the registers.
3011 Otherwise, start copying to the beginning of the stack space,
3012 by setting SKIP to 0. */
3013 skip = (reg_parm_stack_space == 0) ? 0 : used;
3015 #ifdef PUSH_ROUNDING
3016 /* Do it with several push insns if that doesn't take lots of insns
3017 and if there is no difficulty with push insns that skip bytes
3018 on the stack for alignment purposes. */
3020 && GET_CODE (size) == CONST_INT
3022 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3023 /* Here we avoid the case of a structure whose weak alignment
3024 forces many pushes of a small amount of data,
3025 and such small pushes do rounding that causes trouble. */
3026 && ((! SLOW_UNALIGNED_ACCESS)
3027 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
3028 || PUSH_ROUNDING (align) == align)
3029 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3031 /* Push padding now if padding above and stack grows down,
3032 or if padding below and stack grows up.
3033 But if space already allocated, this has already been done. */
3034 if (extra && args_addr == 0
3035 && where_pad != none && where_pad != stack_direction)
3036 anti_adjust_stack (GEN_INT (extra));
3038 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3039 INTVAL (size) - used, align);
3041 if (current_function_check_memory_usage && ! in_check_memory_usage)
3045 in_check_memory_usage = 1;
3046 temp = get_push_address (INTVAL(size) - used);
3047 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3048 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3050 XEXP (xinner, 0), Pmode,
3051 GEN_INT (INTVAL(size) - used),
3052 TYPE_MODE (sizetype));
3054 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3056 GEN_INT (INTVAL(size) - used),
3057 TYPE_MODE (sizetype),
3058 GEN_INT (MEMORY_USE_RW),
3059 TYPE_MODE (integer_type_node));
3060 in_check_memory_usage = 0;
3064 #endif /* PUSH_ROUNDING */
3066 /* Otherwise make space on the stack and copy the data
3067 to the address of that space. */
3069 /* Deduct words put into registers from the size we must copy. */
3072 if (GET_CODE (size) == CONST_INT)
3073 size = GEN_INT (INTVAL (size) - used);
3075 size = expand_binop (GET_MODE (size), sub_optab, size,
3076 GEN_INT (used), NULL_RTX, 0,
3080 /* Get the address of the stack space.
3081 In this case, we do not deal with EXTRA separately.
3082 A single stack adjust will do. */
3085 temp = push_block (size, extra, where_pad == downward);
3088 else if (GET_CODE (args_so_far) == CONST_INT)
3089 temp = memory_address (BLKmode,
3090 plus_constant (args_addr,
3091 skip + INTVAL (args_so_far)));
3093 temp = memory_address (BLKmode,
3094 plus_constant (gen_rtx_PLUS (Pmode,
3098 if (current_function_check_memory_usage && ! in_check_memory_usage)
3102 in_check_memory_usage = 1;
3103 target = copy_to_reg (temp);
3104 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3105 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3107 XEXP (xinner, 0), Pmode,
3108 size, TYPE_MODE (sizetype));
3110 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3112 size, TYPE_MODE (sizetype),
3113 GEN_INT (MEMORY_USE_RW),
3114 TYPE_MODE (integer_type_node));
3115 in_check_memory_usage = 0;
3118 /* TEMP is the address of the block. Copy the data there. */
3119 if (GET_CODE (size) == CONST_INT
3120 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
3122 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3123 INTVAL (size), align);
3128 rtx opalign = GEN_INT (align);
3129 enum machine_mode mode;
3130 rtx target = gen_rtx_MEM (BLKmode, temp);
3132 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3134 mode = GET_MODE_WIDER_MODE (mode))
3136 enum insn_code code = movstr_optab[(int) mode];
3138 if (code != CODE_FOR_nothing
3139 && ((GET_CODE (size) == CONST_INT
3140 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3141 <= (GET_MODE_MASK (mode) >> 1)))
3142 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3143 && (insn_operand_predicate[(int) code][0] == 0
3144 || ((*insn_operand_predicate[(int) code][0])
3146 && (insn_operand_predicate[(int) code][1] == 0
3147 || ((*insn_operand_predicate[(int) code][1])
3149 && (insn_operand_predicate[(int) code][3] == 0
3150 || ((*insn_operand_predicate[(int) code][3])
3151 (opalign, VOIDmode))))
3153 rtx op2 = convert_to_mode (mode, size, 1);
3154 rtx last = get_last_insn ();
3157 if (insn_operand_predicate[(int) code][2] != 0
3158 && ! ((*insn_operand_predicate[(int) code][2])
3160 op2 = copy_to_mode_reg (mode, op2);
3162 pat = GEN_FCN ((int) code) (target, xinner,
3170 delete_insns_since (last);
3175 #ifndef ACCUMULATE_OUTGOING_ARGS
3176 /* If the source is referenced relative to the stack pointer,
3177 copy it to another register to stabilize it. We do not need
3178 to do this if we know that we won't be changing sp. */
3180 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3181 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3182 temp = copy_to_reg (temp);
3185 /* Make inhibit_defer_pop nonzero around the library call
3186 to force it to pop the bcopy-arguments right away. */
3188 #ifdef TARGET_MEM_FUNCTIONS
3189 emit_library_call (memcpy_libfunc, 0,
3190 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3191 convert_to_mode (TYPE_MODE (sizetype),
3192 size, TREE_UNSIGNED (sizetype)),
3193 TYPE_MODE (sizetype));
3195 emit_library_call (bcopy_libfunc, 0,
3196 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3197 convert_to_mode (TYPE_MODE (integer_type_node),
3199 TREE_UNSIGNED (integer_type_node)),
3200 TYPE_MODE (integer_type_node));
3205 else if (partial > 0)
3207 /* Scalar partly in registers. */
3209 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3212 /* # words of start of argument
3213 that we must make space for but need not store. */
3214 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3215 int args_offset = INTVAL (args_so_far);
3218 /* Push padding now if padding above and stack grows down,
3219 or if padding below and stack grows up.
3220 But if space already allocated, this has already been done. */
3221 if (extra && args_addr == 0
3222 && where_pad != none && where_pad != stack_direction)
3223 anti_adjust_stack (GEN_INT (extra));
3225 /* If we make space by pushing it, we might as well push
3226 the real data. Otherwise, we can leave OFFSET nonzero
3227 and leave the space uninitialized. */
3231 /* Now NOT_STACK gets the number of words that we don't need to
3232 allocate on the stack. */
3233 not_stack = partial - offset;
3235 /* If the partial register-part of the arg counts in its stack size,
3236 skip the part of stack space corresponding to the registers.
3237 Otherwise, start copying to the beginning of the stack space,
3238 by setting SKIP to 0. */
3239 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3241 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3242 x = validize_mem (force_const_mem (mode, x));
3244 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3245 SUBREGs of such registers are not allowed. */
3246 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3247 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3248 x = copy_to_reg (x);
3250 /* Loop over all the words allocated on the stack for this arg. */
3251 /* We can do it by words, because any scalar bigger than a word
3252 has a size a multiple of a word. */
3253 #ifndef PUSH_ARGS_REVERSED
3254 for (i = not_stack; i < size; i++)
3256 for (i = size - 1; i >= not_stack; i--)
3258 if (i >= not_stack + offset)
3259 emit_push_insn (operand_subword_force (x, i, mode),
3260 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3262 GEN_INT (args_offset + ((i - not_stack + skip)
3264 reg_parm_stack_space);
3269 rtx target = NULL_RTX;
3271 /* Push padding now if padding above and stack grows down,
3272 or if padding below and stack grows up.
3273 But if space already allocated, this has already been done. */
3274 if (extra && args_addr == 0
3275 && where_pad != none && where_pad != stack_direction)
3276 anti_adjust_stack (GEN_INT (extra));
3278 #ifdef PUSH_ROUNDING
3280 addr = gen_push_operand ();
3284 if (GET_CODE (args_so_far) == CONST_INT)
3286 = memory_address (mode,
3287 plus_constant (args_addr,
3288 INTVAL (args_so_far)));
3290 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3295 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3297 if (current_function_check_memory_usage && ! in_check_memory_usage)
3299 in_check_memory_usage = 1;
3301 target = get_push_address (GET_MODE_SIZE (mode));
3303 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3304 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3307 GEN_INT (GET_MODE_SIZE (mode)),
3308 TYPE_MODE (sizetype));
3310 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3312 GEN_INT (GET_MODE_SIZE (mode)),
3313 TYPE_MODE (sizetype),
3314 GEN_INT (MEMORY_USE_RW),
3315 TYPE_MODE (integer_type_node));
3316 in_check_memory_usage = 0;
3321 /* If part should go in registers, copy that part
3322 into the appropriate registers. Do this now, at the end,
3323 since mem-to-mem copies above may do function calls. */
3324 if (partial > 0 && reg != 0)
3326 /* Handle calls that pass values in multiple non-contiguous locations.
3327 The Irix 6 ABI has examples of this. */
3328 if (GET_CODE (reg) == PARALLEL)
3329 emit_group_load (reg, x, -1, align); /* ??? size? */
3331 move_block_to_reg (REGNO (reg), x, partial, mode);
3334 if (extra && args_addr == 0 && where_pad == stack_direction)
3335 anti_adjust_stack (GEN_INT (extra));
3338 /* Expand an assignment that stores the value of FROM into TO.
3339 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3340 (This may contain a QUEUED rtx;
3341 if the value is constant, this rtx is a constant.)
3342 Otherwise, the returned value is NULL_RTX.
3344 SUGGEST_REG is no longer actually used.
3345 It used to mean, copy the value through a register
3346 and return that register, if that is possible.
3347 We now use WANT_VALUE to decide whether to do this. */
3350 expand_assignment (to, from, want_value, suggest_reg)
3355 register rtx to_rtx = 0;
3358 /* Don't crash if the lhs of the assignment was erroneous. */
3360 if (TREE_CODE (to) == ERROR_MARK)
3362 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3363 return want_value ? result : NULL_RTX;
3366 /* Assignment of a structure component needs special treatment
3367 if the structure component's rtx is not simply a MEM.
3368 Assignment of an array element at a constant index, and assignment of
3369 an array element in an unaligned packed structure field, has the same
3372 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3373 || TREE_CODE (to) == ARRAY_REF)
3375 enum machine_mode mode1;
3385 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3386 &unsignedp, &volatilep, &alignment);
3388 /* If we are going to use store_bit_field and extract_bit_field,
3389 make sure to_rtx will be safe for multiple use. */
3391 if (mode1 == VOIDmode && want_value)
3392 tem = stabilize_reference (tem);
3394 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3397 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3399 if (GET_CODE (to_rtx) != MEM)
3402 if (GET_MODE (offset_rtx) != ptr_mode)
3404 #ifdef POINTERS_EXTEND_UNSIGNED
3405 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3407 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3411 /* A constant address in TO_RTX can have VOIDmode, we must not try
3412 to call force_reg for that case. Avoid that case. */
3413 if (GET_CODE (to_rtx) == MEM
3414 && GET_MODE (to_rtx) == BLKmode
3415 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3417 && (bitpos % bitsize) == 0
3418 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3419 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3421 rtx temp = change_address (to_rtx, mode1,
3422 plus_constant (XEXP (to_rtx, 0),
3425 if (GET_CODE (XEXP (temp, 0)) == REG)
3428 to_rtx = change_address (to_rtx, mode1,
3429 force_reg (GET_MODE (XEXP (temp, 0)),
3434 to_rtx = change_address (to_rtx, VOIDmode,
3435 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3436 force_reg (ptr_mode, offset_rtx)));
3440 if (GET_CODE (to_rtx) == MEM)
3442 /* When the offset is zero, to_rtx is the address of the
3443 structure we are storing into, and hence may be shared.
3444 We must make a new MEM before setting the volatile bit. */
3446 to_rtx = copy_rtx (to_rtx);
3448 MEM_VOLATILE_P (to_rtx) = 1;
3450 #if 0 /* This was turned off because, when a field is volatile
3451 in an object which is not volatile, the object may be in a register,
3452 and then we would abort over here. */
3458 if (TREE_CODE (to) == COMPONENT_REF
3459 && TREE_READONLY (TREE_OPERAND (to, 1)))
3462 to_rtx = copy_rtx (to_rtx);
3464 RTX_UNCHANGING_P (to_rtx) = 1;
3467 /* Check the access. */
3468 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3473 enum machine_mode best_mode;
3475 best_mode = get_best_mode (bitsize, bitpos,
3476 TYPE_ALIGN (TREE_TYPE (tem)),
3478 if (best_mode == VOIDmode)
3481 best_mode_size = GET_MODE_BITSIZE (best_mode);
3482 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3483 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3484 size *= GET_MODE_SIZE (best_mode);
3486 /* Check the access right of the pointer. */
3488 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3490 GEN_INT (size), TYPE_MODE (sizetype),
3491 GEN_INT (MEMORY_USE_WO),
3492 TYPE_MODE (integer_type_node));
3495 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3497 /* Spurious cast makes HPUX compiler happy. */
3498 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3501 /* Required alignment of containing datum. */
3503 int_size_in_bytes (TREE_TYPE (tem)),
3504 get_alias_set (to));
3505 preserve_temp_slots (result);
3509 /* If the value is meaningful, convert RESULT to the proper mode.
3510 Otherwise, return nothing. */
3511 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3512 TYPE_MODE (TREE_TYPE (from)),
3514 TREE_UNSIGNED (TREE_TYPE (to)))
3518 /* If the rhs is a function call and its value is not an aggregate,
3519 call the function before we start to compute the lhs.
3520 This is needed for correct code for cases such as
3521 val = setjmp (buf) on machines where reference to val
3522 requires loading up part of an address in a separate insn.
3524 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3525 a promoted variable where the zero- or sign- extension needs to be done.
3526 Handling this in the normal way is safe because no computation is done
3528 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3529 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3530 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3535 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3537 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3539 /* Handle calls that return values in multiple non-contiguous locations.
3540 The Irix 6 ABI has examples of this. */
3541 if (GET_CODE (to_rtx) == PARALLEL)
3542 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3543 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3544 else if (GET_MODE (to_rtx) == BLKmode)
3545 emit_block_move (to_rtx, value, expr_size (from),
3546 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3549 #ifdef POINTERS_EXTEND_UNSIGNED
3550 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3551 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3552 value = convert_memory_address (GET_MODE (to_rtx), value);
3554 emit_move_insn (to_rtx, value);
3556 preserve_temp_slots (to_rtx);
3559 return want_value ? to_rtx : NULL_RTX;
3562 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3563 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3567 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3568 if (GET_CODE (to_rtx) == MEM)
3569 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3572 /* Don't move directly into a return register. */
3573 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3578 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3579 emit_move_insn (to_rtx, temp);
3580 preserve_temp_slots (to_rtx);
3583 return want_value ? to_rtx : NULL_RTX;
3586 /* In case we are returning the contents of an object which overlaps
3587 the place the value is being stored, use a safe function when copying
3588 a value through a pointer into a structure value return block. */
3589 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3590 && current_function_returns_struct
3591 && !current_function_returns_pcc_struct)
3596 size = expr_size (from);
3597 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3598 EXPAND_MEMORY_USE_DONT);
3600 /* Copy the rights of the bitmap. */
3601 if (current_function_check_memory_usage)
3602 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3603 XEXP (to_rtx, 0), Pmode,
3604 XEXP (from_rtx, 0), Pmode,
3605 convert_to_mode (TYPE_MODE (sizetype),
3606 size, TREE_UNSIGNED (sizetype)),
3607 TYPE_MODE (sizetype));
3609 #ifdef TARGET_MEM_FUNCTIONS
3610 emit_library_call (memcpy_libfunc, 0,
3611 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3612 XEXP (from_rtx, 0), Pmode,
3613 convert_to_mode (TYPE_MODE (sizetype),
3614 size, TREE_UNSIGNED (sizetype)),
3615 TYPE_MODE (sizetype));
3617 emit_library_call (bcopy_libfunc, 0,
3618 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3619 XEXP (to_rtx, 0), Pmode,
3620 convert_to_mode (TYPE_MODE (integer_type_node),
3621 size, TREE_UNSIGNED (integer_type_node)),
3622 TYPE_MODE (integer_type_node));
3625 preserve_temp_slots (to_rtx);
3628 return want_value ? to_rtx : NULL_RTX;
3631 /* Compute FROM and store the value in the rtx we got. */
3634 result = store_expr (from, to_rtx, want_value);
3635 preserve_temp_slots (result);
3638 return want_value ? result : NULL_RTX;
3641 /* Generate code for computing expression EXP,
3642 and storing the value into TARGET.
3643 TARGET may contain a QUEUED rtx.
3645 If WANT_VALUE is nonzero, return a copy of the value
3646 not in TARGET, so that we can be sure to use the proper
3647 value in a containing expression even if TARGET has something
3648 else stored in it. If possible, we copy the value through a pseudo
3649 and return that pseudo. Or, if the value is constant, we try to
3650 return the constant. In some cases, we return a pseudo
3651 copied *from* TARGET.
3653 If the mode is BLKmode then we may return TARGET itself.
3654 It turns out that in BLKmode it doesn't cause a problem.
3655 because C has no operators that could combine two different
3656 assignments into the same BLKmode object with different values
3657 with no sequence point. Will other languages need this to
3660 If WANT_VALUE is 0, we return NULL, to make sure
3661 to catch quickly any cases where the caller uses the value
3662 and fails to set WANT_VALUE. */
3665 store_expr (exp, target, want_value)
3667 register rtx target;
3671 int dont_return_target = 0;
3673 if (TREE_CODE (exp) == COMPOUND_EXPR)
3675 /* Perform first part of compound expression, then assign from second
3677 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3679 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3681 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3683 /* For conditional expression, get safe form of the target. Then
3684 test the condition, doing the appropriate assignment on either
3685 side. This avoids the creation of unnecessary temporaries.
3686 For non-BLKmode, it is more efficient not to do this. */
3688 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3691 target = protect_from_queue (target, 1);
3693 do_pending_stack_adjust ();
3695 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3696 start_cleanup_deferral ();
3697 store_expr (TREE_OPERAND (exp, 1), target, 0);
3698 end_cleanup_deferral ();
3700 emit_jump_insn (gen_jump (lab2));
3703 start_cleanup_deferral ();
3704 store_expr (TREE_OPERAND (exp, 2), target, 0);
3705 end_cleanup_deferral ();
3710 return want_value ? target : NULL_RTX;
3712 else if (queued_subexp_p (target))
3713 /* If target contains a postincrement, let's not risk
3714 using it as the place to generate the rhs. */
3716 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3718 /* Expand EXP into a new pseudo. */
3719 temp = gen_reg_rtx (GET_MODE (target));
3720 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3723 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3725 /* If target is volatile, ANSI requires accessing the value
3726 *from* the target, if it is accessed. So make that happen.
3727 In no case return the target itself. */
3728 if (! MEM_VOLATILE_P (target) && want_value)
3729 dont_return_target = 1;
3731 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3732 && GET_MODE (target) != BLKmode)
3733 /* If target is in memory and caller wants value in a register instead,
3734 arrange that. Pass TARGET as target for expand_expr so that,
3735 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3736 We know expand_expr will not use the target in that case.
3737 Don't do this if TARGET is volatile because we are supposed
3738 to write it and then read it. */
3740 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3741 GET_MODE (target), 0);
3742 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3743 temp = copy_to_reg (temp);
3744 dont_return_target = 1;
3746 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3747 /* If this is an scalar in a register that is stored in a wider mode
3748 than the declared mode, compute the result into its declared mode
3749 and then convert to the wider mode. Our value is the computed
3752 /* If we don't want a value, we can do the conversion inside EXP,
3753 which will often result in some optimizations. Do the conversion
3754 in two steps: first change the signedness, if needed, then
3755 the extend. But don't do this if the type of EXP is a subtype
3756 of something else since then the conversion might involve
3757 more than just converting modes. */
3758 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3759 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3761 if (TREE_UNSIGNED (TREE_TYPE (exp))
3762 != SUBREG_PROMOTED_UNSIGNED_P (target))
3765 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3769 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3770 SUBREG_PROMOTED_UNSIGNED_P (target)),
3774 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3776 /* If TEMP is a volatile MEM and we want a result value, make
3777 the access now so it gets done only once. Likewise if
3778 it contains TARGET. */
3779 if (GET_CODE (temp) == MEM && want_value
3780 && (MEM_VOLATILE_P (temp)
3781 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3782 temp = copy_to_reg (temp);
3784 /* If TEMP is a VOIDmode constant, use convert_modes to make
3785 sure that we properly convert it. */
3786 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3787 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3788 TYPE_MODE (TREE_TYPE (exp)), temp,
3789 SUBREG_PROMOTED_UNSIGNED_P (target));
3791 convert_move (SUBREG_REG (target), temp,
3792 SUBREG_PROMOTED_UNSIGNED_P (target));
3793 return want_value ? temp : NULL_RTX;
3797 temp = expand_expr (exp, target, GET_MODE (target), 0);
3798 /* Return TARGET if it's a specified hardware register.
3799 If TARGET is a volatile mem ref, either return TARGET
3800 or return a reg copied *from* TARGET; ANSI requires this.
3802 Otherwise, if TEMP is not TARGET, return TEMP
3803 if it is constant (for efficiency),
3804 or if we really want the correct value. */
3805 if (!(target && GET_CODE (target) == REG
3806 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3807 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3808 && ! rtx_equal_p (temp, target)
3809 && (CONSTANT_P (temp) || want_value))
3810 dont_return_target = 1;
3813 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3814 the same as that of TARGET, adjust the constant. This is needed, for
3815 example, in case it is a CONST_DOUBLE and we want only a word-sized
3817 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3818 && TREE_CODE (exp) != ERROR_MARK
3819 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3820 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3821 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3823 if (current_function_check_memory_usage
3824 && GET_CODE (target) == MEM
3825 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3827 if (GET_CODE (temp) == MEM)
3828 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3829 XEXP (target, 0), Pmode,
3830 XEXP (temp, 0), Pmode,
3831 expr_size (exp), TYPE_MODE (sizetype));
3833 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3834 XEXP (target, 0), Pmode,
3835 expr_size (exp), TYPE_MODE (sizetype),
3836 GEN_INT (MEMORY_USE_WO),
3837 TYPE_MODE (integer_type_node));
3840 /* If value was not generated in the target, store it there.
3841 Convert the value to TARGET's type first if nec. */
3842 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3843 one or both of them are volatile memory refs, we have to distinguish
3845 - expand_expr has used TARGET. In this case, we must not generate
3846 another copy. This can be detected by TARGET being equal according
3848 - expand_expr has not used TARGET - that means that the source just
3849 happens to have the same RTX form. Since temp will have been created
3850 by expand_expr, it will compare unequal according to == .
3851 We must generate a copy in this case, to reach the correct number
3852 of volatile memory references. */
3854 if ((! rtx_equal_p (temp, target)
3855 || (temp != target && (side_effects_p (temp)
3856 || side_effects_p (target))))
3857 && TREE_CODE (exp) != ERROR_MARK)
3859 target = protect_from_queue (target, 1);
3860 if (GET_MODE (temp) != GET_MODE (target)
3861 && GET_MODE (temp) != VOIDmode)
3863 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3864 if (dont_return_target)
3866 /* In this case, we will return TEMP,
3867 so make sure it has the proper mode.
3868 But don't forget to store the value into TARGET. */
3869 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3870 emit_move_insn (target, temp);
3873 convert_move (target, temp, unsignedp);
3876 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3878 /* Handle copying a string constant into an array.
3879 The string constant may be shorter than the array.
3880 So copy just the string's actual length, and clear the rest. */
3884 /* Get the size of the data type of the string,
3885 which is actually the size of the target. */
3886 size = expr_size (exp);
3887 if (GET_CODE (size) == CONST_INT
3888 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3889 emit_block_move (target, temp, size,
3890 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3893 /* Compute the size of the data to copy from the string. */
3895 = size_binop (MIN_EXPR,
3896 make_tree (sizetype, size),
3898 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3899 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3903 /* Copy that much. */
3904 emit_block_move (target, temp, copy_size_rtx,
3905 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3907 /* Figure out how much is left in TARGET that we have to clear.
3908 Do all calculations in ptr_mode. */
3910 addr = XEXP (target, 0);
3911 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3913 if (GET_CODE (copy_size_rtx) == CONST_INT)
3915 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3916 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3920 addr = force_reg (ptr_mode, addr);
3921 addr = expand_binop (ptr_mode, add_optab, addr,
3922 copy_size_rtx, NULL_RTX, 0,
3925 size = expand_binop (ptr_mode, sub_optab, size,
3926 copy_size_rtx, NULL_RTX, 0,
3929 label = gen_label_rtx ();
3930 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3931 GET_MODE (size), 0, 0, label);
3934 if (size != const0_rtx)
3936 /* Be sure we can write on ADDR. */
3937 if (current_function_check_memory_usage)
3938 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3940 size, TYPE_MODE (sizetype),
3941 GEN_INT (MEMORY_USE_WO),
3942 TYPE_MODE (integer_type_node));
3943 #ifdef TARGET_MEM_FUNCTIONS
3944 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3946 const0_rtx, TYPE_MODE (integer_type_node),
3947 convert_to_mode (TYPE_MODE (sizetype),
3949 TREE_UNSIGNED (sizetype)),
3950 TYPE_MODE (sizetype));
3952 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3954 convert_to_mode (TYPE_MODE (integer_type_node),
3956 TREE_UNSIGNED (integer_type_node)),
3957 TYPE_MODE (integer_type_node));
3965 /* Handle calls that return values in multiple non-contiguous locations.
3966 The Irix 6 ABI has examples of this. */
3967 else if (GET_CODE (target) == PARALLEL)
3968 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3969 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3970 else if (GET_MODE (temp) == BLKmode)
3971 emit_block_move (target, temp, expr_size (exp),
3972 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3974 emit_move_insn (target, temp);
3977 /* If we don't want a value, return NULL_RTX. */
3981 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3982 ??? The latter test doesn't seem to make sense. */
3983 else if (dont_return_target && GET_CODE (temp) != MEM)
3986 /* Return TARGET itself if it is a hard register. */
3987 else if (want_value && GET_MODE (target) != BLKmode
3988 && ! (GET_CODE (target) == REG
3989 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3990 return copy_to_reg (target);
3996 /* Return 1 if EXP just contains zeros. */
4004 switch (TREE_CODE (exp))
4008 case NON_LVALUE_EXPR:
4009 return is_zeros_p (TREE_OPERAND (exp, 0));
4012 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
4016 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4019 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4022 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4023 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4024 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4025 if (! is_zeros_p (TREE_VALUE (elt)))
4035 /* Return 1 if EXP contains mostly (3/4) zeros. */
4038 mostly_zeros_p (exp)
4041 if (TREE_CODE (exp) == CONSTRUCTOR)
4043 int elts = 0, zeros = 0;
4044 tree elt = CONSTRUCTOR_ELTS (exp);
4045 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4047 /* If there are no ranges of true bits, it is all zero. */
4048 return elt == NULL_TREE;
4050 for (; elt; elt = TREE_CHAIN (elt))
4052 /* We do not handle the case where the index is a RANGE_EXPR,
4053 so the statistic will be somewhat inaccurate.
4054 We do make a more accurate count in store_constructor itself,
4055 so since this function is only used for nested array elements,
4056 this should be close enough. */
4057 if (mostly_zeros_p (TREE_VALUE (elt)))
4062 return 4 * zeros >= 3 * elts;
4065 return is_zeros_p (exp);
4068 /* Helper function for store_constructor.
4069 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4070 TYPE is the type of the CONSTRUCTOR, not the element type.
4071 CLEARED is as for store_constructor.
4073 This provides a recursive shortcut back to store_constructor when it isn't
4074 necessary to go through store_field. This is so that we can pass through
4075 the cleared field to let store_constructor know that we may not have to
4076 clear a substructure if the outer structure has already been cleared. */
4079 store_constructor_field (target, bitsize, bitpos,
4080 mode, exp, type, cleared)
4082 int bitsize, bitpos;
4083 enum machine_mode mode;
4087 if (TREE_CODE (exp) == CONSTRUCTOR
4088 && bitpos % BITS_PER_UNIT == 0
4089 /* If we have a non-zero bitpos for a register target, then we just
4090 let store_field do the bitfield handling. This is unlikely to
4091 generate unnecessary clear instructions anyways. */
4092 && (bitpos == 0 || GET_CODE (target) == MEM))
4095 target = change_address (target, VOIDmode,
4096 plus_constant (XEXP (target, 0),
4097 bitpos / BITS_PER_UNIT));
4098 store_constructor (exp, target, cleared);
4101 store_field (target, bitsize, bitpos, mode, exp,
4102 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
4103 int_size_in_bytes (type), 0);
4106 /* Store the value of constructor EXP into the rtx TARGET.
4107 TARGET is either a REG or a MEM.
4108 CLEARED is true if TARGET is known to have been zero'd. */
4111 store_constructor (exp, target, cleared)
4116 tree type = TREE_TYPE (exp);
4117 rtx exp_size = expr_size (exp);
4119 /* We know our target cannot conflict, since safe_from_p has been called. */
4121 /* Don't try copying piece by piece into a hard register
4122 since that is vulnerable to being clobbered by EXP.
4123 Instead, construct in a pseudo register and then copy it all. */
4124 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4126 rtx temp = gen_reg_rtx (GET_MODE (target));
4127 store_constructor (exp, temp, 0);
4128 emit_move_insn (target, temp);
4133 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4134 || TREE_CODE (type) == QUAL_UNION_TYPE)
4138 /* Inform later passes that the whole union value is dead. */
4139 if (TREE_CODE (type) == UNION_TYPE
4140 || TREE_CODE (type) == QUAL_UNION_TYPE)
4141 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4143 /* If we are building a static constructor into a register,
4144 set the initial value as zero so we can fold the value into
4145 a constant. But if more than one register is involved,
4146 this probably loses. */
4147 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4148 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4151 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4156 /* If the constructor has fewer fields than the structure
4157 or if we are initializing the structure to mostly zeros,
4158 clear the whole structure first. */
4159 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4160 != list_length (TYPE_FIELDS (type)))
4161 || mostly_zeros_p (exp))
4164 clear_storage (target, expr_size (exp),
4165 TYPE_ALIGN (type) / BITS_PER_UNIT);
4170 /* Inform later passes that the old value is dead. */
4171 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4173 /* Store each element of the constructor into
4174 the corresponding field of TARGET. */
4176 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4178 register tree field = TREE_PURPOSE (elt);
4179 tree value = TREE_VALUE (elt);
4180 register enum machine_mode mode;
4184 tree pos, constant = 0, offset = 0;
4185 rtx to_rtx = target;
4187 /* Just ignore missing fields.
4188 We cleared the whole structure, above,
4189 if any fields are missing. */
4193 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4196 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4197 unsignedp = TREE_UNSIGNED (field);
4198 mode = DECL_MODE (field);
4199 if (DECL_BIT_FIELD (field))
4202 pos = DECL_FIELD_BITPOS (field);
4203 if (TREE_CODE (pos) == INTEGER_CST)
4205 else if (TREE_CODE (pos) == PLUS_EXPR
4206 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4207 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4212 bitpos = TREE_INT_CST_LOW (constant);
4218 if (contains_placeholder_p (offset))
4219 offset = build (WITH_RECORD_EXPR, sizetype,
4220 offset, make_tree (TREE_TYPE (exp), target));
4222 offset = size_binop (FLOOR_DIV_EXPR, offset,
4223 size_int (BITS_PER_UNIT));
4225 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4226 if (GET_CODE (to_rtx) != MEM)
4229 if (GET_MODE (offset_rtx) != ptr_mode)
4231 #ifdef POINTERS_EXTEND_UNSIGNED
4232 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4234 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4239 = change_address (to_rtx, VOIDmode,
4240 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4241 force_reg (ptr_mode, offset_rtx)));
4243 if (TREE_READONLY (field))
4245 if (GET_CODE (to_rtx) == MEM)
4246 to_rtx = copy_rtx (to_rtx);
4248 RTX_UNCHANGING_P (to_rtx) = 1;
4251 #ifdef WORD_REGISTER_OPERATIONS
4252 /* If this initializes a field that is smaller than a word, at the
4253 start of a word, try to widen it to a full word.
4254 This special case allows us to output C++ member function
4255 initializations in a form that the optimizers can understand. */
4257 && GET_CODE (target) == REG
4258 && bitsize < BITS_PER_WORD
4259 && bitpos % BITS_PER_WORD == 0
4260 && GET_MODE_CLASS (mode) == MODE_INT
4261 && TREE_CODE (value) == INTEGER_CST
4262 && GET_CODE (exp_size) == CONST_INT
4263 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4265 tree type = TREE_TYPE (value);
4266 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4268 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4269 value = convert (type, value);
4271 if (BYTES_BIG_ENDIAN)
4273 = fold (build (LSHIFT_EXPR, type, value,
4274 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4275 bitsize = BITS_PER_WORD;
4279 store_constructor_field (to_rtx, bitsize, bitpos,
4280 mode, value, type, cleared);
4283 else if (TREE_CODE (type) == ARRAY_TYPE)
4288 tree domain = TYPE_DOMAIN (type);
4289 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4290 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4291 tree elttype = TREE_TYPE (type);
4293 /* If the constructor has fewer elements than the array,
4294 clear the whole array first. Similarly if this is
4295 static constructor of a non-BLKmode object. */
4296 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4300 HOST_WIDE_INT count = 0, zero_count = 0;
4302 /* This loop is a more accurate version of the loop in
4303 mostly_zeros_p (it handles RANGE_EXPR in an index).
4304 It is also needed to check for missing elements. */
4305 for (elt = CONSTRUCTOR_ELTS (exp);
4307 elt = TREE_CHAIN (elt))
4309 tree index = TREE_PURPOSE (elt);
4310 HOST_WIDE_INT this_node_count;
4311 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4313 tree lo_index = TREE_OPERAND (index, 0);
4314 tree hi_index = TREE_OPERAND (index, 1);
4315 if (TREE_CODE (lo_index) != INTEGER_CST
4316 || TREE_CODE (hi_index) != INTEGER_CST)
4321 this_node_count = TREE_INT_CST_LOW (hi_index)
4322 - TREE_INT_CST_LOW (lo_index) + 1;
4325 this_node_count = 1;
4326 count += this_node_count;
4327 if (mostly_zeros_p (TREE_VALUE (elt)))
4328 zero_count += this_node_count;
4330 /* Clear the entire array first if there are any missing elements,
4331 or if the incidence of zero elements is >= 75%. */
4332 if (count < maxelt - minelt + 1
4333 || 4 * zero_count >= 3 * count)
4339 clear_storage (target, expr_size (exp),
4340 TYPE_ALIGN (type) / BITS_PER_UNIT);
4344 /* Inform later passes that the old value is dead. */
4345 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4347 /* Store each element of the constructor into
4348 the corresponding element of TARGET, determined
4349 by counting the elements. */
4350 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4352 elt = TREE_CHAIN (elt), i++)
4354 register enum machine_mode mode;
4358 tree value = TREE_VALUE (elt);
4359 tree index = TREE_PURPOSE (elt);
4360 rtx xtarget = target;
4362 if (cleared && is_zeros_p (value))
4365 mode = TYPE_MODE (elttype);
4366 bitsize = GET_MODE_BITSIZE (mode);
4367 unsignedp = TREE_UNSIGNED (elttype);
4369 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4371 tree lo_index = TREE_OPERAND (index, 0);
4372 tree hi_index = TREE_OPERAND (index, 1);
4373 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4374 struct nesting *loop;
4375 HOST_WIDE_INT lo, hi, count;
4378 /* If the range is constant and "small", unroll the loop. */
4379 if (TREE_CODE (lo_index) == INTEGER_CST
4380 && TREE_CODE (hi_index) == INTEGER_CST
4381 && (lo = TREE_INT_CST_LOW (lo_index),
4382 hi = TREE_INT_CST_LOW (hi_index),
4383 count = hi - lo + 1,
4384 (GET_CODE (target) != MEM
4386 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4387 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4390 lo -= minelt; hi -= minelt;
4391 for (; lo <= hi; lo++)
4393 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4394 store_constructor_field (target, bitsize, bitpos,
4395 mode, value, type, cleared);
4400 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4401 loop_top = gen_label_rtx ();
4402 loop_end = gen_label_rtx ();
4404 unsignedp = TREE_UNSIGNED (domain);
4406 index = build_decl (VAR_DECL, NULL_TREE, domain);
4408 DECL_RTL (index) = index_r
4409 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4412 if (TREE_CODE (value) == SAVE_EXPR
4413 && SAVE_EXPR_RTL (value) == 0)
4415 /* Make sure value gets expanded once before the
4417 expand_expr (value, const0_rtx, VOIDmode, 0);
4420 store_expr (lo_index, index_r, 0);
4421 loop = expand_start_loop (0);
4423 /* Assign value to element index. */
4424 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4425 size_int (BITS_PER_UNIT));
4426 position = size_binop (MULT_EXPR,
4427 size_binop (MINUS_EXPR, index,
4428 TYPE_MIN_VALUE (domain)),
4430 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4431 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4432 xtarget = change_address (target, mode, addr);
4433 if (TREE_CODE (value) == CONSTRUCTOR)
4434 store_constructor (value, xtarget, cleared);
4436 store_expr (value, xtarget, 0);
4438 expand_exit_loop_if_false (loop,
4439 build (LT_EXPR, integer_type_node,
4442 expand_increment (build (PREINCREMENT_EXPR,
4444 index, integer_one_node), 0, 0);
4446 emit_label (loop_end);
4448 /* Needed by stupid register allocation. to extend the
4449 lifetime of pseudo-regs used by target past the end
4451 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4454 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4455 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4461 index = size_int (i);
4464 index = size_binop (MINUS_EXPR, index,
4465 TYPE_MIN_VALUE (domain));
4466 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4467 size_int (BITS_PER_UNIT));
4468 position = size_binop (MULT_EXPR, index, position);
4469 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4470 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4471 xtarget = change_address (target, mode, addr);
4472 store_expr (value, xtarget, 0);
4477 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4478 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4480 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4481 store_constructor_field (target, bitsize, bitpos,
4482 mode, value, type, cleared);
4486 /* set constructor assignments */
4487 else if (TREE_CODE (type) == SET_TYPE)
4489 tree elt = CONSTRUCTOR_ELTS (exp);
4490 int nbytes = int_size_in_bytes (type), nbits;
4491 tree domain = TYPE_DOMAIN (type);
4492 tree domain_min, domain_max, bitlength;
4494 /* The default implementation strategy is to extract the constant
4495 parts of the constructor, use that to initialize the target,
4496 and then "or" in whatever non-constant ranges we need in addition.
4498 If a large set is all zero or all ones, it is
4499 probably better to set it using memset (if available) or bzero.
4500 Also, if a large set has just a single range, it may also be
4501 better to first clear all the first clear the set (using
4502 bzero/memset), and set the bits we want. */
4504 /* Check for all zeros. */
4505 if (elt == NULL_TREE)
4508 clear_storage (target, expr_size (exp),
4509 TYPE_ALIGN (type) / BITS_PER_UNIT);
4513 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4514 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4515 bitlength = size_binop (PLUS_EXPR,
4516 size_binop (MINUS_EXPR, domain_max, domain_min),
4519 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4521 nbits = TREE_INT_CST_LOW (bitlength);
4523 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4524 are "complicated" (more than one range), initialize (the
4525 constant parts) by copying from a constant. */
4526 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4527 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4529 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4530 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4531 char *bit_buffer = (char *) alloca (nbits);
4532 HOST_WIDE_INT word = 0;
4535 int offset = 0; /* In bytes from beginning of set. */
4536 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4539 if (bit_buffer[ibit])
4541 if (BYTES_BIG_ENDIAN)
4542 word |= (1 << (set_word_size - 1 - bit_pos));
4544 word |= 1 << bit_pos;
4547 if (bit_pos >= set_word_size || ibit == nbits)
4549 if (word != 0 || ! cleared)
4551 rtx datum = GEN_INT (word);
4553 /* The assumption here is that it is safe to use
4554 XEXP if the set is multi-word, but not if
4555 it's single-word. */
4556 if (GET_CODE (target) == MEM)
4558 to_rtx = plus_constant (XEXP (target, 0), offset);
4559 to_rtx = change_address (target, mode, to_rtx);
4561 else if (offset == 0)
4565 emit_move_insn (to_rtx, datum);
4571 offset += set_word_size / BITS_PER_UNIT;
4577 /* Don't bother clearing storage if the set is all ones. */
4578 if (TREE_CHAIN (elt) != NULL_TREE
4579 || (TREE_PURPOSE (elt) == NULL_TREE
4581 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4582 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4583 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4584 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4586 clear_storage (target, expr_size (exp),
4587 TYPE_ALIGN (type) / BITS_PER_UNIT);
4590 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4592 /* start of range of element or NULL */
4593 tree startbit = TREE_PURPOSE (elt);
4594 /* end of range of element, or element value */
4595 tree endbit = TREE_VALUE (elt);
4596 #ifdef TARGET_MEM_FUNCTIONS
4597 HOST_WIDE_INT startb, endb;
4599 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4601 bitlength_rtx = expand_expr (bitlength,
4602 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4604 /* handle non-range tuple element like [ expr ] */
4605 if (startbit == NULL_TREE)
4607 startbit = save_expr (endbit);
4610 startbit = convert (sizetype, startbit);
4611 endbit = convert (sizetype, endbit);
4612 if (! integer_zerop (domain_min))
4614 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4615 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4617 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4618 EXPAND_CONST_ADDRESS);
4619 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4620 EXPAND_CONST_ADDRESS);
4624 targetx = assign_stack_temp (GET_MODE (target),
4625 GET_MODE_SIZE (GET_MODE (target)),
4627 emit_move_insn (targetx, target);
4629 else if (GET_CODE (target) == MEM)
4634 #ifdef TARGET_MEM_FUNCTIONS
4635 /* Optimization: If startbit and endbit are
4636 constants divisible by BITS_PER_UNIT,
4637 call memset instead. */
4638 if (TREE_CODE (startbit) == INTEGER_CST
4639 && TREE_CODE (endbit) == INTEGER_CST
4640 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4641 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4643 emit_library_call (memset_libfunc, 0,
4645 plus_constant (XEXP (targetx, 0),
4646 startb / BITS_PER_UNIT),
4648 constm1_rtx, TYPE_MODE (integer_type_node),
4649 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4650 TYPE_MODE (sizetype));
4655 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4656 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4657 bitlength_rtx, TYPE_MODE (sizetype),
4658 startbit_rtx, TYPE_MODE (sizetype),
4659 endbit_rtx, TYPE_MODE (sizetype));
4662 emit_move_insn (target, targetx);
4670 /* Store the value of EXP (an expression tree)
4671 into a subfield of TARGET which has mode MODE and occupies
4672 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4673 If MODE is VOIDmode, it means that we are storing into a bit-field.
4675 If VALUE_MODE is VOIDmode, return nothing in particular.
4676 UNSIGNEDP is not used in this case.
4678 Otherwise, return an rtx for the value stored. This rtx
4679 has mode VALUE_MODE if that is convenient to do.
4680 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4682 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4683 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4685 ALIAS_SET is the alias set for the destination. This value will
4686 (in general) be different from that for TARGET, since TARGET is a
4687 reference to the containing structure. */
4690 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4691 unsignedp, align, total_size, alias_set)
4693 int bitsize, bitpos;
4694 enum machine_mode mode;
4696 enum machine_mode value_mode;
4702 HOST_WIDE_INT width_mask = 0;
4704 if (TREE_CODE (exp) == ERROR_MARK)
4707 if (bitsize < HOST_BITS_PER_WIDE_INT)
4708 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4710 /* If we are storing into an unaligned field of an aligned union that is
4711 in a register, we may have the mode of TARGET being an integer mode but
4712 MODE == BLKmode. In that case, get an aligned object whose size and
4713 alignment are the same as TARGET and store TARGET into it (we can avoid
4714 the store if the field being stored is the entire width of TARGET). Then
4715 call ourselves recursively to store the field into a BLKmode version of
4716 that object. Finally, load from the object into TARGET. This is not
4717 very efficient in general, but should only be slightly more expensive
4718 than the otherwise-required unaligned accesses. Perhaps this can be
4719 cleaned up later. */
4722 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4724 rtx object = assign_stack_temp (GET_MODE (target),
4725 GET_MODE_SIZE (GET_MODE (target)), 0);
4726 rtx blk_object = copy_rtx (object);
4728 MEM_SET_IN_STRUCT_P (object, 1);
4729 MEM_SET_IN_STRUCT_P (blk_object, 1);
4730 PUT_MODE (blk_object, BLKmode);
4732 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4733 emit_move_insn (object, target);
4735 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4736 align, total_size, alias_set);
4738 /* Even though we aren't returning target, we need to
4739 give it the updated value. */
4740 emit_move_insn (target, object);
4745 /* If the structure is in a register or if the component
4746 is a bit field, we cannot use addressing to access it.
4747 Use bit-field techniques or SUBREG to store in it. */
4749 if (mode == VOIDmode
4750 || (mode != BLKmode && ! direct_store[(int) mode]
4751 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4752 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4753 || GET_CODE (target) == REG
4754 || GET_CODE (target) == SUBREG
4755 /* If the field isn't aligned enough to store as an ordinary memref,
4756 store it as a bit field. */
4757 || (SLOW_UNALIGNED_ACCESS
4758 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4759 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4761 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4763 /* If BITSIZE is narrower than the size of the type of EXP
4764 we will be narrowing TEMP. Normally, what's wanted are the
4765 low-order bits. However, if EXP's type is a record and this is
4766 big-endian machine, we want the upper BITSIZE bits. */
4767 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4768 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4769 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4770 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4771 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4775 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4777 if (mode != VOIDmode && mode != BLKmode
4778 && mode != TYPE_MODE (TREE_TYPE (exp)))
4779 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4781 /* If the modes of TARGET and TEMP are both BLKmode, both
4782 must be in memory and BITPOS must be aligned on a byte
4783 boundary. If so, we simply do a block copy. */
4784 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4786 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4787 || bitpos % BITS_PER_UNIT != 0)
4790 target = change_address (target, VOIDmode,
4791 plus_constant (XEXP (target, 0),
4792 bitpos / BITS_PER_UNIT));
4794 emit_block_move (target, temp,
4795 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4799 return value_mode == VOIDmode ? const0_rtx : target;
4802 /* Store the value in the bitfield. */
4803 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4804 if (value_mode != VOIDmode)
4806 /* The caller wants an rtx for the value. */
4807 /* If possible, avoid refetching from the bitfield itself. */
4809 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4812 enum machine_mode tmode;
4815 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4816 tmode = GET_MODE (temp);
4817 if (tmode == VOIDmode)
4819 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4820 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4821 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4823 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4824 NULL_RTX, value_mode, 0, align,
4831 rtx addr = XEXP (target, 0);
4834 /* If a value is wanted, it must be the lhs;
4835 so make the address stable for multiple use. */
4837 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4838 && ! CONSTANT_ADDRESS_P (addr)
4839 /* A frame-pointer reference is already stable. */
4840 && ! (GET_CODE (addr) == PLUS
4841 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4842 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4843 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4844 addr = copy_to_reg (addr);
4846 /* Now build a reference to just the desired component. */
4848 to_rtx = copy_rtx (change_address (target, mode,
4849 plus_constant (addr,
4851 / BITS_PER_UNIT))));
4852 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4853 MEM_ALIAS_SET (to_rtx) = alias_set;
4855 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4859 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4860 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4861 ARRAY_REFs and find the ultimate containing object, which we return.
4863 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4864 bit position, and *PUNSIGNEDP to the signedness of the field.
4865 If the position of the field is variable, we store a tree
4866 giving the variable offset (in units) in *POFFSET.
4867 This offset is in addition to the bit position.
4868 If the position is not variable, we store 0 in *POFFSET.
4869 We set *PALIGNMENT to the alignment in bytes of the address that will be
4870 computed. This is the alignment of the thing we return if *POFFSET
4871 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4873 If any of the extraction expressions is volatile,
4874 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4876 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4877 is a mode that can be used to access the field. In that case, *PBITSIZE
4880 If the field describes a variable-sized object, *PMODE is set to
4881 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4882 this case, but the address of the object can be found. */
4885 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4886 punsignedp, pvolatilep, palignment)
4891 enum machine_mode *pmode;
4896 tree orig_exp = exp;
4898 enum machine_mode mode = VOIDmode;
4899 tree offset = integer_zero_node;
4900 unsigned int alignment = BIGGEST_ALIGNMENT;
4902 if (TREE_CODE (exp) == COMPONENT_REF)
4904 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4905 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4906 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4907 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4909 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4911 size_tree = TREE_OPERAND (exp, 1);
4912 *punsignedp = TREE_UNSIGNED (exp);
4916 mode = TYPE_MODE (TREE_TYPE (exp));
4917 if (mode == BLKmode)
4918 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4920 *pbitsize = GET_MODE_BITSIZE (mode);
4921 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4926 if (TREE_CODE (size_tree) != INTEGER_CST)
4927 mode = BLKmode, *pbitsize = -1;
4929 *pbitsize = TREE_INT_CST_LOW (size_tree);
4932 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4933 and find the ultimate containing object. */
4939 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4941 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4942 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4943 : TREE_OPERAND (exp, 2));
4944 tree constant = integer_zero_node, var = pos;
4946 /* If this field hasn't been filled in yet, don't go
4947 past it. This should only happen when folding expressions
4948 made during type construction. */
4952 /* Assume here that the offset is a multiple of a unit.
4953 If not, there should be an explicitly added constant. */
4954 if (TREE_CODE (pos) == PLUS_EXPR
4955 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4956 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4957 else if (TREE_CODE (pos) == INTEGER_CST)
4958 constant = pos, var = integer_zero_node;
4960 *pbitpos += TREE_INT_CST_LOW (constant);
4961 offset = size_binop (PLUS_EXPR, offset,
4962 size_binop (EXACT_DIV_EXPR, var,
4963 size_int (BITS_PER_UNIT)));
4966 else if (TREE_CODE (exp) == ARRAY_REF)
4968 /* This code is based on the code in case ARRAY_REF in expand_expr
4969 below. We assume here that the size of an array element is
4970 always an integral multiple of BITS_PER_UNIT. */
4972 tree index = TREE_OPERAND (exp, 1);
4973 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4975 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4976 tree index_type = TREE_TYPE (index);
4979 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4981 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4983 index_type = TREE_TYPE (index);
4986 /* Optimize the special-case of a zero lower bound.
4988 We convert the low_bound to sizetype to avoid some problems
4989 with constant folding. (E.g. suppose the lower bound is 1,
4990 and its mode is QI. Without the conversion, (ARRAY
4991 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4992 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4994 But sizetype isn't quite right either (especially if
4995 the lowbound is negative). FIXME */
4997 if (! integer_zerop (low_bound))
4998 index = fold (build (MINUS_EXPR, index_type, index,
4999 convert (sizetype, low_bound)));
5001 if (TREE_CODE (index) == INTEGER_CST)
5003 index = convert (sbitsizetype, index);
5004 index_type = TREE_TYPE (index);
5007 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5008 convert (sbitsizetype,
5009 TYPE_SIZE (TREE_TYPE (exp)))));
5011 if (TREE_CODE (xindex) == INTEGER_CST
5012 && TREE_INT_CST_HIGH (xindex) == 0)
5013 *pbitpos += TREE_INT_CST_LOW (xindex);
5016 /* Either the bit offset calculated above is not constant, or
5017 it overflowed. In either case, redo the multiplication
5018 against the size in units. This is especially important
5019 in the non-constant case to avoid a division at runtime. */
5020 xindex = fold (build (MULT_EXPR, ssizetype, index,
5022 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5024 if (contains_placeholder_p (xindex))
5025 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
5027 offset = size_binop (PLUS_EXPR, offset, xindex);
5030 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5031 && ! ((TREE_CODE (exp) == NOP_EXPR
5032 || TREE_CODE (exp) == CONVERT_EXPR)
5033 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
5034 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5036 && (TYPE_MODE (TREE_TYPE (exp))
5037 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5040 /* If any reference in the chain is volatile, the effect is volatile. */
5041 if (TREE_THIS_VOLATILE (exp))
5044 /* If the offset is non-constant already, then we can't assume any
5045 alignment more than the alignment here. */
5046 if (! integer_zerop (offset))
5047 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5049 exp = TREE_OPERAND (exp, 0);
5052 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5053 alignment = MIN (alignment, DECL_ALIGN (exp));
5054 else if (TREE_TYPE (exp) != 0)
5055 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5057 if (integer_zerop (offset))
5060 if (offset != 0 && contains_placeholder_p (offset))
5061 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5065 *palignment = alignment / BITS_PER_UNIT;
5069 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5070 static enum memory_use_mode
5071 get_memory_usage_from_modifier (modifier)
5072 enum expand_modifier modifier;
5078 return MEMORY_USE_RO;
5080 case EXPAND_MEMORY_USE_WO:
5081 return MEMORY_USE_WO;
5083 case EXPAND_MEMORY_USE_RW:
5084 return MEMORY_USE_RW;
5086 case EXPAND_MEMORY_USE_DONT:
5087 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5088 MEMORY_USE_DONT, because they are modifiers to a call of
5089 expand_expr in the ADDR_EXPR case of expand_expr. */
5090 case EXPAND_CONST_ADDRESS:
5091 case EXPAND_INITIALIZER:
5092 return MEMORY_USE_DONT;
5093 case EXPAND_MEMORY_USE_BAD:
5099 /* Given an rtx VALUE that may contain additions and multiplications,
5100 return an equivalent value that just refers to a register or memory.
5101 This is done by generating instructions to perform the arithmetic
5102 and returning a pseudo-register containing the value.
5104 The returned value may be a REG, SUBREG, MEM or constant. */
5107 force_operand (value, target)
5110 register optab binoptab = 0;
5111 /* Use a temporary to force order of execution of calls to
5115 /* Use subtarget as the target for operand 0 of a binary operation. */
5116 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5118 /* Check for a PIC address load. */
5120 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5121 && XEXP (value, 0) == pic_offset_table_rtx
5122 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5123 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5124 || GET_CODE (XEXP (value, 1)) == CONST))
5127 subtarget = gen_reg_rtx (GET_MODE (value));
5128 emit_move_insn (subtarget, value);
5132 if (GET_CODE (value) == PLUS)
5133 binoptab = add_optab;
5134 else if (GET_CODE (value) == MINUS)
5135 binoptab = sub_optab;
5136 else if (GET_CODE (value) == MULT)
5138 op2 = XEXP (value, 1);
5139 if (!CONSTANT_P (op2)
5140 && !(GET_CODE (op2) == REG && op2 != subtarget))
5142 tmp = force_operand (XEXP (value, 0), subtarget);
5143 return expand_mult (GET_MODE (value), tmp,
5144 force_operand (op2, NULL_RTX),
5150 op2 = XEXP (value, 1);
5151 if (!CONSTANT_P (op2)
5152 && !(GET_CODE (op2) == REG && op2 != subtarget))
5154 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5156 binoptab = add_optab;
5157 op2 = negate_rtx (GET_MODE (value), op2);
5160 /* Check for an addition with OP2 a constant integer and our first
5161 operand a PLUS of a virtual register and something else. In that
5162 case, we want to emit the sum of the virtual register and the
5163 constant first and then add the other value. This allows virtual
5164 register instantiation to simply modify the constant rather than
5165 creating another one around this addition. */
5166 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5167 && GET_CODE (XEXP (value, 0)) == PLUS
5168 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5169 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5170 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER
5171 && (!flag_propolice_protection
5172 || XEXP (XEXP (value, 0), 0) != virtual_stack_vars_rtx))
5174 rtx temp = expand_binop (GET_MODE (value), binoptab,
5175 XEXP (XEXP (value, 0), 0), op2,
5176 subtarget, 0, OPTAB_LIB_WIDEN);
5177 return expand_binop (GET_MODE (value), binoptab, temp,
5178 force_operand (XEXP (XEXP (value, 0), 1), 0),
5179 target, 0, OPTAB_LIB_WIDEN);
5182 tmp = force_operand (XEXP (value, 0), subtarget);
5183 return expand_binop (GET_MODE (value), binoptab, tmp,
5184 force_operand (op2, NULL_RTX),
5185 target, 0, OPTAB_LIB_WIDEN);
5186 /* We give UNSIGNEDP = 0 to expand_binop
5187 because the only operations we are expanding here are signed ones. */
5192 /* Subroutine of expand_expr:
5193 save the non-copied parts (LIST) of an expr (LHS), and return a list
5194 which can restore these values to their previous values,
5195 should something modify their storage. */
5198 save_noncopied_parts (lhs, list)
5205 for (tail = list; tail; tail = TREE_CHAIN (tail))
5206 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5207 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5210 tree part = TREE_VALUE (tail);
5211 tree part_type = TREE_TYPE (part);
5212 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5213 rtx target = assign_temp (part_type, 0, 1, 1);
5214 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5215 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5216 parts = tree_cons (to_be_saved,
5217 build (RTL_EXPR, part_type, NULL_TREE,
5220 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5225 /* Subroutine of expand_expr:
5226 record the non-copied parts (LIST) of an expr (LHS), and return a list
5227 which specifies the initial values of these parts. */
5230 init_noncopied_parts (lhs, list)
5237 for (tail = list; tail; tail = TREE_CHAIN (tail))
5238 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5239 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5240 else if (TREE_PURPOSE (tail))
5242 tree part = TREE_VALUE (tail);
5243 tree part_type = TREE_TYPE (part);
5244 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5245 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5250 /* Subroutine of expand_expr: return nonzero iff there is no way that
5251 EXP can reference X, which is being modified. TOP_P is nonzero if this
5252 call is going to be used to determine whether we need a temporary
5253 for EXP, as opposed to a recursive call to this function.
5255 It is always safe for this routine to return zero since it merely
5256 searches for optimization opportunities. */
5259 safe_from_p (x, exp, top_p)
5266 static int save_expr_count;
5267 static int save_expr_size = 0;
5268 static tree *save_expr_rewritten;
5269 static tree save_expr_trees[256];
5272 /* If EXP has varying size, we MUST use a target since we currently
5273 have no way of allocating temporaries of variable size
5274 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5275 So we assume here that something at a higher level has prevented a
5276 clash. This is somewhat bogus, but the best we can do. Only
5277 do this when X is BLKmode and when we are at the top level. */
5278 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5279 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5280 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5281 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5282 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5284 && GET_MODE (x) == BLKmode))
5287 if (top_p && save_expr_size == 0)
5291 save_expr_count = 0;
5292 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5293 save_expr_rewritten = &save_expr_trees[0];
5295 rtn = safe_from_p (x, exp, 1);
5297 for (i = 0; i < save_expr_count; ++i)
5299 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5301 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5309 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5310 find the underlying pseudo. */
5311 if (GET_CODE (x) == SUBREG)
5314 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5318 /* If X is a location in the outgoing argument area, it is always safe. */
5319 if (GET_CODE (x) == MEM
5320 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5321 || (GET_CODE (XEXP (x, 0)) == PLUS
5322 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5325 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5328 exp_rtl = DECL_RTL (exp);
5335 if (TREE_CODE (exp) == TREE_LIST)
5336 return ((TREE_VALUE (exp) == 0
5337 || safe_from_p (x, TREE_VALUE (exp), 0))
5338 && (TREE_CHAIN (exp) == 0
5339 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5340 else if (TREE_CODE (exp) == ERROR_MARK)
5341 return 1; /* An already-visited SAVE_EXPR? */
5346 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5350 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5351 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5355 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5356 the expression. If it is set, we conflict iff we are that rtx or
5357 both are in memory. Otherwise, we check all operands of the
5358 expression recursively. */
5360 switch (TREE_CODE (exp))
5363 return (staticp (TREE_OPERAND (exp, 0))
5364 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5365 || TREE_STATIC (exp));
5368 if (GET_CODE (x) == MEM)
5373 exp_rtl = CALL_EXPR_RTL (exp);
5376 /* Assume that the call will clobber all hard registers and
5378 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5379 || GET_CODE (x) == MEM)
5386 /* If a sequence exists, we would have to scan every instruction
5387 in the sequence to see if it was safe. This is probably not
5389 if (RTL_EXPR_SEQUENCE (exp))
5392 exp_rtl = RTL_EXPR_RTL (exp);
5395 case WITH_CLEANUP_EXPR:
5396 exp_rtl = RTL_EXPR_RTL (exp);
5399 case CLEANUP_POINT_EXPR:
5400 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5403 exp_rtl = SAVE_EXPR_RTL (exp);
5407 /* This SAVE_EXPR might appear many times in the top-level
5408 safe_from_p() expression, and if it has a complex
5409 subexpression, examining it multiple times could result
5410 in a combinatorial explosion. E.g. on an Alpha
5411 running at least 200MHz, a Fortran test case compiled with
5412 optimization took about 28 minutes to compile -- even though
5413 it was only a few lines long, and the complicated line causing
5414 so much time to be spent in the earlier version of safe_from_p()
5415 had only 293 or so unique nodes.
5417 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5418 where it is so we can turn it back in the top-level safe_from_p()
5421 /* For now, don't bother re-sizing the array. */
5422 if (save_expr_count >= save_expr_size)
5424 save_expr_rewritten[save_expr_count++] = exp;
5426 nops = tree_code_length[(int) SAVE_EXPR];
5427 for (i = 0; i < nops; i++)
5429 tree operand = TREE_OPERAND (exp, i);
5430 if (operand == NULL_TREE)
5432 TREE_SET_CODE (exp, ERROR_MARK);
5433 if (!safe_from_p (x, operand, 0))
5435 TREE_SET_CODE (exp, SAVE_EXPR);
5437 TREE_SET_CODE (exp, ERROR_MARK);
5441 /* The only operand we look at is operand 1. The rest aren't
5442 part of the expression. */
5443 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5445 case METHOD_CALL_EXPR:
5446 /* This takes a rtx argument, but shouldn't appear here. */
5453 /* If we have an rtx, we do not need to scan our operands. */
5457 nops = tree_code_length[(int) TREE_CODE (exp)];
5458 for (i = 0; i < nops; i++)
5459 if (TREE_OPERAND (exp, i) != 0
5460 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5464 /* If we have an rtl, find any enclosed object. Then see if we conflict
5468 if (GET_CODE (exp_rtl) == SUBREG)
5470 exp_rtl = SUBREG_REG (exp_rtl);
5471 if (GET_CODE (exp_rtl) == REG
5472 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5476 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5477 are memory and EXP is not readonly. */
5478 return ! (rtx_equal_p (x, exp_rtl)
5479 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5480 && ! TREE_READONLY (exp)));
5483 /* If we reach here, it is safe. */
5487 /* Subroutine of expand_expr: return nonzero iff EXP is an
5488 expression whose type is statically determinable. */
5494 if (TREE_CODE (exp) == PARM_DECL
5495 || TREE_CODE (exp) == VAR_DECL
5496 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5497 || TREE_CODE (exp) == COMPONENT_REF
5498 || TREE_CODE (exp) == ARRAY_REF)
5503 /* Subroutine of expand_expr: return rtx if EXP is a
5504 variable or parameter; else return 0. */
5511 switch (TREE_CODE (exp))
5515 return DECL_RTL (exp);
5521 #ifdef MAX_INTEGER_COMPUTATION_MODE
5523 check_max_integer_computation_mode (exp)
5526 enum tree_code code = TREE_CODE (exp);
5527 enum machine_mode mode;
5529 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5530 if (code == NOP_EXPR
5531 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5534 /* First check the type of the overall operation. We need only look at
5535 unary, binary and relational operations. */
5536 if (TREE_CODE_CLASS (code) == '1'
5537 || TREE_CODE_CLASS (code) == '2'
5538 || TREE_CODE_CLASS (code) == '<')
5540 mode = TYPE_MODE (TREE_TYPE (exp));
5541 if (GET_MODE_CLASS (mode) == MODE_INT
5542 && mode > MAX_INTEGER_COMPUTATION_MODE)
5543 fatal ("unsupported wide integer operation");
5546 /* Check operand of a unary op. */
5547 if (TREE_CODE_CLASS (code) == '1')
5549 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5550 if (GET_MODE_CLASS (mode) == MODE_INT
5551 && mode > MAX_INTEGER_COMPUTATION_MODE)
5552 fatal ("unsupported wide integer operation");
5555 /* Check operands of a binary/comparison op. */
5556 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5558 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5559 if (GET_MODE_CLASS (mode) == MODE_INT
5560 && mode > MAX_INTEGER_COMPUTATION_MODE)
5561 fatal ("unsupported wide integer operation");
5563 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5564 if (GET_MODE_CLASS (mode) == MODE_INT
5565 && mode > MAX_INTEGER_COMPUTATION_MODE)
5566 fatal ("unsupported wide integer operation");
5572 /* expand_expr: generate code for computing expression EXP.
5573 An rtx for the computed value is returned. The value is never null.
5574 In the case of a void EXP, const0_rtx is returned.
5576 The value may be stored in TARGET if TARGET is nonzero.
5577 TARGET is just a suggestion; callers must assume that
5578 the rtx returned may not be the same as TARGET.
5580 If TARGET is CONST0_RTX, it means that the value will be ignored.
5582 If TMODE is not VOIDmode, it suggests generating the
5583 result in mode TMODE. But this is done only when convenient.
5584 Otherwise, TMODE is ignored and the value generated in its natural mode.
5585 TMODE is just a suggestion; callers must assume that
5586 the rtx returned may not have mode TMODE.
5588 Note that TARGET may have neither TMODE nor MODE. In that case, it
5589 probably will not be used.
5591 If MODIFIER is EXPAND_SUM then when EXP is an addition
5592 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5593 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5594 products as above, or REG or MEM, or constant.
5595 Ordinarily in such cases we would output mul or add instructions
5596 and then return a pseudo reg containing the sum.
5598 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5599 it also marks a label as absolutely required (it can't be dead).
5600 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5601 This is used for outputting expressions used in initializers.
5603 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5604 with a constant address even if that address is not normally legitimate.
5605 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5608 expand_expr (exp, target, tmode, modifier)
5611 enum machine_mode tmode;
5612 enum expand_modifier modifier;
5614 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5615 This is static so it will be accessible to our recursive callees. */
5616 static tree placeholder_list = 0;
5617 register rtx op0, op1, temp;
5618 tree type = TREE_TYPE (exp);
5619 int unsignedp = TREE_UNSIGNED (type);
5620 register enum machine_mode mode;
5621 register enum tree_code code = TREE_CODE (exp);
5623 rtx subtarget, original_target;
5626 /* Used by check-memory-usage to make modifier read only. */
5627 enum expand_modifier ro_modifier;
5629 /* Handle ERROR_MARK before anybody tries to access its type. */
5630 if (TREE_CODE (exp) == ERROR_MARK)
5632 op0 = CONST0_RTX (tmode);
5638 mode = TYPE_MODE (type);
5639 /* Use subtarget as the target for operand 0 of a binary operation. */
5640 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5641 original_target = target;
5642 ignore = (target == const0_rtx
5643 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5644 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5645 || code == COND_EXPR)
5646 && TREE_CODE (type) == VOID_TYPE));
5648 /* Make a read-only version of the modifier. */
5649 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5650 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5651 ro_modifier = modifier;
5653 ro_modifier = EXPAND_NORMAL;
5655 /* Don't use hard regs as subtargets, because the combiner
5656 can only handle pseudo regs. */
5657 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5659 /* Avoid subtargets inside loops,
5660 since they hide some invariant expressions. */
5661 if (preserve_subexpressions_p ())
5664 /* If we are going to ignore this result, we need only do something
5665 if there is a side-effect somewhere in the expression. If there
5666 is, short-circuit the most common cases here. Note that we must
5667 not call expand_expr with anything but const0_rtx in case this
5668 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5672 if (! TREE_SIDE_EFFECTS (exp))
5675 /* Ensure we reference a volatile object even if value is ignored. */
5676 if (TREE_THIS_VOLATILE (exp)
5677 && TREE_CODE (exp) != FUNCTION_DECL
5678 && mode != VOIDmode && mode != BLKmode)
5680 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5681 if (GET_CODE (temp) == MEM)
5682 temp = copy_to_reg (temp);
5686 if (TREE_CODE_CLASS (code) == '1')
5687 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5688 VOIDmode, ro_modifier);
5689 else if (TREE_CODE_CLASS (code) == '2'
5690 || TREE_CODE_CLASS (code) == '<')
5692 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5693 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5696 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5697 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5698 /* If the second operand has no side effects, just evaluate
5700 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5701 VOIDmode, ro_modifier);
5706 #ifdef MAX_INTEGER_COMPUTATION_MODE
5708 && TREE_CODE (exp) != INTEGER_CST
5709 && TREE_CODE (exp) != PARM_DECL
5710 && TREE_CODE (exp) != ARRAY_REF
5711 && TREE_CODE (exp) != COMPONENT_REF
5712 && TREE_CODE (exp) != BIT_FIELD_REF
5713 && TREE_CODE (exp) != INDIRECT_REF
5714 && TREE_CODE (exp) != CALL_EXPR
5715 && TREE_CODE (exp) != VAR_DECL)
5717 enum machine_mode mode = GET_MODE (target);
5719 if (GET_MODE_CLASS (mode) == MODE_INT
5720 && mode > MAX_INTEGER_COMPUTATION_MODE)
5721 fatal ("unsupported wide integer operation");
5724 if (TREE_CODE (exp) != INTEGER_CST
5725 && TREE_CODE (exp) != PARM_DECL
5726 && TREE_CODE (exp) != ARRAY_REF
5727 && TREE_CODE (exp) != COMPONENT_REF
5728 && TREE_CODE (exp) != BIT_FIELD_REF
5729 && TREE_CODE (exp) != INDIRECT_REF
5730 && TREE_CODE (exp) != VAR_DECL
5731 && TREE_CODE (exp) != CALL_EXPR
5732 && GET_MODE_CLASS (tmode) == MODE_INT
5733 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5734 fatal ("unsupported wide integer operation");
5736 check_max_integer_computation_mode (exp);
5739 /* If will do cse, generate all results into pseudo registers
5740 since 1) that allows cse to find more things
5741 and 2) otherwise cse could produce an insn the machine
5744 if (! cse_not_expected && mode != BLKmode && target
5745 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5752 tree function = decl_function_context (exp);
5753 /* Handle using a label in a containing function. */
5754 if (function != current_function_decl
5755 && function != inline_function_decl && function != 0)
5757 struct function *p = find_function_data (function);
5758 /* Allocate in the memory associated with the function
5759 that the label is in. */
5760 push_obstacks (p->function_obstack,
5761 p->function_maybepermanent_obstack);
5763 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5770 if (modifier == EXPAND_INITIALIZER)
5771 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5775 temp = gen_rtx_MEM (FUNCTION_MODE,
5776 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5777 if (function != current_function_decl
5778 && function != inline_function_decl && function != 0)
5779 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5784 if (DECL_RTL (exp) == 0)
5786 error_with_decl (exp, "prior parameter's size depends on `%s'");
5787 return CONST0_RTX (mode);
5790 /* ... fall through ... */
5793 /* If a static var's type was incomplete when the decl was written,
5794 but the type is complete now, lay out the decl now. */
5795 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5796 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5798 push_obstacks_nochange ();
5799 end_temporary_allocation ();
5800 layout_decl (exp, 0);
5801 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5805 /* Although static-storage variables start off initialized, according to
5806 ANSI C, a memcpy could overwrite them with uninitialized values. So
5807 we check them too. This also lets us check for read-only variables
5808 accessed via a non-const declaration, in case it won't be detected
5809 any other way (e.g., in an embedded system or OS kernel without
5812 Aggregates are not checked here; they're handled elsewhere. */
5813 if (current_function_check_memory_usage && code == VAR_DECL
5814 && GET_CODE (DECL_RTL (exp)) == MEM
5815 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5817 enum memory_use_mode memory_usage;
5818 memory_usage = get_memory_usage_from_modifier (modifier);
5820 if (memory_usage != MEMORY_USE_DONT)
5821 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5822 XEXP (DECL_RTL (exp), 0), Pmode,
5823 GEN_INT (int_size_in_bytes (type)),
5824 TYPE_MODE (sizetype),
5825 GEN_INT (memory_usage),
5826 TYPE_MODE (integer_type_node));
5829 /* ... fall through ... */
5833 if (DECL_RTL (exp) == 0)
5836 /* Ensure variable marked as used even if it doesn't go through
5837 a parser. If it hasn't be used yet, write out an external
5839 if (! TREE_USED (exp))
5841 assemble_external (exp);
5842 TREE_USED (exp) = 1;
5845 /* Show we haven't gotten RTL for this yet. */
5848 /* Handle variables inherited from containing functions. */
5849 context = decl_function_context (exp);
5851 /* We treat inline_function_decl as an alias for the current function
5852 because that is the inline function whose vars, types, etc.
5853 are being merged into the current function.
5854 See expand_inline_function. */
5856 if (context != 0 && context != current_function_decl
5857 && context != inline_function_decl
5858 /* If var is static, we don't need a static chain to access it. */
5859 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5860 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5864 /* Mark as non-local and addressable. */
5865 DECL_NONLOCAL (exp) = 1;
5866 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5868 mark_addressable (exp);
5869 if (GET_CODE (DECL_RTL (exp)) != MEM)
5871 addr = XEXP (DECL_RTL (exp), 0);
5872 if (GET_CODE (addr) == MEM)
5873 addr = gen_rtx_MEM (Pmode,
5874 fix_lexical_addr (XEXP (addr, 0), exp));
5876 addr = fix_lexical_addr (addr, exp);
5877 temp = change_address (DECL_RTL (exp), mode, addr);
5880 /* This is the case of an array whose size is to be determined
5881 from its initializer, while the initializer is still being parsed.
5884 else if (GET_CODE (DECL_RTL (exp)) == MEM
5885 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5886 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5887 XEXP (DECL_RTL (exp), 0));
5889 /* If DECL_RTL is memory, we are in the normal case and either
5890 the address is not valid or it is not a register and -fforce-addr
5891 is specified, get the address into a register. */
5893 else if (GET_CODE (DECL_RTL (exp)) == MEM
5894 && modifier != EXPAND_CONST_ADDRESS
5895 && modifier != EXPAND_SUM
5896 && modifier != EXPAND_INITIALIZER
5897 && (! memory_address_p (DECL_MODE (exp),
5898 XEXP (DECL_RTL (exp), 0))
5900 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5901 temp = change_address (DECL_RTL (exp), VOIDmode,
5902 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5904 /* If we got something, return it. But first, set the alignment
5905 the address is a register. */
5908 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5909 mark_reg_pointer (XEXP (temp, 0),
5910 DECL_ALIGN (exp) / BITS_PER_UNIT);
5915 /* If the mode of DECL_RTL does not match that of the decl, it
5916 must be a promoted value. We return a SUBREG of the wanted mode,
5917 but mark it so that we know that it was already extended. */
5919 if (GET_CODE (DECL_RTL (exp)) == REG
5920 && GET_MODE (DECL_RTL (exp)) != mode)
5922 /* Get the signedness used for this variable. Ensure we get the
5923 same mode we got when the variable was declared. */
5924 if (GET_MODE (DECL_RTL (exp))
5925 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5928 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5929 SUBREG_PROMOTED_VAR_P (temp) = 1;
5930 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5934 return DECL_RTL (exp);
5937 return immed_double_const (TREE_INT_CST_LOW (exp),
5938 TREE_INT_CST_HIGH (exp),
5942 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5943 EXPAND_MEMORY_USE_BAD);
5946 /* If optimized, generate immediate CONST_DOUBLE
5947 which will be turned into memory by reload if necessary.
5949 We used to force a register so that loop.c could see it. But
5950 this does not allow gen_* patterns to perform optimizations with
5951 the constants. It also produces two insns in cases like "x = 1.0;".
5952 On most machines, floating-point constants are not permitted in
5953 many insns, so we'd end up copying it to a register in any case.
5955 Now, we do the copying in expand_binop, if appropriate. */
5956 return immed_real_const (exp);
5960 if (! TREE_CST_RTL (exp))
5961 output_constant_def (exp);
5963 /* TREE_CST_RTL probably contains a constant address.
5964 On RISC machines where a constant address isn't valid,
5965 make some insns to get that address into a register. */
5966 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5967 && modifier != EXPAND_CONST_ADDRESS
5968 && modifier != EXPAND_INITIALIZER
5969 && modifier != EXPAND_SUM
5970 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5972 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5973 return change_address (TREE_CST_RTL (exp), VOIDmode,
5974 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5975 return TREE_CST_RTL (exp);
5977 case EXPR_WITH_FILE_LOCATION:
5980 char *saved_input_filename = input_filename;
5981 int saved_lineno = lineno;
5982 input_filename = EXPR_WFL_FILENAME (exp);
5983 lineno = EXPR_WFL_LINENO (exp);
5984 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5985 emit_line_note (input_filename, lineno);
5986 /* Possibly avoid switching back and force here */
5987 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5988 input_filename = saved_input_filename;
5989 lineno = saved_lineno;
5994 context = decl_function_context (exp);
5996 /* If this SAVE_EXPR was at global context, assume we are an
5997 initialization function and move it into our context. */
5999 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6001 /* We treat inline_function_decl as an alias for the current function
6002 because that is the inline function whose vars, types, etc.
6003 are being merged into the current function.
6004 See expand_inline_function. */
6005 if (context == current_function_decl || context == inline_function_decl)
6008 /* If this is non-local, handle it. */
6011 /* The following call just exists to abort if the context is
6012 not of a containing function. */
6013 find_function_data (context);
6015 temp = SAVE_EXPR_RTL (exp);
6016 if (temp && GET_CODE (temp) == REG)
6018 put_var_into_stack (exp);
6019 temp = SAVE_EXPR_RTL (exp);
6021 if (temp == 0 || GET_CODE (temp) != MEM)
6023 return change_address (temp, mode,
6024 fix_lexical_addr (XEXP (temp, 0), exp));
6026 if (SAVE_EXPR_RTL (exp) == 0)
6028 if (mode == VOIDmode)
6031 temp = assign_temp (type, 3, 0, 0);
6033 SAVE_EXPR_RTL (exp) = temp;
6034 if (!optimize && GET_CODE (temp) == REG)
6035 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6038 /* If the mode of TEMP does not match that of the expression, it
6039 must be a promoted value. We pass store_expr a SUBREG of the
6040 wanted mode but mark it so that we know that it was already
6041 extended. Note that `unsignedp' was modified above in
6044 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6046 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6047 SUBREG_PROMOTED_VAR_P (temp) = 1;
6048 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6051 if (temp == const0_rtx)
6052 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6053 EXPAND_MEMORY_USE_BAD);
6055 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6057 TREE_USED (exp) = 1;
6060 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6061 must be a promoted value. We return a SUBREG of the wanted mode,
6062 but mark it so that we know that it was already extended. */
6064 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6065 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6067 /* Compute the signedness and make the proper SUBREG. */
6068 promote_mode (type, mode, &unsignedp, 0);
6069 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6070 SUBREG_PROMOTED_VAR_P (temp) = 1;
6071 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6075 return SAVE_EXPR_RTL (exp);
6080 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6081 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6085 case PLACEHOLDER_EXPR:
6087 tree placeholder_expr;
6089 /* If there is an object on the head of the placeholder list,
6090 see if some object in it of type TYPE or a pointer to it. For
6091 further information, see tree.def. */
6092 for (placeholder_expr = placeholder_list;
6093 placeholder_expr != 0;
6094 placeholder_expr = TREE_CHAIN (placeholder_expr))
6096 tree need_type = TYPE_MAIN_VARIANT (type);
6098 tree old_list = placeholder_list;
6101 /* Find the outermost reference that is of the type we want.
6102 If none, see if any object has a type that is a pointer to
6103 the type we want. */
6104 for (elt = TREE_PURPOSE (placeholder_expr);
6105 elt != 0 && object == 0;
6107 = ((TREE_CODE (elt) == COMPOUND_EXPR
6108 || TREE_CODE (elt) == COND_EXPR)
6109 ? TREE_OPERAND (elt, 1)
6110 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6111 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6112 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6113 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6114 ? TREE_OPERAND (elt, 0) : 0))
6115 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6118 for (elt = TREE_PURPOSE (placeholder_expr);
6119 elt != 0 && object == 0;
6121 = ((TREE_CODE (elt) == COMPOUND_EXPR
6122 || TREE_CODE (elt) == COND_EXPR)
6123 ? TREE_OPERAND (elt, 1)
6124 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6125 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6126 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6127 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6128 ? TREE_OPERAND (elt, 0) : 0))
6129 if (POINTER_TYPE_P (TREE_TYPE (elt))
6130 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6132 object = build1 (INDIRECT_REF, need_type, elt);
6136 /* Expand this object skipping the list entries before
6137 it was found in case it is also a PLACEHOLDER_EXPR.
6138 In that case, we want to translate it using subsequent
6140 placeholder_list = TREE_CHAIN (placeholder_expr);
6141 temp = expand_expr (object, original_target, tmode,
6143 placeholder_list = old_list;
6149 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6152 case WITH_RECORD_EXPR:
6153 /* Put the object on the placeholder list, expand our first operand,
6154 and pop the list. */
6155 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6157 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6158 tmode, ro_modifier);
6159 placeholder_list = TREE_CHAIN (placeholder_list);
6163 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6164 expand_goto (TREE_OPERAND (exp, 0));
6166 expand_computed_goto (TREE_OPERAND (exp, 0));
6170 expand_exit_loop_if_false (NULL_PTR,
6171 invert_truthvalue (TREE_OPERAND (exp, 0)));
6174 case LABELED_BLOCK_EXPR:
6175 if (LABELED_BLOCK_BODY (exp))
6176 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6177 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6180 case EXIT_BLOCK_EXPR:
6181 if (EXIT_BLOCK_RETURN (exp))
6182 sorry ("returned value in block_exit_expr");
6183 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6188 expand_start_loop (1);
6189 expand_expr_stmt (TREE_OPERAND (exp, 0));
6197 tree vars = TREE_OPERAND (exp, 0);
6198 int vars_need_expansion = 0;
6200 /* Need to open a binding contour here because
6201 if there are any cleanups they must be contained here. */
6202 expand_start_bindings (0);
6204 /* Mark the corresponding BLOCK for output in its proper place. */
6205 if (TREE_OPERAND (exp, 2) != 0
6206 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6207 insert_block (TREE_OPERAND (exp, 2));
6209 /* If VARS have not yet been expanded, expand them now. */
6212 if (DECL_RTL (vars) == 0)
6214 vars_need_expansion = 1;
6217 expand_decl_init (vars);
6218 vars = TREE_CHAIN (vars);
6221 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6223 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6229 if (RTL_EXPR_SEQUENCE (exp))
6231 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6233 emit_insns (RTL_EXPR_SEQUENCE (exp));
6234 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6236 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6237 free_temps_for_rtl_expr (exp);
6238 return RTL_EXPR_RTL (exp);
6241 /* If we don't need the result, just ensure we evaluate any
6246 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6247 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6248 EXPAND_MEMORY_USE_BAD);
6252 /* All elts simple constants => refer to a constant in memory. But
6253 if this is a non-BLKmode mode, let it store a field at a time
6254 since that should make a CONST_INT or CONST_DOUBLE when we
6255 fold. Likewise, if we have a target we can use, it is best to
6256 store directly into the target unless the type is large enough
6257 that memcpy will be used. If we are making an initializer and
6258 all operands are constant, put it in memory as well. */
6259 else if ((TREE_STATIC (exp)
6260 && ((mode == BLKmode
6261 && ! (target != 0 && safe_from_p (target, exp, 1)))
6262 || TREE_ADDRESSABLE (exp)
6263 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6264 && (!MOVE_BY_PIECES_P
6265 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6266 TYPE_ALIGN (type) / BITS_PER_UNIT))
6267 && ! mostly_zeros_p (exp))))
6268 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6270 rtx constructor = output_constant_def (exp);
6271 if (modifier != EXPAND_CONST_ADDRESS
6272 && modifier != EXPAND_INITIALIZER
6273 && modifier != EXPAND_SUM
6274 && (! memory_address_p (GET_MODE (constructor),
6275 XEXP (constructor, 0))
6277 && GET_CODE (XEXP (constructor, 0)) != REG)))
6278 constructor = change_address (constructor, VOIDmode,
6279 XEXP (constructor, 0));
6285 /* Handle calls that pass values in multiple non-contiguous
6286 locations. The Irix 6 ABI has examples of this. */
6287 if (target == 0 || ! safe_from_p (target, exp, 1)
6288 || GET_CODE (target) == PARALLEL)
6290 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6291 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6293 target = assign_temp (type, 0, 1, 1);
6296 if (TREE_READONLY (exp))
6298 if (GET_CODE (target) == MEM)
6299 target = copy_rtx (target);
6301 RTX_UNCHANGING_P (target) = 1;
6304 store_constructor (exp, target, 0);
6310 tree exp1 = TREE_OPERAND (exp, 0);
6313 tree string = string_constant (exp1, &index);
6316 /* Try to optimize reads from const strings. */
6318 && TREE_CODE (string) == STRING_CST
6319 && TREE_CODE (index) == INTEGER_CST
6320 && !TREE_INT_CST_HIGH (index)
6321 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6322 && GET_MODE_CLASS (mode) == MODE_INT
6323 && GET_MODE_SIZE (mode) == 1
6324 && modifier != EXPAND_MEMORY_USE_WO)
6325 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6327 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6328 op0 = memory_address (mode, op0);
6330 if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6332 enum memory_use_mode memory_usage;
6333 memory_usage = get_memory_usage_from_modifier (modifier);
6335 if (memory_usage != MEMORY_USE_DONT)
6337 in_check_memory_usage = 1;
6338 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6340 GEN_INT (int_size_in_bytes (type)),
6341 TYPE_MODE (sizetype),
6342 GEN_INT (memory_usage),
6343 TYPE_MODE (integer_type_node));
6344 in_check_memory_usage = 0;
6348 temp = gen_rtx_MEM (mode, op0);
6350 if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
6351 || (TREE_CODE (exp1) == ADDR_EXPR
6352 && (exp2 = TREE_OPERAND (exp1, 0))
6353 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6354 MEM_SET_IN_STRUCT_P (temp, 1);
6356 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6357 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6359 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6360 here, because, in C and C++, the fact that a location is accessed
6361 through a pointer to const does not mean that the value there can
6362 never change. Languages where it can never change should
6363 also set TREE_STATIC. */
6364 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6369 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6373 tree array = TREE_OPERAND (exp, 0);
6374 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6375 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6376 tree index = TREE_OPERAND (exp, 1);
6377 tree index_type = TREE_TYPE (index);
6380 /* Optimize the special-case of a zero lower bound.
6382 We convert the low_bound to sizetype to avoid some problems
6383 with constant folding. (E.g. suppose the lower bound is 1,
6384 and its mode is QI. Without the conversion, (ARRAY
6385 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6386 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6388 But sizetype isn't quite right either (especially if
6389 the lowbound is negative). FIXME */
6391 if (! integer_zerop (low_bound))
6392 index = fold (build (MINUS_EXPR, index_type, index,
6393 convert (sizetype, low_bound)));
6395 /* Fold an expression like: "foo"[2].
6396 This is not done in fold so it won't happen inside &.
6397 Don't fold if this is for wide characters since it's too
6398 difficult to do correctly and this is a very rare case. */
6400 if (TREE_CODE (array) == STRING_CST
6401 && TREE_CODE (index) == INTEGER_CST
6402 && !TREE_INT_CST_HIGH (index)
6403 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6404 && GET_MODE_CLASS (mode) == MODE_INT
6405 && GET_MODE_SIZE (mode) == 1)
6406 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6408 /* If this is a constant index into a constant array,
6409 just get the value from the array. Handle both the cases when
6410 we have an explicit constructor and when our operand is a variable
6411 that was declared const. */
6413 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6415 if (TREE_CODE (index) == INTEGER_CST
6416 && TREE_INT_CST_HIGH (index) == 0)
6418 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6420 i = TREE_INT_CST_LOW (index);
6422 elem = TREE_CHAIN (elem);
6424 return expand_expr (fold (TREE_VALUE (elem)), target,
6425 tmode, ro_modifier);
6429 else if (optimize >= 1
6430 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6431 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6432 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6434 if (TREE_CODE (index) == INTEGER_CST)
6436 tree init = DECL_INITIAL (array);
6438 i = TREE_INT_CST_LOW (index);
6439 if (TREE_CODE (init) == CONSTRUCTOR)
6441 tree elem = CONSTRUCTOR_ELTS (init);
6444 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6445 elem = TREE_CHAIN (elem);
6447 return expand_expr (fold (TREE_VALUE (elem)), target,
6448 tmode, ro_modifier);
6450 else if (TREE_CODE (init) == STRING_CST
6451 && TREE_INT_CST_HIGH (index) == 0
6452 && (TREE_INT_CST_LOW (index)
6453 < TREE_STRING_LENGTH (init)))
6455 (TREE_STRING_POINTER
6456 (init)[TREE_INT_CST_LOW (index)]));
6461 /* ... fall through ... */
6465 /* If the operand is a CONSTRUCTOR, we can just extract the
6466 appropriate field if it is present. Don't do this if we have
6467 already written the data since we want to refer to that copy
6468 and varasm.c assumes that's what we'll do. */
6469 if (code != ARRAY_REF
6470 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6471 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6475 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6476 elt = TREE_CHAIN (elt))
6477 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6478 /* We can normally use the value of the field in the
6479 CONSTRUCTOR. However, if this is a bitfield in
6480 an integral mode that we can fit in a HOST_WIDE_INT,
6481 we must mask only the number of bits in the bitfield,
6482 since this is done implicitly by the constructor. If
6483 the bitfield does not meet either of those conditions,
6484 we can't do this optimization. */
6485 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6486 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6488 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6489 <= HOST_BITS_PER_WIDE_INT))))
6491 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6492 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6494 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6496 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6498 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6499 op0 = expand_and (op0, op1, target);
6503 enum machine_mode imode
6504 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6506 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6509 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6511 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6521 enum machine_mode mode1;
6527 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6528 &mode1, &unsignedp, &volatilep,
6531 /* If we got back the original object, something is wrong. Perhaps
6532 we are evaluating an expression too early. In any event, don't
6533 infinitely recurse. */
6537 /* If TEM's type is a union of variable size, pass TARGET to the inner
6538 computation, since it will need a temporary and TARGET is known
6539 to have to do. This occurs in unchecked conversion in Ada. */
6541 op0 = expand_expr (tem,
6542 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6543 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6545 ? target : NULL_RTX),
6547 modifier == EXPAND_INITIALIZER
6548 ? modifier : EXPAND_NORMAL);
6550 /* If this is a constant, put it into a register if it is a
6551 legitimate constant and memory if it isn't. */
6552 if (CONSTANT_P (op0))
6554 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6555 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6556 op0 = force_reg (mode, op0);
6558 op0 = validize_mem (force_const_mem (mode, op0));
6563 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6565 if (GET_CODE (op0) != MEM)
6568 if (GET_MODE (offset_rtx) != ptr_mode)
6570 #ifdef POINTERS_EXTEND_UNSIGNED
6571 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6573 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6577 /* A constant address in TO_RTX can have VOIDmode, we must not try
6578 to call force_reg for that case. Avoid that case. */
6579 if (GET_CODE (op0) == MEM
6580 && GET_MODE (op0) == BLKmode
6581 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6583 && (bitpos % bitsize) == 0
6584 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6585 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6587 rtx temp = change_address (op0, mode1,
6588 plus_constant (XEXP (op0, 0),
6591 if (GET_CODE (XEXP (temp, 0)) == REG)
6594 op0 = change_address (op0, mode1,
6595 force_reg (GET_MODE (XEXP (temp, 0)),
6601 op0 = change_address (op0, VOIDmode,
6602 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6603 force_reg (ptr_mode, offset_rtx)));
6606 /* Don't forget about volatility even if this is a bitfield. */
6607 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6609 op0 = copy_rtx (op0);
6610 MEM_VOLATILE_P (op0) = 1;
6613 /* Check the access. */
6614 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6616 enum memory_use_mode memory_usage;
6617 memory_usage = get_memory_usage_from_modifier (modifier);
6619 if (memory_usage != MEMORY_USE_DONT)
6624 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6625 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6627 /* Check the access right of the pointer. */
6628 if (size > BITS_PER_UNIT)
6629 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6631 GEN_INT (size / BITS_PER_UNIT),
6632 TYPE_MODE (sizetype),
6633 GEN_INT (memory_usage),
6634 TYPE_MODE (integer_type_node));
6638 /* In cases where an aligned union has an unaligned object
6639 as a field, we might be extracting a BLKmode value from
6640 an integer-mode (e.g., SImode) object. Handle this case
6641 by doing the extract into an object as wide as the field
6642 (which we know to be the width of a basic mode), then
6643 storing into memory, and changing the mode to BLKmode.
6644 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6645 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6646 if (mode1 == VOIDmode
6647 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6648 || (modifier != EXPAND_CONST_ADDRESS
6649 && modifier != EXPAND_INITIALIZER
6650 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6651 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6652 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6653 /* If the field isn't aligned enough to fetch as a memref,
6654 fetch it as a bit field. */
6655 || (SLOW_UNALIGNED_ACCESS
6656 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6657 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6659 enum machine_mode ext_mode = mode;
6661 if (ext_mode == BLKmode)
6662 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6664 if (ext_mode == BLKmode)
6666 /* In this case, BITPOS must start at a byte boundary and
6667 TARGET, if specified, must be a MEM. */
6668 if (GET_CODE (op0) != MEM
6669 || (target != 0 && GET_CODE (target) != MEM)
6670 || bitpos % BITS_PER_UNIT != 0)
6673 op0 = change_address (op0, VOIDmode,
6674 plus_constant (XEXP (op0, 0),
6675 bitpos / BITS_PER_UNIT));
6677 target = assign_temp (type, 0, 1, 1);
6679 emit_block_move (target, op0,
6680 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6687 op0 = validize_mem (op0);
6689 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6690 mark_reg_pointer (XEXP (op0, 0), alignment);
6692 op0 = extract_bit_field (op0, bitsize, bitpos,
6693 unsignedp, target, ext_mode, ext_mode,
6695 int_size_in_bytes (TREE_TYPE (tem)));
6697 /* If the result is a record type and BITSIZE is narrower than
6698 the mode of OP0, an integral mode, and this is a big endian
6699 machine, we must put the field into the high-order bits. */
6700 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6701 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6702 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6703 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6704 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6708 if (mode == BLKmode)
6710 rtx new = assign_stack_temp (ext_mode,
6711 bitsize / BITS_PER_UNIT, 0);
6713 emit_move_insn (new, op0);
6714 op0 = copy_rtx (new);
6715 PUT_MODE (op0, BLKmode);
6716 MEM_SET_IN_STRUCT_P (op0, 1);
6722 /* If the result is BLKmode, use that to access the object
6724 if (mode == BLKmode)
6727 /* Get a reference to just this component. */
6728 if (modifier == EXPAND_CONST_ADDRESS
6729 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6730 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6731 (bitpos / BITS_PER_UNIT)));
6733 op0 = change_address (op0, mode1,
6734 plus_constant (XEXP (op0, 0),
6735 (bitpos / BITS_PER_UNIT)));
6737 if (GET_CODE (op0) == MEM)
6738 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6740 if (GET_CODE (XEXP (op0, 0)) == REG)
6741 mark_reg_pointer (XEXP (op0, 0), alignment);
6743 MEM_SET_IN_STRUCT_P (op0, 1);
6744 MEM_VOLATILE_P (op0) |= volatilep;
6745 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6746 || modifier == EXPAND_CONST_ADDRESS
6747 || modifier == EXPAND_INITIALIZER)
6749 else if (target == 0)
6750 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6752 convert_move (target, op0, unsignedp);
6756 /* Intended for a reference to a buffer of a file-object in Pascal.
6757 But it's not certain that a special tree code will really be
6758 necessary for these. INDIRECT_REF might work for them. */
6764 /* Pascal set IN expression.
6767 rlo = set_low - (set_low%bits_per_word);
6768 the_word = set [ (index - rlo)/bits_per_word ];
6769 bit_index = index % bits_per_word;
6770 bitmask = 1 << bit_index;
6771 return !!(the_word & bitmask); */
6773 tree set = TREE_OPERAND (exp, 0);
6774 tree index = TREE_OPERAND (exp, 1);
6775 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6776 tree set_type = TREE_TYPE (set);
6777 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6778 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6779 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6780 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6781 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6782 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6783 rtx setaddr = XEXP (setval, 0);
6784 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6786 rtx diff, quo, rem, addr, bit, result;
6788 preexpand_calls (exp);
6790 /* If domain is empty, answer is no. Likewise if index is constant
6791 and out of bounds. */
6792 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6793 && TREE_CODE (set_low_bound) == INTEGER_CST
6794 && tree_int_cst_lt (set_high_bound, set_low_bound))
6795 || (TREE_CODE (index) == INTEGER_CST
6796 && TREE_CODE (set_low_bound) == INTEGER_CST
6797 && tree_int_cst_lt (index, set_low_bound))
6798 || (TREE_CODE (set_high_bound) == INTEGER_CST
6799 && TREE_CODE (index) == INTEGER_CST
6800 && tree_int_cst_lt (set_high_bound, index))))
6804 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6806 /* If we get here, we have to generate the code for both cases
6807 (in range and out of range). */
6809 op0 = gen_label_rtx ();
6810 op1 = gen_label_rtx ();
6812 if (! (GET_CODE (index_val) == CONST_INT
6813 && GET_CODE (lo_r) == CONST_INT))
6815 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6816 GET_MODE (index_val), iunsignedp, 0, op1);
6819 if (! (GET_CODE (index_val) == CONST_INT
6820 && GET_CODE (hi_r) == CONST_INT))
6822 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6823 GET_MODE (index_val), iunsignedp, 0, op1);
6826 /* Calculate the element number of bit zero in the first word
6828 if (GET_CODE (lo_r) == CONST_INT)
6829 rlow = GEN_INT (INTVAL (lo_r)
6830 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6832 rlow = expand_binop (index_mode, and_optab, lo_r,
6833 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6834 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6836 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6837 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6839 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6840 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6841 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6842 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6844 addr = memory_address (byte_mode,
6845 expand_binop (index_mode, add_optab, diff,
6846 setaddr, NULL_RTX, iunsignedp,
6849 /* Extract the bit we want to examine */
6850 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6851 gen_rtx_MEM (byte_mode, addr),
6852 make_tree (TREE_TYPE (index), rem),
6854 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6855 GET_MODE (target) == byte_mode ? target : 0,
6856 1, OPTAB_LIB_WIDEN);
6858 if (result != target)
6859 convert_move (target, result, 1);
6861 /* Output the code to handle the out-of-range case. */
6864 emit_move_insn (target, const0_rtx);
6869 case WITH_CLEANUP_EXPR:
6870 if (RTL_EXPR_RTL (exp) == 0)
6873 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6874 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6876 /* That's it for this cleanup. */
6877 TREE_OPERAND (exp, 2) = 0;
6879 return RTL_EXPR_RTL (exp);
6881 case CLEANUP_POINT_EXPR:
6883 /* Start a new binding layer that will keep track of all cleanup
6884 actions to be performed. */
6885 expand_start_bindings (0);
6887 target_temp_slot_level = temp_slot_level;
6889 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6890 /* If we're going to use this value, load it up now. */
6892 op0 = force_not_mem (op0);
6893 preserve_temp_slots (op0);
6894 expand_end_bindings (NULL_TREE, 0, 0);
6899 /* Check for a built-in function. */
6900 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6901 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6903 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6904 return expand_builtin (exp, target, subtarget, tmode, ignore);
6906 /* If this call was expanded already by preexpand_calls,
6907 just return the result we got. */
6908 if (CALL_EXPR_RTL (exp) != 0)
6909 return CALL_EXPR_RTL (exp);
6911 return expand_call (exp, target, ignore);
6913 case NON_LVALUE_EXPR:
6916 case REFERENCE_EXPR:
6917 if (TREE_CODE (type) == UNION_TYPE)
6919 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6922 if (mode != BLKmode)
6923 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6925 target = assign_temp (type, 0, 1, 1);
6928 if (GET_CODE (target) == MEM)
6929 /* Store data into beginning of memory target. */
6930 store_expr (TREE_OPERAND (exp, 0),
6931 change_address (target, TYPE_MODE (valtype), 0), 0);
6933 else if (GET_CODE (target) == REG)
6934 /* Store this field into a union of the proper type. */
6935 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6936 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6938 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6943 /* Return the entire union. */
6947 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6949 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6952 /* If the signedness of the conversion differs and OP0 is
6953 a promoted SUBREG, clear that indication since we now
6954 have to do the proper extension. */
6955 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6956 && GET_CODE (op0) == SUBREG)
6957 SUBREG_PROMOTED_VAR_P (op0) = 0;
6962 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6963 if (GET_MODE (op0) == mode)
6966 /* If OP0 is a constant, just convert it into the proper mode. */
6967 if (CONSTANT_P (op0))
6969 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6970 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6972 if (modifier == EXPAND_INITIALIZER)
6973 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6977 convert_to_mode (mode, op0,
6978 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6980 convert_move (target, op0,
6981 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6985 /* We come here from MINUS_EXPR when the second operand is a
6988 this_optab = add_optab;
6990 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6991 something else, make sure we add the register to the constant and
6992 then to the other thing. This case can occur during strength
6993 reduction and doing it this way will produce better code if the
6994 frame pointer or argument pointer is eliminated.
6996 fold-const.c will ensure that the constant is always in the inner
6997 PLUS_EXPR, so the only case we need to do anything about is if
6998 sp, ap, or fp is our second argument, in which case we must swap
6999 the innermost first argument and our second argument. */
7001 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7002 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7003 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7004 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7005 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7006 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7008 tree t = TREE_OPERAND (exp, 1);
7010 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7011 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7014 /* If the result is to be ptr_mode and we are adding an integer to
7015 something, we might be forming a constant. So try to use
7016 plus_constant. If it produces a sum and we can't accept it,
7017 use force_operand. This allows P = &ARR[const] to generate
7018 efficient code on machines where a SYMBOL_REF is not a valid
7021 If this is an EXPAND_SUM call, always return the sum. */
7022 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7023 || mode == ptr_mode)
7025 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7026 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7027 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7029 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7031 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
7032 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7033 op1 = force_operand (op1, target);
7037 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7038 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7039 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7041 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7043 if (! CONSTANT_P (op0))
7045 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7046 VOIDmode, modifier);
7047 /* Don't go to both_summands if modifier
7048 says it's not right to return a PLUS. */
7049 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7053 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
7054 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7055 op0 = force_operand (op0, target);
7060 /* No sense saving up arithmetic to be done
7061 if it's all in the wrong mode to form part of an address.
7062 And force_operand won't know whether to sign-extend or
7064 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7065 || mode != ptr_mode)
7068 preexpand_calls (exp);
7069 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7072 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7073 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7076 /* Make sure any term that's a sum with a constant comes last. */
7077 if (GET_CODE (op0) == PLUS
7078 && CONSTANT_P (XEXP (op0, 1)))
7084 /* If adding to a sum including a constant,
7085 associate it to put the constant outside. */
7086 if (GET_CODE (op1) == PLUS
7087 && CONSTANT_P (XEXP (op1, 1))
7088 && !(flag_propolice_protection && (contains_fp (op0) || contains_fp (op1))))
7090 rtx constant_term = const0_rtx;
7092 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7095 /* Ensure that MULT comes first if there is one. */
7096 else if (GET_CODE (op0) == MULT)
7097 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7099 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7101 /* Let's also eliminate constants from op0 if possible. */
7102 op0 = eliminate_constant_term (op0, &constant_term);
7104 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7105 their sum should be a constant. Form it into OP1, since the
7106 result we want will then be OP0 + OP1. */
7108 temp = simplify_binary_operation (PLUS, mode, constant_term,
7113 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7116 /* Put a constant term last and put a multiplication first. */
7117 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7118 temp = op1, op1 = op0, op0 = temp;
7120 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7121 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7124 /* For initializers, we are allowed to return a MINUS of two
7125 symbolic constants. Here we handle all cases when both operands
7127 /* Handle difference of two symbolic constants,
7128 for the sake of an initializer. */
7129 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7130 && really_constant_p (TREE_OPERAND (exp, 0))
7131 && really_constant_p (TREE_OPERAND (exp, 1)))
7133 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7134 VOIDmode, ro_modifier);
7135 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7136 VOIDmode, ro_modifier);
7138 /* If the last operand is a CONST_INT, use plus_constant of
7139 the negated constant. Else make the MINUS. */
7140 if (GET_CODE (op1) == CONST_INT)
7141 return plus_constant (op0, - INTVAL (op1));
7143 return gen_rtx_MINUS (mode, op0, op1);
7145 /* Convert A - const to A + (-const). */
7146 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7148 tree negated = fold (build1 (NEGATE_EXPR, type,
7149 TREE_OPERAND (exp, 1)));
7151 /* Deal with the case where we can't negate the constant
7153 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7155 tree newtype = signed_type (type);
7156 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7157 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7158 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7160 if (! TREE_OVERFLOW (newneg))
7161 return expand_expr (convert (type,
7162 build (PLUS_EXPR, newtype,
7164 target, tmode, ro_modifier);
7168 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7172 this_optab = sub_optab;
7176 preexpand_calls (exp);
7177 /* If first operand is constant, swap them.
7178 Thus the following special case checks need only
7179 check the second operand. */
7180 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7182 register tree t1 = TREE_OPERAND (exp, 0);
7183 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7184 TREE_OPERAND (exp, 1) = t1;
7187 /* Attempt to return something suitable for generating an
7188 indexed address, for machines that support that. */
7190 if (modifier == EXPAND_SUM && mode == ptr_mode
7191 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7192 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7194 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7197 /* Apply distributive law if OP0 is x+c. */
7198 if (GET_CODE (op0) == PLUS
7199 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7200 return gen_rtx_PLUS (mode,
7201 gen_rtx_MULT (mode, XEXP (op0, 0),
7202 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7203 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7204 * INTVAL (XEXP (op0, 1))));
7206 if (GET_CODE (op0) != REG)
7207 op0 = force_operand (op0, NULL_RTX);
7208 if (GET_CODE (op0) != REG)
7209 op0 = copy_to_mode_reg (mode, op0);
7211 return gen_rtx_MULT (mode, op0,
7212 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7215 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7218 /* Check for multiplying things that have been extended
7219 from a narrower type. If this machine supports multiplying
7220 in that narrower type with a result in the desired type,
7221 do it that way, and avoid the explicit type-conversion. */
7222 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7223 && TREE_CODE (type) == INTEGER_TYPE
7224 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7225 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7226 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7227 && int_fits_type_p (TREE_OPERAND (exp, 1),
7228 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7229 /* Don't use a widening multiply if a shift will do. */
7230 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7231 > HOST_BITS_PER_WIDE_INT)
7232 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7234 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7235 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7237 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7238 /* If both operands are extended, they must either both
7239 be zero-extended or both be sign-extended. */
7240 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7242 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7244 enum machine_mode innermode
7245 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7246 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7247 ? smul_widen_optab : umul_widen_optab);
7248 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7249 ? umul_widen_optab : smul_widen_optab);
7250 if (mode == GET_MODE_WIDER_MODE (innermode))
7252 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7254 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7255 NULL_RTX, VOIDmode, 0);
7256 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7257 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7260 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7261 NULL_RTX, VOIDmode, 0);
7264 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7265 && innermode == word_mode)
7268 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7269 NULL_RTX, VOIDmode, 0);
7270 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7271 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7274 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7275 NULL_RTX, VOIDmode, 0);
7276 temp = expand_binop (mode, other_optab, op0, op1, target,
7277 unsignedp, OPTAB_LIB_WIDEN);
7278 htem = expand_mult_highpart_adjust (innermode,
7279 gen_highpart (innermode, temp),
7281 gen_highpart (innermode, temp),
7283 emit_move_insn (gen_highpart (innermode, temp), htem);
7288 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7289 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7290 return expand_mult (mode, op0, op1, target, unsignedp);
7292 case TRUNC_DIV_EXPR:
7293 case FLOOR_DIV_EXPR:
7295 case ROUND_DIV_EXPR:
7296 case EXACT_DIV_EXPR:
7297 preexpand_calls (exp);
7298 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7300 /* Possible optimization: compute the dividend with EXPAND_SUM
7301 then if the divisor is constant can optimize the case
7302 where some terms of the dividend have coeffs divisible by it. */
7303 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7304 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7305 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7308 this_optab = flodiv_optab;
7311 case TRUNC_MOD_EXPR:
7312 case FLOOR_MOD_EXPR:
7314 case ROUND_MOD_EXPR:
7315 preexpand_calls (exp);
7316 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7318 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7319 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7320 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7322 case FIX_ROUND_EXPR:
7323 case FIX_FLOOR_EXPR:
7325 abort (); /* Not used for C. */
7327 case FIX_TRUNC_EXPR:
7328 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7330 target = gen_reg_rtx (mode);
7331 expand_fix (target, op0, unsignedp);
7335 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7337 target = gen_reg_rtx (mode);
7338 /* expand_float can't figure out what to do if FROM has VOIDmode.
7339 So give it the correct mode. With -O, cse will optimize this. */
7340 if (GET_MODE (op0) == VOIDmode)
7341 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7343 expand_float (target, op0,
7344 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7348 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7349 temp = expand_unop (mode, neg_optab, op0, target, 0);
7355 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7357 /* Handle complex values specially. */
7358 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7359 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7360 return expand_complex_abs (mode, op0, target, unsignedp);
7362 /* Unsigned abs is simply the operand. Testing here means we don't
7363 risk generating incorrect code below. */
7364 if (TREE_UNSIGNED (type))
7367 return expand_abs (mode, op0, target,
7368 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7372 target = original_target;
7373 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7374 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7375 || GET_MODE (target) != mode
7376 || (GET_CODE (target) == REG
7377 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7378 target = gen_reg_rtx (mode);
7379 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7380 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7382 /* First try to do it with a special MIN or MAX instruction.
7383 If that does not win, use a conditional jump to select the proper
7385 this_optab = (TREE_UNSIGNED (type)
7386 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7387 : (code == MIN_EXPR ? smin_optab : smax_optab));
7389 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7394 /* At this point, a MEM target is no longer useful; we will get better
7397 if (GET_CODE (target) == MEM)
7398 target = gen_reg_rtx (mode);
7401 emit_move_insn (target, op0);
7403 op0 = gen_label_rtx ();
7405 /* If this mode is an integer too wide to compare properly,
7406 compare word by word. Rely on cse to optimize constant cases. */
7407 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7409 if (code == MAX_EXPR)
7410 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7411 target, op1, NULL_RTX, op0);
7413 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7414 op1, target, NULL_RTX, op0);
7415 emit_move_insn (target, op1);
7419 if (code == MAX_EXPR)
7420 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7421 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7422 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7424 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7425 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7426 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7427 if (temp == const0_rtx)
7428 emit_move_insn (target, op1);
7429 else if (temp != const_true_rtx)
7431 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7432 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7435 emit_move_insn (target, op1);
7442 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7443 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7449 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7450 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7455 /* ??? Can optimize bitwise operations with one arg constant.
7456 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7457 and (a bitwise1 b) bitwise2 b (etc)
7458 but that is probably not worth while. */
7460 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7461 boolean values when we want in all cases to compute both of them. In
7462 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7463 as actual zero-or-1 values and then bitwise anding. In cases where
7464 there cannot be any side effects, better code would be made by
7465 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7466 how to recognize those cases. */
7468 case TRUTH_AND_EXPR:
7470 this_optab = and_optab;
7475 this_optab = ior_optab;
7478 case TRUTH_XOR_EXPR:
7480 this_optab = xor_optab;
7487 preexpand_calls (exp);
7488 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7490 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7491 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7494 /* Could determine the answer when only additive constants differ. Also,
7495 the addition of one can be handled by changing the condition. */
7502 preexpand_calls (exp);
7503 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7507 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7508 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7510 && GET_CODE (original_target) == REG
7511 && (GET_MODE (original_target)
7512 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7514 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7517 if (temp != original_target)
7518 temp = copy_to_reg (temp);
7520 op1 = gen_label_rtx ();
7521 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7522 GET_MODE (temp), unsignedp, 0, op1);
7523 emit_move_insn (temp, const1_rtx);
7528 /* If no set-flag instruction, must generate a conditional
7529 store into a temporary variable. Drop through
7530 and handle this like && and ||. */
7532 case TRUTH_ANDIF_EXPR:
7533 case TRUTH_ORIF_EXPR:
7535 && (target == 0 || ! safe_from_p (target, exp, 1)
7536 /* Make sure we don't have a hard reg (such as function's return
7537 value) live across basic blocks, if not optimizing. */
7538 || (!optimize && GET_CODE (target) == REG
7539 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7540 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7543 emit_clr_insn (target);
7545 op1 = gen_label_rtx ();
7546 jumpifnot (exp, op1);
7549 emit_0_to_1_insn (target);
7552 return ignore ? const0_rtx : target;
7554 case TRUTH_NOT_EXPR:
7555 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7556 /* The parser is careful to generate TRUTH_NOT_EXPR
7557 only with operands that are always zero or one. */
7558 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7559 target, 1, OPTAB_LIB_WIDEN);
7565 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7567 return expand_expr (TREE_OPERAND (exp, 1),
7568 (ignore ? const0_rtx : target),
7572 /* If we would have a "singleton" (see below) were it not for a
7573 conversion in each arm, bring that conversion back out. */
7574 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7575 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7576 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7577 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7579 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7580 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7582 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7583 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7584 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7585 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7586 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7587 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7588 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7589 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7590 return expand_expr (build1 (NOP_EXPR, type,
7591 build (COND_EXPR, TREE_TYPE (true),
7592 TREE_OPERAND (exp, 0),
7594 target, tmode, modifier);
7598 /* Note that COND_EXPRs whose type is a structure or union
7599 are required to be constructed to contain assignments of
7600 a temporary variable, so that we can evaluate them here
7601 for side effect only. If type is void, we must do likewise. */
7603 /* If an arm of the branch requires a cleanup,
7604 only that cleanup is performed. */
7607 tree binary_op = 0, unary_op = 0;
7609 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7610 convert it to our mode, if necessary. */
7611 if (integer_onep (TREE_OPERAND (exp, 1))
7612 && integer_zerop (TREE_OPERAND (exp, 2))
7613 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7617 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7622 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7623 if (GET_MODE (op0) == mode)
7627 target = gen_reg_rtx (mode);
7628 convert_move (target, op0, unsignedp);
7632 /* Check for X ? A + B : A. If we have this, we can copy A to the
7633 output and conditionally add B. Similarly for unary operations.
7634 Don't do this if X has side-effects because those side effects
7635 might affect A or B and the "?" operation is a sequence point in
7636 ANSI. (operand_equal_p tests for side effects.) */
7638 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7639 && operand_equal_p (TREE_OPERAND (exp, 2),
7640 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7641 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7642 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7643 && operand_equal_p (TREE_OPERAND (exp, 1),
7644 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7645 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7646 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7647 && operand_equal_p (TREE_OPERAND (exp, 2),
7648 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7649 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7650 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7651 && operand_equal_p (TREE_OPERAND (exp, 1),
7652 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7653 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7655 /* If we are not to produce a result, we have no target. Otherwise,
7656 if a target was specified use it; it will not be used as an
7657 intermediate target unless it is safe. If no target, use a
7662 else if (original_target
7663 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7664 || (singleton && GET_CODE (original_target) == REG
7665 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7666 && original_target == var_rtx (singleton)))
7667 && GET_MODE (original_target) == mode
7668 #ifdef HAVE_conditional_move
7669 && (! can_conditionally_move_p (mode)
7670 || GET_CODE (original_target) == REG
7671 || TREE_ADDRESSABLE (type))
7673 && ! (GET_CODE (original_target) == MEM
7674 && MEM_VOLATILE_P (original_target)))
7675 temp = original_target;
7676 else if (TREE_ADDRESSABLE (type))
7679 temp = assign_temp (type, 0, 0, 1);
7681 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7682 do the test of X as a store-flag operation, do this as
7683 A + ((X != 0) << log C). Similarly for other simple binary
7684 operators. Only do for C == 1 if BRANCH_COST is low. */
7685 if (temp && singleton && binary_op
7686 && (TREE_CODE (binary_op) == PLUS_EXPR
7687 || TREE_CODE (binary_op) == MINUS_EXPR
7688 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7689 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7690 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7691 : integer_onep (TREE_OPERAND (binary_op, 1)))
7692 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7695 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7696 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7697 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7700 /* If we had X ? A : A + 1, do this as A + (X == 0).
7702 We have to invert the truth value here and then put it
7703 back later if do_store_flag fails. We cannot simply copy
7704 TREE_OPERAND (exp, 0) to another variable and modify that
7705 because invert_truthvalue can modify the tree pointed to
7707 if (singleton == TREE_OPERAND (exp, 1))
7708 TREE_OPERAND (exp, 0)
7709 = invert_truthvalue (TREE_OPERAND (exp, 0));
7711 result = do_store_flag (TREE_OPERAND (exp, 0),
7712 (safe_from_p (temp, singleton, 1)
7714 mode, BRANCH_COST <= 1);
7716 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7717 result = expand_shift (LSHIFT_EXPR, mode, result,
7718 build_int_2 (tree_log2
7722 (safe_from_p (temp, singleton, 1)
7723 ? temp : NULL_RTX), 0);
7727 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7728 return expand_binop (mode, boptab, op1, result, temp,
7729 unsignedp, OPTAB_LIB_WIDEN);
7731 else if (singleton == TREE_OPERAND (exp, 1))
7732 TREE_OPERAND (exp, 0)
7733 = invert_truthvalue (TREE_OPERAND (exp, 0));
7736 do_pending_stack_adjust ();
7738 op0 = gen_label_rtx ();
7740 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7744 /* If the target conflicts with the other operand of the
7745 binary op, we can't use it. Also, we can't use the target
7746 if it is a hard register, because evaluating the condition
7747 might clobber it. */
7749 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7750 || (GET_CODE (temp) == REG
7751 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7752 temp = gen_reg_rtx (mode);
7753 store_expr (singleton, temp, 0);
7756 expand_expr (singleton,
7757 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7758 if (singleton == TREE_OPERAND (exp, 1))
7759 jumpif (TREE_OPERAND (exp, 0), op0);
7761 jumpifnot (TREE_OPERAND (exp, 0), op0);
7763 start_cleanup_deferral ();
7764 if (binary_op && temp == 0)
7765 /* Just touch the other operand. */
7766 expand_expr (TREE_OPERAND (binary_op, 1),
7767 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7769 store_expr (build (TREE_CODE (binary_op), type,
7770 make_tree (type, temp),
7771 TREE_OPERAND (binary_op, 1)),
7774 store_expr (build1 (TREE_CODE (unary_op), type,
7775 make_tree (type, temp)),
7779 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7780 comparison operator. If we have one of these cases, set the
7781 output to A, branch on A (cse will merge these two references),
7782 then set the output to FOO. */
7784 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7785 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7786 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7787 TREE_OPERAND (exp, 1), 0)
7788 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7789 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7790 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7792 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7793 temp = gen_reg_rtx (mode);
7794 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7795 jumpif (TREE_OPERAND (exp, 0), op0);
7797 start_cleanup_deferral ();
7798 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7802 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7803 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7804 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7805 TREE_OPERAND (exp, 2), 0)
7806 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7807 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7808 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7810 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7811 temp = gen_reg_rtx (mode);
7812 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7813 jumpifnot (TREE_OPERAND (exp, 0), op0);
7815 start_cleanup_deferral ();
7816 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7821 op1 = gen_label_rtx ();
7822 jumpifnot (TREE_OPERAND (exp, 0), op0);
7824 start_cleanup_deferral ();
7826 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7828 expand_expr (TREE_OPERAND (exp, 1),
7829 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7830 end_cleanup_deferral ();
7832 emit_jump_insn (gen_jump (op1));
7835 start_cleanup_deferral ();
7837 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7839 expand_expr (TREE_OPERAND (exp, 2),
7840 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7843 end_cleanup_deferral ();
7854 /* Something needs to be initialized, but we didn't know
7855 where that thing was when building the tree. For example,
7856 it could be the return value of a function, or a parameter
7857 to a function which lays down in the stack, or a temporary
7858 variable which must be passed by reference.
7860 We guarantee that the expression will either be constructed
7861 or copied into our original target. */
7863 tree slot = TREE_OPERAND (exp, 0);
7864 tree cleanups = NULL_TREE;
7867 if (TREE_CODE (slot) != VAR_DECL)
7871 target = original_target;
7875 if (DECL_RTL (slot) != 0)
7877 target = DECL_RTL (slot);
7878 /* If we have already expanded the slot, so don't do
7880 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7885 target = assign_temp (type, 2, 0, 1);
7886 /* All temp slots at this level must not conflict. */
7887 preserve_temp_slots (target);
7888 DECL_RTL (slot) = target;
7889 if (TREE_ADDRESSABLE (slot))
7891 TREE_ADDRESSABLE (slot) = 0;
7892 mark_addressable (slot);
7895 /* Since SLOT is not known to the called function
7896 to belong to its stack frame, we must build an explicit
7897 cleanup. This case occurs when we must build up a reference
7898 to pass the reference as an argument. In this case,
7899 it is very likely that such a reference need not be
7902 if (TREE_OPERAND (exp, 2) == 0)
7903 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7904 cleanups = TREE_OPERAND (exp, 2);
7909 /* This case does occur, when expanding a parameter which
7910 needs to be constructed on the stack. The target
7911 is the actual stack address that we want to initialize.
7912 The function we call will perform the cleanup in this case. */
7914 /* If we have already assigned it space, use that space,
7915 not target that we were passed in, as our target
7916 parameter is only a hint. */
7917 if (DECL_RTL (slot) != 0)
7919 target = DECL_RTL (slot);
7920 /* If we have already expanded the slot, so don't do
7922 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7927 DECL_RTL (slot) = target;
7928 /* If we must have an addressable slot, then make sure that
7929 the RTL that we just stored in slot is OK. */
7930 if (TREE_ADDRESSABLE (slot))
7932 TREE_ADDRESSABLE (slot) = 0;
7933 mark_addressable (slot);
7938 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7939 /* Mark it as expanded. */
7940 TREE_OPERAND (exp, 1) = NULL_TREE;
7942 TREE_USED (slot) = 1;
7943 store_expr (exp1, target, 0);
7945 expand_decl_cleanup (NULL_TREE, cleanups);
7952 tree lhs = TREE_OPERAND (exp, 0);
7953 tree rhs = TREE_OPERAND (exp, 1);
7954 tree noncopied_parts = 0;
7955 tree lhs_type = TREE_TYPE (lhs);
7957 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7958 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7959 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7960 TYPE_NONCOPIED_PARTS (lhs_type));
7961 while (noncopied_parts != 0)
7963 expand_assignment (TREE_VALUE (noncopied_parts),
7964 TREE_PURPOSE (noncopied_parts), 0, 0);
7965 noncopied_parts = TREE_CHAIN (noncopied_parts);
7972 /* If lhs is complex, expand calls in rhs before computing it.
7973 That's so we don't compute a pointer and save it over a call.
7974 If lhs is simple, compute it first so we can give it as a
7975 target if the rhs is just a call. This avoids an extra temp and copy
7976 and that prevents a partial-subsumption which makes bad code.
7977 Actually we could treat component_ref's of vars like vars. */
7979 tree lhs = TREE_OPERAND (exp, 0);
7980 tree rhs = TREE_OPERAND (exp, 1);
7981 tree noncopied_parts = 0;
7982 tree lhs_type = TREE_TYPE (lhs);
7986 if (TREE_CODE (lhs) != VAR_DECL
7987 && TREE_CODE (lhs) != RESULT_DECL
7988 && TREE_CODE (lhs) != PARM_DECL
7989 && ! (TREE_CODE (lhs) == INDIRECT_REF
7990 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7991 preexpand_calls (exp);
7993 /* Check for |= or &= of a bitfield of size one into another bitfield
7994 of size 1. In this case, (unless we need the result of the
7995 assignment) we can do this more efficiently with a
7996 test followed by an assignment, if necessary.
7998 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7999 things change so we do, this code should be enhanced to
8002 && TREE_CODE (lhs) == COMPONENT_REF
8003 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8004 || TREE_CODE (rhs) == BIT_AND_EXPR)
8005 && TREE_OPERAND (rhs, 0) == lhs
8006 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8007 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
8008 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
8010 rtx label = gen_label_rtx ();
8012 do_jump (TREE_OPERAND (rhs, 1),
8013 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8014 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8015 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8016 (TREE_CODE (rhs) == BIT_IOR_EXPR
8018 : integer_zero_node)),
8020 do_pending_stack_adjust ();
8025 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8026 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8027 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8028 TYPE_NONCOPIED_PARTS (lhs_type));
8030 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8031 while (noncopied_parts != 0)
8033 expand_assignment (TREE_PURPOSE (noncopied_parts),
8034 TREE_VALUE (noncopied_parts), 0, 0);
8035 noncopied_parts = TREE_CHAIN (noncopied_parts);
8041 if (!TREE_OPERAND (exp, 0))
8042 expand_null_return ();
8044 expand_return (TREE_OPERAND (exp, 0));
8047 case PREINCREMENT_EXPR:
8048 case PREDECREMENT_EXPR:
8049 return expand_increment (exp, 0, ignore);
8051 case POSTINCREMENT_EXPR:
8052 case POSTDECREMENT_EXPR:
8053 /* Faster to treat as pre-increment if result is not used. */
8054 return expand_increment (exp, ! ignore, ignore);
8057 /* If nonzero, TEMP will be set to the address of something that might
8058 be a MEM corresponding to a stack slot. */
8061 /* Are we taking the address of a nested function? */
8062 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8063 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8064 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8065 && ! TREE_STATIC (exp))
8067 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8068 op0 = force_operand (op0, target);
8070 /* If we are taking the address of something erroneous, just
8072 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8076 /* We make sure to pass const0_rtx down if we came in with
8077 ignore set, to avoid doing the cleanups twice for something. */
8078 op0 = expand_expr (TREE_OPERAND (exp, 0),
8079 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8080 (modifier == EXPAND_INITIALIZER
8081 ? modifier : EXPAND_CONST_ADDRESS));
8083 /* If we are going to ignore the result, OP0 will have been set
8084 to const0_rtx, so just return it. Don't get confused and
8085 think we are taking the address of the constant. */
8089 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8090 clever and returns a REG when given a MEM. */
8091 op0 = protect_from_queue (op0, 1);
8093 /* We would like the object in memory. If it is a constant,
8094 we can have it be statically allocated into memory. For
8095 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
8096 memory and store the value into it. */
8098 if (CONSTANT_P (op0))
8099 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8101 else if (GET_CODE (op0) == MEM)
8103 mark_temp_addr_taken (op0);
8104 temp = XEXP (op0, 0);
8107 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8108 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8110 /* If this object is in a register, it must be not
8112 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8113 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8115 mark_temp_addr_taken (memloc);
8116 emit_move_insn (memloc, op0);
8120 if (GET_CODE (op0) != MEM)
8123 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8125 temp = XEXP (op0, 0);
8126 #ifdef POINTERS_EXTEND_UNSIGNED
8127 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8128 && mode == ptr_mode)
8129 temp = convert_memory_address (ptr_mode, temp);
8134 op0 = force_operand (XEXP (op0, 0), target);
8137 if (flag_force_addr && GET_CODE (op0) != REG)
8138 op0 = force_reg (Pmode, op0);
8140 if (GET_CODE (op0) == REG
8141 && ! REG_USERVAR_P (op0))
8142 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8144 /* If we might have had a temp slot, add an equivalent address
8147 update_temp_slot_address (temp, op0);
8149 #ifdef POINTERS_EXTEND_UNSIGNED
8150 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8151 && mode == ptr_mode)
8152 op0 = convert_memory_address (ptr_mode, op0);
8157 case ENTRY_VALUE_EXPR:
8160 /* COMPLEX type for Extended Pascal & Fortran */
8163 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8166 /* Get the rtx code of the operands. */
8167 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8168 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8171 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8175 /* Move the real (op0) and imaginary (op1) parts to their location. */
8176 emit_move_insn (gen_realpart (mode, target), op0);
8177 emit_move_insn (gen_imagpart (mode, target), op1);
8179 insns = get_insns ();
8182 /* Complex construction should appear as a single unit. */
8183 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8184 each with a separate pseudo as destination.
8185 It's not correct for flow to treat them as a unit. */
8186 if (GET_CODE (target) != CONCAT)
8187 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8195 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8196 return gen_realpart (mode, op0);
8199 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8200 return gen_imagpart (mode, op0);
8204 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8208 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8211 target = gen_reg_rtx (mode);
8215 /* Store the realpart and the negated imagpart to target. */
8216 emit_move_insn (gen_realpart (partmode, target),
8217 gen_realpart (partmode, op0));
8219 imag_t = gen_imagpart (partmode, target);
8220 temp = expand_unop (partmode, neg_optab,
8221 gen_imagpart (partmode, op0), imag_t, 0);
8223 emit_move_insn (imag_t, temp);
8225 insns = get_insns ();
8228 /* Conjugate should appear as a single unit
8229 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8230 each with a separate pseudo as destination.
8231 It's not correct for flow to treat them as a unit. */
8232 if (GET_CODE (target) != CONCAT)
8233 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8240 case TRY_CATCH_EXPR:
8242 tree handler = TREE_OPERAND (exp, 1);
8244 expand_eh_region_start ();
8246 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8248 expand_eh_region_end (handler);
8253 case TRY_FINALLY_EXPR:
8255 tree try_block = TREE_OPERAND (exp, 0);
8256 tree finally_block = TREE_OPERAND (exp, 1);
8257 rtx finally_label = gen_label_rtx ();
8258 rtx done_label = gen_label_rtx ();
8259 rtx return_link = gen_reg_rtx (Pmode);
8260 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8261 (tree) finally_label, (tree) return_link);
8262 TREE_SIDE_EFFECTS (cleanup) = 1;
8264 /* Start a new binding layer that will keep track of all cleanup
8265 actions to be performed. */
8266 expand_start_bindings (0);
8268 target_temp_slot_level = temp_slot_level;
8270 expand_decl_cleanup (NULL_TREE, cleanup);
8271 op0 = expand_expr (try_block, target, tmode, modifier);
8273 preserve_temp_slots (op0);
8274 expand_end_bindings (NULL_TREE, 0, 0);
8275 emit_jump (done_label);
8276 emit_label (finally_label);
8277 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8278 emit_indirect_jump (return_link);
8279 emit_label (done_label);
8283 case GOTO_SUBROUTINE_EXPR:
8285 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8286 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8287 rtx return_address = gen_label_rtx ();
8288 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8290 emit_label (return_address);
8296 rtx dcc = get_dynamic_cleanup_chain ();
8297 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8303 rtx dhc = get_dynamic_handler_chain ();
8304 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8309 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8312 /* Here to do an ordinary binary operator, generating an instruction
8313 from the optab already placed in `this_optab'. */
8315 preexpand_calls (exp);
8316 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8318 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8319 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8321 temp = expand_binop (mode, this_optab, op0, op1, target,
8322 unsignedp, OPTAB_LIB_WIDEN);
8330 /* Return the alignment in bits of EXP, a pointer valued expression.
8331 But don't return more than MAX_ALIGN no matter what.
8332 The alignment returned is, by default, the alignment of the thing that
8333 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8335 Otherwise, look at the expression to see if we can do better, i.e., if the
8336 expression is actually pointing at an object whose alignment is tighter. */
8339 get_pointer_alignment (exp, max_align)
8343 unsigned align, inner;
8345 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8348 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8349 align = MIN (align, max_align);
8353 switch (TREE_CODE (exp))
8357 case NON_LVALUE_EXPR:
8358 exp = TREE_OPERAND (exp, 0);
8359 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8361 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8362 align = MIN (inner, max_align);
8366 /* If sum of pointer + int, restrict our maximum alignment to that
8367 imposed by the integer. If not, we can't do any better than
8369 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
8372 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8377 exp = TREE_OPERAND (exp, 0);
8381 /* See what we are pointing at and look at its alignment. */
8382 exp = TREE_OPERAND (exp, 0);
8383 if (TREE_CODE (exp) == FUNCTION_DECL)
8384 align = FUNCTION_BOUNDARY;
8385 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8386 align = DECL_ALIGN (exp);
8387 #ifdef CONSTANT_ALIGNMENT
8388 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8389 align = CONSTANT_ALIGNMENT (exp, align);
8391 return MIN (align, max_align);
8399 /* Return the tree node and offset if a given argument corresponds to
8400 a string constant. */
8403 string_constant (arg, ptr_offset)
8409 if (TREE_CODE (arg) == ADDR_EXPR
8410 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8412 *ptr_offset = integer_zero_node;
8413 return TREE_OPERAND (arg, 0);
8415 else if (TREE_CODE (arg) == PLUS_EXPR)
8417 tree arg0 = TREE_OPERAND (arg, 0);
8418 tree arg1 = TREE_OPERAND (arg, 1);
8423 if (TREE_CODE (arg0) == ADDR_EXPR
8424 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8427 return TREE_OPERAND (arg0, 0);
8429 else if (TREE_CODE (arg1) == ADDR_EXPR
8430 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8433 return TREE_OPERAND (arg1, 0);
8440 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8441 way, because it could contain a zero byte in the middle.
8442 TREE_STRING_LENGTH is the size of the character array, not the string.
8444 Unfortunately, string_constant can't access the values of const char
8445 arrays with initializers, so neither can we do so here. */
8455 src = string_constant (src, &offset_node);
8458 max = TREE_STRING_LENGTH (src);
8459 ptr = TREE_STRING_POINTER (src);
8460 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8462 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8463 compute the offset to the following null if we don't know where to
8464 start searching for it. */
8466 for (i = 0; i < max; i++)
8469 /* We don't know the starting offset, but we do know that the string
8470 has no internal zero bytes. We can assume that the offset falls
8471 within the bounds of the string; otherwise, the programmer deserves
8472 what he gets. Subtract the offset from the length of the string,
8474 /* This would perhaps not be valid if we were dealing with named
8475 arrays in addition to literal string constants. */
8476 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8479 /* We have a known offset into the string. Start searching there for
8480 a null character. */
8481 if (offset_node == 0)
8485 /* Did we get a long long offset? If so, punt. */
8486 if (TREE_INT_CST_HIGH (offset_node) != 0)
8488 offset = TREE_INT_CST_LOW (offset_node);
8490 /* If the offset is known to be out of bounds, warn, and call strlen at
8492 if (offset < 0 || offset > max)
8494 warning ("offset outside bounds of constant string");
8497 /* Use strlen to search for the first zero byte. Since any strings
8498 constructed with build_string will have nulls appended, we win even
8499 if we get handed something like (char[4])"abcd".
8501 Since OFFSET is our starting index into the string, no further
8502 calculation is needed. */
8503 return size_int (strlen (ptr + offset));
8507 expand_builtin_return_addr (fndecl_code, count, tem)
8508 enum built_in_function fndecl_code;
8514 /* Some machines need special handling before we can access
8515 arbitrary frames. For example, on the sparc, we must first flush
8516 all register windows to the stack. */
8517 #ifdef SETUP_FRAME_ADDRESSES
8519 SETUP_FRAME_ADDRESSES ();
8522 /* On the sparc, the return address is not in the frame, it is in a
8523 register. There is no way to access it off of the current frame
8524 pointer, but it can be accessed off the previous frame pointer by
8525 reading the value from the register window save area. */
8526 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8527 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8531 /* Scan back COUNT frames to the specified frame. */
8532 for (i = 0; i < count; i++)
8534 /* Assume the dynamic chain pointer is in the word that the
8535 frame address points to, unless otherwise specified. */
8536 #ifdef DYNAMIC_CHAIN_ADDRESS
8537 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8539 tem = memory_address (Pmode, tem);
8540 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8543 /* For __builtin_frame_address, return what we've got. */
8544 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8547 /* For __builtin_return_address, Get the return address from that
8549 #ifdef RETURN_ADDR_RTX
8550 tem = RETURN_ADDR_RTX (count, tem);
8552 tem = memory_address (Pmode,
8553 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8554 tem = gen_rtx_MEM (Pmode, tem);
8559 /* Construct the leading half of a __builtin_setjmp call. Control will
8560 return to RECEIVER_LABEL. This is used directly by sjlj exception
8564 expand_builtin_setjmp_setup (buf_addr, receiver_label)
8568 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8571 #ifdef POINTERS_EXTEND_UNSIGNED
8572 buf_addr = convert_memory_address (Pmode, buf_addr);
8575 buf_addr = force_reg (Pmode, buf_addr);
8579 /* We store the frame pointer and the address of receiver_label in
8580 the buffer and use the rest of it for the stack save area, which
8581 is machine-dependent. */
8583 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8584 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8587 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8588 BUILTIN_SETJMP_FRAME_VALUE);
8589 emit_move_insn (validize_mem
8590 (gen_rtx_MEM (Pmode,
8591 plus_constant (buf_addr,
8592 GET_MODE_SIZE (Pmode)))),
8593 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
8595 stack_save = gen_rtx_MEM (sa_mode,
8596 plus_constant (buf_addr,
8597 2 * GET_MODE_SIZE (Pmode)));
8598 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8600 /* If there is further processing to do, do it. */
8601 #ifdef HAVE_builtin_setjmp_setup
8602 if (HAVE_builtin_setjmp_setup)
8603 emit_insn (gen_builtin_setjmp_setup (buf_addr));
8606 /* Tell optimize_save_area_alloca that extra work is going to
8607 need to go on during alloca. */
8608 current_function_calls_setjmp = 1;
8610 /* Set this so all the registers get saved in our frame; we need to be
8611 able to copy the saved values for any registers from frames we unwind. */
8612 current_function_has_nonlocal_label = 1;
8615 /* Construct the trailing part of a __builtin_setjmp call.
8616 This is used directly by sjlj exception handling code. */
8619 expand_builtin_setjmp_receiver (receiver_label)
8620 rtx receiver_label ATTRIBUTE_UNUSED;
8622 /* Clobber the FP when we get here, so we have to make sure it's
8623 marked as used by this function. */
8624 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8626 /* Mark the static chain as clobbered here so life information
8627 doesn't get messed up for it. */
8628 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8630 /* Now put in the code to restore the frame pointer, and argument
8631 pointer, if needed. The code below is from expand_end_bindings
8632 in stmt.c; see detailed documentation there. */
8633 #ifdef HAVE_nonlocal_goto
8634 if (! HAVE_nonlocal_goto)
8636 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8638 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8639 if (fixed_regs[ARG_POINTER_REGNUM])
8641 #ifdef ELIMINABLE_REGS
8643 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8645 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8646 if (elim_regs[i].from == ARG_POINTER_REGNUM
8647 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8650 if (i == sizeof elim_regs / sizeof elim_regs [0])
8653 /* Now restore our arg pointer from the address at which it
8654 was saved in our stack frame.
8655 If there hasn't be space allocated for it yet, make
8657 if (arg_pointer_save_area == 0)
8658 arg_pointer_save_area
8659 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8660 emit_move_insn (virtual_incoming_args_rtx,
8661 copy_to_reg (arg_pointer_save_area));
8666 #ifdef HAVE_builtin_setjmp_receiver
8667 if (HAVE_builtin_setjmp_receiver)
8668 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
8671 #ifdef HAVE_nonlocal_goto_receiver
8672 if (HAVE_nonlocal_goto_receiver)
8673 emit_insn (gen_nonlocal_goto_receiver ());
8680 /* @@@ This is a kludge. Not all machine descriptions define a blockage
8681 insn, but we must not allow the code we just generated to be reordered
8682 by scheduling. Specifically, the update of the frame pointer must
8683 happen immediately, not later. So emit an ASM_INPUT to act as blockage
8685 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
8689 /* __builtin_setjmp is passed a pointer to an array of five words (not
8690 all will be used on all machines). It operates similarly to the C
8691 library function of the same name, but is more efficient. Much of
8692 the code below (and for longjmp) is copied from the handling of
8695 NOTE: This is intended for use by GNAT and the exception handling
8696 scheme in the compiler and will only work in the method used by
8700 expand_builtin_setjmp (arglist, target)
8704 rtx buf_addr, next_lab, cont_lab;
8707 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8710 if (target == 0 || GET_CODE (target) != REG
8711 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8712 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
8714 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8716 next_lab = gen_label_rtx ();
8717 cont_lab = gen_label_rtx ();
8719 expand_builtin_setjmp_setup (buf_addr, next_lab);
8721 /* Set TARGET to zero and branch to the continue label. */
8722 emit_move_insn (target, const0_rtx);
8723 emit_jump_insn (gen_jump (cont_lab));
8725 emit_label (next_lab);
8727 expand_builtin_setjmp_receiver (next_lab);
8729 /* Set TARGET to one. */
8730 emit_move_insn (target, const1_rtx);
8731 emit_label (cont_lab);
8733 /* Tell flow about the strange goings on. Putting `next_lab' on
8734 `nonlocal_goto_handler_labels' to indicates that function
8735 calls may traverse the arc back to this label. */
8737 current_function_has_nonlocal_label = 1;
8738 nonlocal_goto_handler_labels
8739 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
8745 expand_builtin_longjmp (buf_addr, value)
8746 rtx buf_addr, value;
8749 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8751 #ifdef POINTERS_EXTEND_UNSIGNED
8752 buf_addr = convert_memory_address (Pmode, buf_addr);
8754 buf_addr = force_reg (Pmode, buf_addr);
8756 /* We used to store value in static_chain_rtx, but that fails if pointers
8757 are smaller than integers. We instead require that the user must pass
8758 a second argument of 1, because that is what builtin_setjmp will
8759 return. This also makes EH slightly more efficient, since we are no
8760 longer copying around a value that we don't care about. */
8761 if (value != const1_rtx)
8764 #ifdef HAVE_builtin_longjmp
8765 if (HAVE_builtin_longjmp)
8766 emit_insn (gen_builtin_longjmp (buf_addr));
8770 fp = gen_rtx_MEM (Pmode, buf_addr);
8771 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8772 GET_MODE_SIZE (Pmode)));
8774 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8775 2 * GET_MODE_SIZE (Pmode)));
8777 /* Pick up FP, label, and SP from the block and jump. This code is
8778 from expand_goto in stmt.c; see there for detailed comments. */
8779 #if HAVE_nonlocal_goto
8780 if (HAVE_nonlocal_goto)
8781 /* We have to pass a value to the nonlocal_goto pattern that will
8782 get copied into the static_chain pointer, but it does not matter
8783 what that value is, because builtin_setjmp does not use it. */
8784 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8788 lab = copy_to_reg (lab);
8790 emit_move_insn (hard_frame_pointer_rtx, fp);
8791 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8793 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8794 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8795 emit_indirect_jump (lab);
8801 get_memory_rtx (exp)
8807 mem = gen_rtx_MEM (BLKmode,
8808 memory_address (BLKmode,
8809 expand_expr (exp, NULL_RTX,
8810 ptr_mode, EXPAND_SUM)));
8812 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8814 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8815 if the value is the address of a structure or if the expression is
8816 cast to a pointer to structure type. */
8819 while (TREE_CODE (exp) == NOP_EXPR)
8821 tree cast_type = TREE_TYPE (exp);
8822 if (TREE_CODE (cast_type) == POINTER_TYPE
8823 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8828 exp = TREE_OPERAND (exp, 0);
8831 if (is_aggregate == 0)
8835 if (TREE_CODE (exp) == ADDR_EXPR)
8836 /* If this is the address of an object, check whether the
8837 object is an array. */
8838 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8840 type = TREE_TYPE (TREE_TYPE (exp));
8841 is_aggregate = AGGREGATE_TYPE_P (type);
8844 MEM_SET_IN_STRUCT_P (mem, is_aggregate);
8849 /* Expand an expression EXP that calls a built-in function,
8850 with result going to TARGET if that's convenient
8851 (and in mode MODE if that's convenient).
8852 SUBTARGET may be used as the target for computing one of EXP's operands.
8853 IGNORE is nonzero if the value is to be ignored. */
8855 #define CALLED_AS_BUILT_IN(NODE) \
8856 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8859 expand_builtin (exp, target, subtarget, mode, ignore)
8863 enum machine_mode mode;
8866 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8867 tree arglist = TREE_OPERAND (exp, 1);
8870 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8871 optab builtin_optab;
8873 switch (DECL_FUNCTION_CODE (fndecl))
8878 /* build_function_call changes these into ABS_EXPR. */
8883 /* Treat these like sqrt, but only if the user asks for them. */
8884 if (! flag_fast_math)
8886 case BUILT_IN_FSQRT:
8887 /* If not optimizing, call the library function. */
8892 /* Arg could be wrong type if user redeclared this fcn wrong. */
8893 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8896 /* Stabilize and compute the argument. */
8897 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8898 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8900 exp = copy_node (exp);
8901 arglist = copy_node (arglist);
8902 TREE_OPERAND (exp, 1) = arglist;
8903 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8905 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8907 /* Make a suitable register to place result in. */
8908 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8913 switch (DECL_FUNCTION_CODE (fndecl))
8916 builtin_optab = sin_optab; break;
8918 builtin_optab = cos_optab; break;
8919 case BUILT_IN_FSQRT:
8920 builtin_optab = sqrt_optab; break;
8925 /* Compute into TARGET.
8926 Set TARGET to wherever the result comes back. */
8927 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8928 builtin_optab, op0, target, 0);
8930 /* If we were unable to expand via the builtin, stop the
8931 sequence (without outputting the insns) and break, causing
8932 a call to the library function. */
8939 /* Check the results by default. But if flag_fast_math is turned on,
8940 then assume sqrt will always be called with valid arguments. */
8942 if (flag_errno_math && ! flag_fast_math)
8944 /* Don't define the builtin FP instructions
8945 if your machine is not IEEE. */
8946 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8949 lab1 = gen_label_rtx ();
8951 /* Test the result; if it is NaN, set errno=EDOM because
8952 the argument was not in the domain. */
8953 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
8958 #ifdef GEN_ERRNO_RTX
8959 rtx errno_rtx = GEN_ERRNO_RTX;
8962 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8965 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8968 /* We can't set errno=EDOM directly; let the library call do it.
8969 Pop the arguments right away in case the call gets deleted. */
8971 expand_call (exp, target, 0);
8978 /* Output the entire sequence. */
8979 insns = get_insns ();
8988 /* __builtin_apply_args returns block of memory allocated on
8989 the stack into which is stored the arg pointer, structure
8990 value address, static chain, and all the registers that might
8991 possibly be used in performing a function call. The code is
8992 moved to the start of the function so the incoming values are
8994 case BUILT_IN_APPLY_ARGS:
8995 /* Don't do __builtin_apply_args more than once in a function.
8996 Save the result of the first call and reuse it. */
8997 if (apply_args_value != 0)
8998 return apply_args_value;
9000 /* When this function is called, it means that registers must be
9001 saved on entry to this function. So we migrate the
9002 call to the first insn of this function. */
9007 temp = expand_builtin_apply_args ();
9011 apply_args_value = temp;
9013 /* Put the sequence after the NOTE that starts the function.
9014 If this is inside a SEQUENCE, make the outer-level insn
9015 chain current, so the code is placed at the start of the
9017 push_topmost_sequence ();
9018 emit_insns_before (seq, NEXT_INSN (get_insns ()));
9019 pop_topmost_sequence ();
9023 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
9024 FUNCTION with a copy of the parameters described by
9025 ARGUMENTS, and ARGSIZE. It returns a block of memory
9026 allocated on the stack into which is stored all the registers
9027 that might possibly be used for returning the result of a
9028 function. ARGUMENTS is the value returned by
9029 __builtin_apply_args. ARGSIZE is the number of bytes of
9030 arguments that must be copied. ??? How should this value be
9031 computed? We'll also need a safe worst case value for varargs
9033 case BUILT_IN_APPLY:
9035 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9036 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
9037 || TREE_CHAIN (arglist) == 0
9038 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9039 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9040 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9048 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
9049 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
9051 return expand_builtin_apply (ops[0], ops[1], ops[2]);
9054 /* __builtin_return (RESULT) causes the function to return the
9055 value described by RESULT. RESULT is address of the block of
9056 memory returned by __builtin_apply. */
9057 case BUILT_IN_RETURN:
9059 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9060 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
9061 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
9062 NULL_RTX, VOIDmode, 0));
9065 case BUILT_IN_SAVEREGS:
9066 /* Don't do __builtin_saveregs more than once in a function.
9067 Save the result of the first call and reuse it. */
9068 if (saveregs_value != 0)
9069 return saveregs_value;
9071 /* When this function is called, it means that registers must be
9072 saved on entry to this function. So we migrate the
9073 call to the first insn of this function. */
9077 /* Now really call the function. `expand_call' does not call
9078 expand_builtin, so there is no danger of infinite recursion here. */
9081 #ifdef EXPAND_BUILTIN_SAVEREGS
9082 /* Do whatever the machine needs done in this case. */
9083 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
9085 /* The register where the function returns its value
9086 is likely to have something else in it, such as an argument.
9087 So preserve that register around the call. */
9089 if (value_mode != VOIDmode)
9091 rtx valreg = hard_libcall_value (value_mode);
9092 rtx saved_valreg = gen_reg_rtx (value_mode);
9094 emit_move_insn (saved_valreg, valreg);
9095 temp = expand_call (exp, target, ignore);
9096 emit_move_insn (valreg, saved_valreg);
9099 /* Generate the call, putting the value in a pseudo. */
9100 temp = expand_call (exp, target, ignore);
9106 saveregs_value = temp;
9108 /* Put the sequence after the NOTE that starts the function.
9109 If this is inside a SEQUENCE, make the outer-level insn
9110 chain current, so the code is placed at the start of the
9112 push_topmost_sequence ();
9113 emit_insns_before (seq, NEXT_INSN (get_insns ()));
9114 pop_topmost_sequence ();
9118 /* __builtin_args_info (N) returns word N of the arg space info
9119 for the current function. The number and meanings of words
9120 is controlled by the definition of CUMULATIVE_ARGS. */
9121 case BUILT_IN_ARGS_INFO:
9123 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
9124 int *word_ptr = (int *) ¤t_function_args_info;
9126 /* These are used by the code below that is if 0'ed away */
9128 tree type, elts, result;
9131 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
9132 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
9133 __FILE__, __LINE__);
9137 tree arg = TREE_VALUE (arglist);
9138 if (TREE_CODE (arg) != INTEGER_CST)
9139 error ("argument of `__builtin_args_info' must be constant");
9142 int wordnum = TREE_INT_CST_LOW (arg);
9144 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
9145 error ("argument of `__builtin_args_info' out of range");
9147 return GEN_INT (word_ptr[wordnum]);
9151 error ("missing argument in `__builtin_args_info'");
9156 for (i = 0; i < nwords; i++)
9157 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
9159 type = build_array_type (integer_type_node,
9160 build_index_type (build_int_2 (nwords, 0)));
9161 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
9162 TREE_CONSTANT (result) = 1;
9163 TREE_STATIC (result) = 1;
9164 result = build (INDIRECT_REF, build_pointer_type (type), result);
9165 TREE_CONSTANT (result) = 1;
9166 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
9170 /* Return the address of the first anonymous stack arg. */
9171 case BUILT_IN_NEXT_ARG:
9173 tree fntype = TREE_TYPE (current_function_decl);
9175 if ((TYPE_ARG_TYPES (fntype) == 0
9176 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
9178 && ! current_function_varargs)
9180 error ("`va_start' used in function with fixed args");
9186 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9187 tree arg = TREE_VALUE (arglist);
9189 /* Strip off all nops for the sake of the comparison. This
9190 is not quite the same as STRIP_NOPS. It does more.
9191 We must also strip off INDIRECT_EXPR for C++ reference
9193 while (TREE_CODE (arg) == NOP_EXPR
9194 || TREE_CODE (arg) == CONVERT_EXPR
9195 || TREE_CODE (arg) == NON_LVALUE_EXPR
9196 || TREE_CODE (arg) == INDIRECT_REF)
9197 arg = TREE_OPERAND (arg, 0);
9198 if (arg != last_parm)
9199 warning ("second parameter of `va_start' not last named argument");
9201 else if (! current_function_varargs)
9202 /* Evidently an out of date version of <stdarg.h>; can't validate
9203 va_start's second argument, but can still work as intended. */
9204 warning ("`__builtin_next_arg' called without an argument");
9207 return expand_binop (Pmode, add_optab,
9208 current_function_internal_arg_pointer,
9209 current_function_arg_offset_rtx,
9210 NULL_RTX, 0, OPTAB_LIB_WIDEN);
9212 case BUILT_IN_CLASSIFY_TYPE:
9215 tree type = TREE_TYPE (TREE_VALUE (arglist));
9216 enum tree_code code = TREE_CODE (type);
9217 if (code == VOID_TYPE)
9218 return GEN_INT (void_type_class);
9219 if (code == INTEGER_TYPE)
9220 return GEN_INT (integer_type_class);
9221 if (code == CHAR_TYPE)
9222 return GEN_INT (char_type_class);
9223 if (code == ENUMERAL_TYPE)
9224 return GEN_INT (enumeral_type_class);
9225 if (code == BOOLEAN_TYPE)
9226 return GEN_INT (boolean_type_class);
9227 if (code == POINTER_TYPE)
9228 return GEN_INT (pointer_type_class);
9229 if (code == REFERENCE_TYPE)
9230 return GEN_INT (reference_type_class);
9231 if (code == OFFSET_TYPE)
9232 return GEN_INT (offset_type_class);
9233 if (code == REAL_TYPE)
9234 return GEN_INT (real_type_class);
9235 if (code == COMPLEX_TYPE)
9236 return GEN_INT (complex_type_class);
9237 if (code == FUNCTION_TYPE)
9238 return GEN_INT (function_type_class);
9239 if (code == METHOD_TYPE)
9240 return GEN_INT (method_type_class);
9241 if (code == RECORD_TYPE)
9242 return GEN_INT (record_type_class);
9243 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
9244 return GEN_INT (union_type_class);
9245 if (code == ARRAY_TYPE)
9247 if (TYPE_STRING_FLAG (type))
9248 return GEN_INT (string_type_class);
9250 return GEN_INT (array_type_class);
9252 if (code == SET_TYPE)
9253 return GEN_INT (set_type_class);
9254 if (code == FILE_TYPE)
9255 return GEN_INT (file_type_class);
9256 if (code == LANG_TYPE)
9257 return GEN_INT (lang_type_class);
9259 return GEN_INT (no_type_class);
9261 case BUILT_IN_CONSTANT_P:
9266 tree arg = TREE_VALUE (arglist);
9269 /* We return 1 for a numeric type that's known to be a constant
9270 value at compile-time or for an aggregate type that's a
9271 literal constant. */
9274 /* If we know this is a constant, emit the constant of one. */
9275 if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
9276 || (TREE_CODE (arg) == CONSTRUCTOR
9277 && TREE_CONSTANT (arg))
9278 || (TREE_CODE (arg) == ADDR_EXPR
9279 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
9282 /* If we aren't going to be running CSE or this expression
9283 has side effects, show we don't know it to be a constant.
9284 Likewise if it's a pointer or aggregate type since in those
9285 case we only want literals, since those are only optimized
9286 when generating RTL, not later. */
9287 if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
9288 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9289 || POINTER_TYPE_P (TREE_TYPE (arg)))
9292 /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
9293 chance to see if it can deduce whether ARG is constant. */
9295 tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
9296 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
9300 case BUILT_IN_FRAME_ADDRESS:
9301 /* The argument must be a nonnegative integer constant.
9302 It counts the number of frames to scan up the stack.
9303 The value is the address of that frame. */
9304 case BUILT_IN_RETURN_ADDRESS:
9305 /* The argument must be a nonnegative integer constant.
9306 It counts the number of frames to scan up the stack.
9307 The value is the return address saved in that frame. */
9309 /* Warning about missing arg was already issued. */
9311 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9312 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9314 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9315 error ("invalid arg to `__builtin_frame_address'");
9317 error ("invalid arg to `__builtin_return_address'");
9322 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9323 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9324 hard_frame_pointer_rtx);
9326 /* Some ports cannot access arbitrary stack frames. */
9329 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9330 warning ("unsupported arg to `__builtin_frame_address'");
9332 warning ("unsupported arg to `__builtin_return_address'");
9336 /* For __builtin_frame_address, return what we've got. */
9337 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9340 if (GET_CODE (tem) != REG
9341 && ! CONSTANT_P (tem))
9342 tem = copy_to_mode_reg (Pmode, tem);
9346 /* Returns the address of the area where the structure is returned.
9348 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9350 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9351 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9354 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9356 case BUILT_IN_ALLOCA:
9358 /* Arg could be non-integer if user redeclared this fcn wrong. */
9359 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9362 /* Compute the argument. */
9363 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
9365 /* Allocate the desired space. */
9366 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
9369 /* If not optimizing, call the library function. */
9370 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9374 /* Arg could be non-integer if user redeclared this fcn wrong. */
9375 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9378 /* Compute the argument. */
9379 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9380 /* Compute ffs, into TARGET if possible.
9381 Set TARGET to wherever the result comes back. */
9382 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9383 ffs_optab, op0, target, 1);
9388 case BUILT_IN_STRLEN:
9389 /* If not optimizing, call the library function. */
9390 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9394 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9395 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9399 tree src = TREE_VALUE (arglist);
9400 tree len = c_strlen (src);
9403 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9405 rtx result, src_rtx, char_rtx;
9406 enum machine_mode insn_mode = value_mode, char_mode;
9407 enum insn_code icode;
9409 /* If the length is known, just return it. */
9411 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
9413 /* If SRC is not a pointer type, don't do this operation inline. */
9417 /* Call a function if we can't compute strlen in the right mode. */
9419 while (insn_mode != VOIDmode)
9421 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9422 if (icode != CODE_FOR_nothing)
9425 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9427 if (insn_mode == VOIDmode)
9430 /* Make a place to write the result of the instruction. */
9433 && GET_CODE (result) == REG
9434 && GET_MODE (result) == insn_mode
9435 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9436 result = gen_reg_rtx (insn_mode);
9438 /* Make sure the operands are acceptable to the predicates. */
9440 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9441 result = gen_reg_rtx (insn_mode);
9442 src_rtx = memory_address (BLKmode,
9443 expand_expr (src, NULL_RTX, ptr_mode,
9446 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9447 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9449 /* Check the string is readable and has an end. */
9450 if (current_function_check_memory_usage)
9451 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9453 GEN_INT (MEMORY_USE_RO),
9454 TYPE_MODE (integer_type_node));
9456 char_rtx = const0_rtx;
9457 char_mode = insn_operand_mode[(int)icode][2];
9458 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9459 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9461 emit_insn (GEN_FCN (icode) (result,
9462 gen_rtx_MEM (BLKmode, src_rtx),
9463 char_rtx, GEN_INT (align)));
9465 /* Return the value in the proper mode for this function. */
9466 if (GET_MODE (result) == value_mode)
9468 else if (target != 0)
9470 convert_move (target, result, 0);
9474 return convert_to_mode (value_mode, result, 0);
9477 case BUILT_IN_STRCPY:
9478 /* If not optimizing, call the library function. */
9479 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9483 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9484 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9485 || TREE_CHAIN (arglist) == 0
9486 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9490 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9495 len = size_binop (PLUS_EXPR, len, integer_one_node);
9497 chainon (arglist, build_tree_list (NULL_TREE, len));
9501 case BUILT_IN_MEMCPY:
9502 /* If not optimizing, call the library function. */
9503 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9507 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9508 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9509 || TREE_CHAIN (arglist) == 0
9510 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9512 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9513 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9514 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9519 tree dest = TREE_VALUE (arglist);
9520 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9521 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9524 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9526 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9527 rtx dest_mem, src_mem, dest_addr, len_rtx;
9529 /* If either SRC or DEST is not a pointer type, don't do
9530 this operation in-line. */
9531 if (src_align == 0 || dest_align == 0)
9533 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9534 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9538 dest_mem = get_memory_rtx (dest);
9539 src_mem = get_memory_rtx (src);
9540 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9542 /* Just copy the rights of SRC to the rights of DEST. */
9543 if (current_function_check_memory_usage)
9544 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9545 XEXP (dest_mem, 0), Pmode,
9546 XEXP (src_mem, 0), Pmode,
9547 len_rtx, TYPE_MODE (sizetype));
9549 /* Copy word part most expediently. */
9551 = emit_block_move (dest_mem, src_mem, len_rtx,
9552 MIN (src_align, dest_align));
9555 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9560 case BUILT_IN_MEMSET:
9561 /* If not optimizing, call the library function. */
9562 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9566 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9567 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9568 || TREE_CHAIN (arglist) == 0
9569 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9571 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9573 != (TREE_CODE (TREE_TYPE
9575 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9579 tree dest = TREE_VALUE (arglist);
9580 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9581 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9584 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9585 rtx dest_mem, dest_addr, len_rtx;
9587 /* If DEST is not a pointer type, don't do this
9588 operation in-line. */
9589 if (dest_align == 0)
9592 /* If the arguments have side-effects, then we can only evaluate
9593 them at most once. The following code evaluates them twice if
9594 they are not constants because we break out to expand_call
9595 in that case. They can't be constants if they have side-effects
9596 so we can check for that first. Alternatively, we could call
9597 save_expr to make multiple evaluation safe. */
9598 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9601 /* If VAL is not 0, don't do this operation in-line. */
9602 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9605 /* If LEN does not expand to a constant, don't do this
9606 operation in-line. */
9607 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9608 if (GET_CODE (len_rtx) != CONST_INT)
9611 dest_mem = get_memory_rtx (dest);
9613 /* Just check DST is writable and mark it as readable. */
9614 if (current_function_check_memory_usage)
9615 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9616 XEXP (dest_mem, 0), Pmode,
9617 len_rtx, TYPE_MODE (sizetype),
9618 GEN_INT (MEMORY_USE_WO),
9619 TYPE_MODE (integer_type_node));
9622 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9625 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9630 /* These comparison functions need an instruction that returns an actual
9631 index. An ordinary compare that just sets the condition codes
9633 #ifdef HAVE_cmpstrsi
9634 case BUILT_IN_STRCMP:
9635 /* If not optimizing, call the library function. */
9636 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9639 /* If we need to check memory accesses, call the library function. */
9640 if (current_function_check_memory_usage)
9644 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9645 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9646 || TREE_CHAIN (arglist) == 0
9647 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9649 else if (!HAVE_cmpstrsi)
9652 tree arg1 = TREE_VALUE (arglist);
9653 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9656 len = c_strlen (arg1);
9658 len = size_binop (PLUS_EXPR, integer_one_node, len);
9659 len2 = c_strlen (arg2);
9661 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9663 /* If we don't have a constant length for the first, use the length
9664 of the second, if we know it. We don't require a constant for
9665 this case; some cost analysis could be done if both are available
9666 but neither is constant. For now, assume they're equally cheap.
9668 If both strings have constant lengths, use the smaller. This
9669 could arise if optimization results in strcpy being called with
9670 two fixed strings, or if the code was machine-generated. We should
9671 add some code to the `memcmp' handler below to deal with such
9672 situations, someday. */
9673 if (!len || TREE_CODE (len) != INTEGER_CST)
9680 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9682 if (tree_int_cst_lt (len2, len))
9686 chainon (arglist, build_tree_list (NULL_TREE, len));
9690 case BUILT_IN_MEMCMP:
9691 /* If not optimizing, call the library function. */
9692 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9695 /* If we need to check memory accesses, call the library function. */
9696 if (current_function_check_memory_usage)
9700 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9701 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9702 || TREE_CHAIN (arglist) == 0
9703 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9704 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9705 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9707 else if (!HAVE_cmpstrsi)
9710 tree arg1 = TREE_VALUE (arglist);
9711 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9712 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9716 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9718 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9719 enum machine_mode insn_mode
9720 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9722 /* If we don't have POINTER_TYPE, call the function. */
9723 if (arg1_align == 0 || arg2_align == 0)
9725 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9726 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9730 /* Make a place to write the result of the instruction. */
9733 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9734 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9735 result = gen_reg_rtx (insn_mode);
9737 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9738 get_memory_rtx (arg2),
9739 expand_expr (len, NULL_RTX, VOIDmode, 0),
9740 GEN_INT (MIN (arg1_align, arg2_align))));
9742 /* Return the value in the proper mode for this function. */
9743 mode = TYPE_MODE (TREE_TYPE (exp));
9744 if (GET_MODE (result) == mode)
9746 else if (target != 0)
9748 convert_move (target, result, 0);
9752 return convert_to_mode (mode, result, 0);
9755 case BUILT_IN_STRCMP:
9756 case BUILT_IN_MEMCMP:
9760 case BUILT_IN_SETJMP:
9761 target = expand_builtin_setjmp (arglist, target);
9766 /* __builtin_longjmp is passed a pointer to an array of five words.
9767 It's similar to the C library longjmp function but works with
9768 __builtin_setjmp above. */
9769 case BUILT_IN_LONGJMP:
9770 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9771 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9775 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9777 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9778 NULL_RTX, VOIDmode, 0);
9780 if (value != const1_rtx)
9782 error ("__builtin_longjmp second argument must be 1");
9786 expand_builtin_longjmp (buf_addr, value);
9793 emit_insn (gen_trap ());
9796 error ("__builtin_trap not supported by this target");
9800 /* Various hooks for the DWARF 2 __throw routine. */
9801 case BUILT_IN_UNWIND_INIT:
9802 expand_builtin_unwind_init ();
9804 case BUILT_IN_DWARF_CFA:
9805 return virtual_cfa_rtx;
9806 #ifdef DWARF2_UNWIND_INFO
9807 case BUILT_IN_DWARF_FP_REGNUM:
9808 return expand_builtin_dwarf_fp_regnum ();
9809 case BUILT_IN_DWARF_REG_SIZE:
9810 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9812 case BUILT_IN_FROB_RETURN_ADDR:
9813 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9814 case BUILT_IN_EXTRACT_RETURN_ADDR:
9815 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9816 case BUILT_IN_EH_RETURN:
9817 expand_builtin_eh_return (TREE_VALUE (arglist),
9818 TREE_VALUE (TREE_CHAIN (arglist)),
9819 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9822 default: /* just do library call, if unknown builtin */
9823 error ("built-in function `%s' not currently supported",
9824 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9827 /* The switch statement above can drop through to cause the function
9828 to be called normally. */
9830 return expand_call (exp, target, ignore);
9833 /* Built-in functions to perform an untyped call and return. */
9835 /* For each register that may be used for calling a function, this
9836 gives a mode used to copy the register's value. VOIDmode indicates
9837 the register is not used for calling a function. If the machine
9838 has register windows, this gives only the outbound registers.
9839 INCOMING_REGNO gives the corresponding inbound register. */
9840 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9842 /* For each register that may be used for returning values, this gives
9843 a mode used to copy the register's value. VOIDmode indicates the
9844 register is not used for returning values. If the machine has
9845 register windows, this gives only the outbound registers.
9846 INCOMING_REGNO gives the corresponding inbound register. */
9847 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9849 /* For each register that may be used for calling a function, this
9850 gives the offset of that register into the block returned by
9851 __builtin_apply_args. 0 indicates that the register is not
9852 used for calling a function. */
9853 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9855 /* Return the offset of register REGNO into the block returned by
9856 __builtin_apply_args. This is not declared static, since it is
9857 needed in objc-act.c. */
9860 apply_args_register_offset (regno)
9865 /* Arguments are always put in outgoing registers (in the argument
9866 block) if such make sense. */
9867 #ifdef OUTGOING_REGNO
9868 regno = OUTGOING_REGNO(regno);
9870 return apply_args_reg_offset[regno];
9873 /* Return the size required for the block returned by __builtin_apply_args,
9874 and initialize apply_args_mode. */
9879 static int size = -1;
9881 enum machine_mode mode;
9883 /* The values computed by this function never change. */
9886 /* The first value is the incoming arg-pointer. */
9887 size = GET_MODE_SIZE (Pmode);
9889 /* The second value is the structure value address unless this is
9890 passed as an "invisible" first argument. */
9891 if (struct_value_rtx)
9892 size += GET_MODE_SIZE (Pmode);
9894 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9895 if (FUNCTION_ARG_REGNO_P (regno))
9897 /* Search for the proper mode for copying this register's
9898 value. I'm not sure this is right, but it works so far. */
9899 enum machine_mode best_mode = VOIDmode;
9901 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9903 mode = GET_MODE_WIDER_MODE (mode))
9904 if (HARD_REGNO_MODE_OK (regno, mode)
9905 && HARD_REGNO_NREGS (regno, mode) == 1)
9908 if (best_mode == VOIDmode)
9909 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9911 mode = GET_MODE_WIDER_MODE (mode))
9912 if (HARD_REGNO_MODE_OK (regno, mode)
9913 && (mov_optab->handlers[(int) mode].insn_code
9914 != CODE_FOR_nothing))
9918 if (mode == VOIDmode)
9921 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9922 if (size % align != 0)
9923 size = CEIL (size, align) * align;
9924 apply_args_reg_offset[regno] = size;
9925 size += GET_MODE_SIZE (mode);
9926 apply_args_mode[regno] = mode;
9930 apply_args_mode[regno] = VOIDmode;
9931 apply_args_reg_offset[regno] = 0;
9937 /* Return the size required for the block returned by __builtin_apply,
9938 and initialize apply_result_mode. */
9941 apply_result_size ()
9943 static int size = -1;
9945 enum machine_mode mode;
9947 /* The values computed by this function never change. */
9952 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9953 if (FUNCTION_VALUE_REGNO_P (regno))
9955 /* Search for the proper mode for copying this register's
9956 value. I'm not sure this is right, but it works so far. */
9957 enum machine_mode best_mode = VOIDmode;
9959 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9961 mode = GET_MODE_WIDER_MODE (mode))
9962 if (HARD_REGNO_MODE_OK (regno, mode))
9965 if (best_mode == VOIDmode)
9966 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9968 mode = GET_MODE_WIDER_MODE (mode))
9969 if (HARD_REGNO_MODE_OK (regno, mode)
9970 && (mov_optab->handlers[(int) mode].insn_code
9971 != CODE_FOR_nothing))
9975 if (mode == VOIDmode)
9978 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9979 if (size % align != 0)
9980 size = CEIL (size, align) * align;
9981 size += GET_MODE_SIZE (mode);
9982 apply_result_mode[regno] = mode;
9985 apply_result_mode[regno] = VOIDmode;
9987 /* Allow targets that use untyped_call and untyped_return to override
9988 the size so that machine-specific information can be stored here. */
9989 #ifdef APPLY_RESULT_SIZE
9990 size = APPLY_RESULT_SIZE;
9996 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9997 /* Create a vector describing the result block RESULT. If SAVEP is true,
9998 the result block is used to save the values; otherwise it is used to
9999 restore the values. */
10002 result_vector (savep, result)
10006 int regno, size, align, nelts;
10007 enum machine_mode mode;
10009 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
10012 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10013 if ((mode = apply_result_mode[regno]) != VOIDmode)
10015 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10016 if (size % align != 0)
10017 size = CEIL (size, align) * align;
10018 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
10019 mem = change_address (result, mode,
10020 plus_constant (XEXP (result, 0), size));
10021 savevec[nelts++] = (savep
10022 ? gen_rtx_SET (VOIDmode, mem, reg)
10023 : gen_rtx_SET (VOIDmode, reg, mem));
10024 size += GET_MODE_SIZE (mode);
10026 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
10028 #endif /* HAVE_untyped_call or HAVE_untyped_return */
10030 /* Save the state required to perform an untyped call with the same
10031 arguments as were passed to the current function. */
10034 expand_builtin_apply_args ()
10037 int size, align, regno;
10038 enum machine_mode mode;
10040 /* Create a block where the arg-pointer, structure value address,
10041 and argument registers can be saved. */
10042 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
10044 /* Walk past the arg-pointer and structure value address. */
10045 size = GET_MODE_SIZE (Pmode);
10046 if (struct_value_rtx)
10047 size += GET_MODE_SIZE (Pmode);
10049 /* Save each register used in calling a function to the block. */
10050 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10051 if ((mode = apply_args_mode[regno]) != VOIDmode)
10055 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10056 if (size % align != 0)
10057 size = CEIL (size, align) * align;
10059 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10062 /* For reg-stack.c's stack register household.
10063 Compare with a similar piece of code in function.c. */
10065 emit_insn (gen_rtx_USE (mode, tem));
10068 emit_move_insn (change_address (registers, mode,
10069 plus_constant (XEXP (registers, 0),
10072 size += GET_MODE_SIZE (mode);
10075 /* Save the arg pointer to the block. */
10076 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
10077 copy_to_reg (virtual_incoming_args_rtx));
10078 size = GET_MODE_SIZE (Pmode);
10080 /* Save the structure value address unless this is passed as an
10081 "invisible" first argument. */
10082 if (struct_value_incoming_rtx)
10084 emit_move_insn (change_address (registers, Pmode,
10085 plus_constant (XEXP (registers, 0),
10087 copy_to_reg (struct_value_incoming_rtx));
10088 size += GET_MODE_SIZE (Pmode);
10091 /* Return the address of the block. */
10092 return copy_addr_to_reg (XEXP (registers, 0));
10095 /* Perform an untyped call and save the state required to perform an
10096 untyped return of whatever value was returned by the given function. */
10099 expand_builtin_apply (function, arguments, argsize)
10100 rtx function, arguments, argsize;
10102 int size, align, regno;
10103 enum machine_mode mode;
10104 rtx incoming_args, result, reg, dest, call_insn;
10105 rtx old_stack_level = 0;
10106 rtx call_fusage = 0;
10108 /* Create a block where the return registers can be saved. */
10109 result = assign_stack_local (BLKmode, apply_result_size (), -1);
10111 /* ??? The argsize value should be adjusted here. */
10113 /* Fetch the arg pointer from the ARGUMENTS block. */
10114 incoming_args = gen_reg_rtx (Pmode);
10115 emit_move_insn (incoming_args,
10116 gen_rtx_MEM (Pmode, arguments));
10117 #ifndef STACK_GROWS_DOWNWARD
10118 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
10119 incoming_args, 0, OPTAB_LIB_WIDEN);
10122 /* Perform postincrements before actually calling the function. */
10125 /* Push a new argument block and copy the arguments. */
10126 do_pending_stack_adjust ();
10128 /* Save the stack with nonlocal if available */
10129 #ifdef HAVE_save_stack_nonlocal
10130 if (HAVE_save_stack_nonlocal)
10131 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
10134 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
10136 /* Push a block of memory onto the stack to store the memory arguments.
10137 Save the address in a register, and copy the memory arguments. ??? I
10138 haven't figured out how the calling convention macros effect this,
10139 but it's likely that the source and/or destination addresses in
10140 the block copy will need updating in machine specific ways. */
10141 dest = allocate_dynamic_stack_space (argsize, 0, 0);
10142 emit_block_move (gen_rtx_MEM (BLKmode, dest),
10143 gen_rtx_MEM (BLKmode, incoming_args),
10145 PARM_BOUNDARY / BITS_PER_UNIT);
10147 /* Refer to the argument block. */
10148 apply_args_size ();
10149 arguments = gen_rtx_MEM (BLKmode, arguments);
10151 /* Walk past the arg-pointer and structure value address. */
10152 size = GET_MODE_SIZE (Pmode);
10153 if (struct_value_rtx)
10154 size += GET_MODE_SIZE (Pmode);
10156 /* Restore each of the registers previously saved. Make USE insns
10157 for each of these registers for use in making the call. */
10158 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10159 if ((mode = apply_args_mode[regno]) != VOIDmode)
10161 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10162 if (size % align != 0)
10163 size = CEIL (size, align) * align;
10164 reg = gen_rtx_REG (mode, regno);
10165 emit_move_insn (reg,
10166 change_address (arguments, mode,
10167 plus_constant (XEXP (arguments, 0),
10170 use_reg (&call_fusage, reg);
10171 size += GET_MODE_SIZE (mode);
10174 /* Restore the structure value address unless this is passed as an
10175 "invisible" first argument. */
10176 size = GET_MODE_SIZE (Pmode);
10177 if (struct_value_rtx)
10179 rtx value = gen_reg_rtx (Pmode);
10180 emit_move_insn (value,
10181 change_address (arguments, Pmode,
10182 plus_constant (XEXP (arguments, 0),
10184 emit_move_insn (struct_value_rtx, value);
10185 if (GET_CODE (struct_value_rtx) == REG)
10186 use_reg (&call_fusage, struct_value_rtx);
10187 size += GET_MODE_SIZE (Pmode);
10190 /* All arguments and registers used for the call are set up by now! */
10191 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
10193 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
10194 and we don't want to load it into a register as an optimization,
10195 because prepare_call_address already did it if it should be done. */
10196 if (GET_CODE (function) != SYMBOL_REF)
10197 function = memory_address (FUNCTION_MODE, function);
10199 /* Generate the actual call instruction and save the return value. */
10200 #ifdef HAVE_untyped_call
10201 if (HAVE_untyped_call)
10202 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
10203 result, result_vector (1, result)));
10206 #ifdef HAVE_call_value
10207 if (HAVE_call_value)
10211 /* Locate the unique return register. It is not possible to
10212 express a call that sets more than one return register using
10213 call_value; use untyped_call for that. In fact, untyped_call
10214 only needs to save the return registers in the given block. */
10215 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10216 if ((mode = apply_result_mode[regno]) != VOIDmode)
10219 abort (); /* HAVE_untyped_call required. */
10220 valreg = gen_rtx_REG (mode, regno);
10223 emit_call_insn (gen_call_value (valreg,
10224 gen_rtx_MEM (FUNCTION_MODE, function),
10225 const0_rtx, NULL_RTX, const0_rtx));
10227 emit_move_insn (change_address (result, GET_MODE (valreg),
10235 /* Find the CALL insn we just emitted. */
10236 for (call_insn = get_last_insn ();
10237 call_insn && GET_CODE (call_insn) != CALL_INSN;
10238 call_insn = PREV_INSN (call_insn))
10244 /* Put the register usage information on the CALL. If there is already
10245 some usage information, put ours at the end. */
10246 if (CALL_INSN_FUNCTION_USAGE (call_insn))
10250 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10251 link = XEXP (link, 1))
10254 XEXP (link, 1) = call_fusage;
10257 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
10259 /* Restore the stack. */
10260 #ifdef HAVE_save_stack_nonlocal
10261 if (HAVE_save_stack_nonlocal)
10262 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10265 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10267 /* Return the address of the result block. */
10268 return copy_addr_to_reg (XEXP (result, 0));
10271 /* Perform an untyped return. */
10274 expand_builtin_return (result)
10277 int size, align, regno;
10278 enum machine_mode mode;
10280 rtx call_fusage = 0;
10282 apply_result_size ();
10283 result = gen_rtx_MEM (BLKmode, result);
10285 #ifdef HAVE_untyped_return
10286 if (HAVE_untyped_return)
10288 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10294 /* Restore the return value and note that each value is used. */
10296 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10297 if ((mode = apply_result_mode[regno]) != VOIDmode)
10299 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10300 if (size % align != 0)
10301 size = CEIL (size, align) * align;
10302 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10303 emit_move_insn (reg,
10304 change_address (result, mode,
10305 plus_constant (XEXP (result, 0),
10308 push_to_sequence (call_fusage);
10309 emit_insn (gen_rtx_USE (VOIDmode, reg));
10310 call_fusage = get_insns ();
10312 size += GET_MODE_SIZE (mode);
10315 /* Put the USE insns before the return. */
10316 emit_insns (call_fusage);
10318 /* Return whatever values was restored by jumping directly to the end
10319 of the function. */
10320 expand_null_return ();
10323 /* Expand code for a post- or pre- increment or decrement
10324 and return the RTX for the result.
10325 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
10328 expand_increment (exp, post, ignore)
10332 register rtx op0, op1;
10333 register rtx temp, value;
10334 register tree incremented = TREE_OPERAND (exp, 0);
10335 optab this_optab = add_optab;
10337 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10338 int op0_is_copy = 0;
10339 int single_insn = 0;
10340 /* 1 means we can't store into OP0 directly,
10341 because it is a subreg narrower than a word,
10342 and we don't dare clobber the rest of the word. */
10343 int bad_subreg = 0;
10345 /* Stabilize any component ref that might need to be
10346 evaluated more than once below. */
10348 || TREE_CODE (incremented) == BIT_FIELD_REF
10349 || (TREE_CODE (incremented) == COMPONENT_REF
10350 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10351 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10352 incremented = stabilize_reference (incremented);
10353 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
10354 ones into save exprs so that they don't accidentally get evaluated
10355 more than once by the code below. */
10356 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10357 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10358 incremented = save_expr (incremented);
10360 /* Compute the operands as RTX.
10361 Note whether OP0 is the actual lvalue or a copy of it:
10362 I believe it is a copy iff it is a register or subreg
10363 and insns were generated in computing it. */
10365 temp = get_last_insn ();
10366 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
10368 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10369 in place but instead must do sign- or zero-extension during assignment,
10370 so we copy it into a new register and let the code below use it as
10373 Note that we can safely modify this SUBREG since it is know not to be
10374 shared (it was made by the expand_expr call above). */
10376 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10379 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10383 else if (GET_CODE (op0) == SUBREG
10384 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10386 /* We cannot increment this SUBREG in place. If we are
10387 post-incrementing, get a copy of the old value. Otherwise,
10388 just mark that we cannot increment in place. */
10390 op0 = copy_to_reg (op0);
10395 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10396 && temp != get_last_insn ());
10397 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10398 EXPAND_MEMORY_USE_BAD);
10400 /* Decide whether incrementing or decrementing. */
10401 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10402 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10403 this_optab = sub_optab;
10405 /* Convert decrement by a constant into a negative increment. */
10406 if (this_optab == sub_optab
10407 && GET_CODE (op1) == CONST_INT)
10409 op1 = GEN_INT (- INTVAL (op1));
10410 this_optab = add_optab;
10413 /* For a preincrement, see if we can do this with a single instruction. */
10416 icode = (int) this_optab->handlers[(int) mode].insn_code;
10417 if (icode != (int) CODE_FOR_nothing
10418 /* Make sure that OP0 is valid for operands 0 and 1
10419 of the insn we want to queue. */
10420 && (*insn_operand_predicate[icode][0]) (op0, mode)
10421 && (*insn_operand_predicate[icode][1]) (op0, mode)
10422 && (*insn_operand_predicate[icode][2]) (op1, mode))
10426 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10427 then we cannot just increment OP0. We must therefore contrive to
10428 increment the original value. Then, for postincrement, we can return
10429 OP0 since it is a copy of the old value. For preincrement, expand here
10430 unless we can do it with a single insn.
10432 Likewise if storing directly into OP0 would clobber high bits
10433 we need to preserve (bad_subreg). */
10434 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10436 /* This is the easiest way to increment the value wherever it is.
10437 Problems with multiple evaluation of INCREMENTED are prevented
10438 because either (1) it is a component_ref or preincrement,
10439 in which case it was stabilized above, or (2) it is an array_ref
10440 with constant index in an array in a register, which is
10441 safe to reevaluate. */
10442 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10443 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10444 ? MINUS_EXPR : PLUS_EXPR),
10447 TREE_OPERAND (exp, 1));
10449 while (TREE_CODE (incremented) == NOP_EXPR
10450 || TREE_CODE (incremented) == CONVERT_EXPR)
10452 newexp = convert (TREE_TYPE (incremented), newexp);
10453 incremented = TREE_OPERAND (incremented, 0);
10456 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
10457 return post ? op0 : temp;
10462 /* We have a true reference to the value in OP0.
10463 If there is an insn to add or subtract in this mode, queue it.
10464 Queueing the increment insn avoids the register shuffling
10465 that often results if we must increment now and first save
10466 the old value for subsequent use. */
10468 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
10469 op0 = stabilize (op0);
10472 icode = (int) this_optab->handlers[(int) mode].insn_code;
10473 if (icode != (int) CODE_FOR_nothing
10474 /* Make sure that OP0 is valid for operands 0 and 1
10475 of the insn we want to queue. */
10476 && (*insn_operand_predicate[icode][0]) (op0, mode)
10477 && (*insn_operand_predicate[icode][1]) (op0, mode))
10479 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10480 op1 = force_reg (mode, op1);
10482 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10484 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10486 rtx addr = (general_operand (XEXP (op0, 0), mode)
10487 ? force_reg (Pmode, XEXP (op0, 0))
10488 : copy_to_reg (XEXP (op0, 0)));
10491 op0 = change_address (op0, VOIDmode, addr);
10492 temp = force_reg (GET_MODE (op0), op0);
10493 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10494 op1 = force_reg (mode, op1);
10496 /* The increment queue is LIFO, thus we have to `queue'
10497 the instructions in reverse order. */
10498 enqueue_insn (op0, gen_move_insn (op0, temp));
10499 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10504 /* Preincrement, or we can't increment with one simple insn. */
10506 /* Save a copy of the value before inc or dec, to return it later. */
10507 temp = value = copy_to_reg (op0);
10509 /* Arrange to return the incremented value. */
10510 /* Copy the rtx because expand_binop will protect from the queue,
10511 and the results of that would be invalid for us to return
10512 if our caller does emit_queue before using our result. */
10513 temp = copy_rtx (value = op0);
10515 /* Increment however we can. */
10516 op1 = expand_binop (mode, this_optab, value, op1,
10517 current_function_check_memory_usage ? NULL_RTX : op0,
10518 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10519 /* Make sure the value is stored into OP0. */
10521 emit_move_insn (op0, op1);
10526 /* Expand all function calls contained within EXP, innermost ones first.
10527 But don't look within expressions that have sequence points.
10528 For each CALL_EXPR, record the rtx for its value
10529 in the CALL_EXPR_RTL field. */
10532 preexpand_calls (exp)
10535 register int nops, i;
10536 int type = TREE_CODE_CLASS (TREE_CODE (exp));
10538 if (! do_preexpand_calls)
10541 /* Only expressions and references can contain calls. */
10543 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10546 switch (TREE_CODE (exp))
10549 /* Do nothing if already expanded. */
10550 if (CALL_EXPR_RTL (exp) != 0
10551 /* Do nothing if the call returns a variable-sized object. */
10552 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10553 /* Do nothing to built-in functions. */
10554 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10555 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10557 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10560 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10563 case COMPOUND_EXPR:
10565 case TRUTH_ANDIF_EXPR:
10566 case TRUTH_ORIF_EXPR:
10567 /* If we find one of these, then we can be sure
10568 the adjust will be done for it (since it makes jumps).
10569 Do it now, so that if this is inside an argument
10570 of a function, we don't get the stack adjustment
10571 after some other args have already been pushed. */
10572 do_pending_stack_adjust ();
10577 case WITH_CLEANUP_EXPR:
10578 case CLEANUP_POINT_EXPR:
10579 case TRY_CATCH_EXPR:
10583 if (SAVE_EXPR_RTL (exp) != 0)
10590 nops = tree_code_length[(int) TREE_CODE (exp)];
10591 for (i = 0; i < nops; i++)
10592 if (TREE_OPERAND (exp, i) != 0)
10594 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10595 if (type == 'e' || type == '<' || type == '1' || type == '2'
10597 preexpand_calls (TREE_OPERAND (exp, i));
10601 /* At the start of a function, record that we have no previously-pushed
10602 arguments waiting to be popped. */
10605 init_pending_stack_adjust ()
10607 pending_stack_adjust = 0;
10610 /* When exiting from function, if safe, clear out any pending stack adjust
10611 so the adjustment won't get done.
10613 Note, if the current function calls alloca, then it must have a
10614 frame pointer regardless of the value of flag_omit_frame_pointer. */
10617 clear_pending_stack_adjust ()
10619 #ifdef EXIT_IGNORE_STACK
10621 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10622 && EXIT_IGNORE_STACK
10623 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10624 && ! flag_inline_functions)
10625 pending_stack_adjust = 0;
10629 /* Pop any previously-pushed arguments that have not been popped yet. */
10632 do_pending_stack_adjust ()
10634 if (inhibit_defer_pop == 0)
10636 if (pending_stack_adjust != 0)
10637 adjust_stack (GEN_INT (pending_stack_adjust));
10638 pending_stack_adjust = 0;
10642 /* Expand conditional expressions. */
10644 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10645 LABEL is an rtx of code CODE_LABEL, in this function and all the
10649 jumpifnot (exp, label)
10653 do_jump (exp, label, NULL_RTX);
10656 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10659 jumpif (exp, label)
10663 do_jump (exp, NULL_RTX, label);
10666 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10667 the result is zero, or IF_TRUE_LABEL if the result is one.
10668 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10669 meaning fall through in that case.
10671 do_jump always does any pending stack adjust except when it does not
10672 actually perform a jump. An example where there is no jump
10673 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10675 This function is responsible for optimizing cases such as
10676 &&, || and comparison operators in EXP. */
10679 do_jump (exp, if_false_label, if_true_label)
10681 rtx if_false_label, if_true_label;
10683 register enum tree_code code = TREE_CODE (exp);
10684 /* Some cases need to create a label to jump to
10685 in order to properly fall through.
10686 These cases set DROP_THROUGH_LABEL nonzero. */
10687 rtx drop_through_label = 0;
10689 rtx comparison = 0;
10692 enum machine_mode mode;
10694 #ifdef MAX_INTEGER_COMPUTATION_MODE
10695 check_max_integer_computation_mode (exp);
10706 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10712 /* This is not true with #pragma weak */
10714 /* The address of something can never be zero. */
10716 emit_jump (if_true_label);
10721 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10722 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10723 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10726 /* If we are narrowing the operand, we have to do the compare in the
10728 if ((TYPE_PRECISION (TREE_TYPE (exp))
10729 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10731 case NON_LVALUE_EXPR:
10732 case REFERENCE_EXPR:
10737 /* These cannot change zero->non-zero or vice versa. */
10738 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10742 /* This is never less insns than evaluating the PLUS_EXPR followed by
10743 a test and can be longer if the test is eliminated. */
10745 /* Reduce to minus. */
10746 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10747 TREE_OPERAND (exp, 0),
10748 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10749 TREE_OPERAND (exp, 1))));
10750 /* Process as MINUS. */
10754 /* Non-zero iff operands of minus differ. */
10755 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10756 TREE_OPERAND (exp, 0),
10757 TREE_OPERAND (exp, 1)),
10762 /* If we are AND'ing with a small constant, do this comparison in the
10763 smallest type that fits. If the machine doesn't have comparisons
10764 that small, it will be converted back to the wider comparison.
10765 This helps if we are testing the sign bit of a narrower object.
10766 combine can't do this for us because it can't know whether a
10767 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10769 if (! SLOW_BYTE_ACCESS
10770 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10771 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10772 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10773 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10774 && (type = type_for_mode (mode, 1)) != 0
10775 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10776 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10777 != CODE_FOR_nothing))
10779 do_jump (convert (type, exp), if_false_label, if_true_label);
10784 case TRUTH_NOT_EXPR:
10785 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10788 case TRUTH_ANDIF_EXPR:
10789 if (if_false_label == 0)
10790 if_false_label = drop_through_label = gen_label_rtx ();
10791 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10792 start_cleanup_deferral ();
10793 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10794 end_cleanup_deferral ();
10797 case TRUTH_ORIF_EXPR:
10798 if (if_true_label == 0)
10799 if_true_label = drop_through_label = gen_label_rtx ();
10800 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10801 start_cleanup_deferral ();
10802 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10803 end_cleanup_deferral ();
10806 case COMPOUND_EXPR:
10807 push_temp_slots ();
10808 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10809 preserve_temp_slots (NULL_RTX);
10810 free_temp_slots ();
10813 do_pending_stack_adjust ();
10814 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10817 case COMPONENT_REF:
10818 case BIT_FIELD_REF:
10821 int bitsize, bitpos, unsignedp;
10822 enum machine_mode mode;
10828 /* Get description of this reference. We don't actually care
10829 about the underlying object here. */
10830 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10831 &mode, &unsignedp, &volatilep,
10834 type = type_for_size (bitsize, unsignedp);
10835 if (! SLOW_BYTE_ACCESS
10836 && type != 0 && bitsize >= 0
10837 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10838 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10839 != CODE_FOR_nothing))
10841 do_jump (convert (type, exp), if_false_label, if_true_label);
10848 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10849 if (integer_onep (TREE_OPERAND (exp, 1))
10850 && integer_zerop (TREE_OPERAND (exp, 2)))
10851 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10853 else if (integer_zerop (TREE_OPERAND (exp, 1))
10854 && integer_onep (TREE_OPERAND (exp, 2)))
10855 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10859 register rtx label1 = gen_label_rtx ();
10860 drop_through_label = gen_label_rtx ();
10862 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10864 start_cleanup_deferral ();
10865 /* Now the THEN-expression. */
10866 do_jump (TREE_OPERAND (exp, 1),
10867 if_false_label ? if_false_label : drop_through_label,
10868 if_true_label ? if_true_label : drop_through_label);
10869 /* In case the do_jump just above never jumps. */
10870 do_pending_stack_adjust ();
10871 emit_label (label1);
10873 /* Now the ELSE-expression. */
10874 do_jump (TREE_OPERAND (exp, 2),
10875 if_false_label ? if_false_label : drop_through_label,
10876 if_true_label ? if_true_label : drop_through_label);
10877 end_cleanup_deferral ();
10883 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10885 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10886 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10888 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10889 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10892 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10893 fold (build (EQ_EXPR, TREE_TYPE (exp),
10894 fold (build1 (REALPART_EXPR,
10895 TREE_TYPE (inner_type),
10897 fold (build1 (REALPART_EXPR,
10898 TREE_TYPE (inner_type),
10900 fold (build (EQ_EXPR, TREE_TYPE (exp),
10901 fold (build1 (IMAGPART_EXPR,
10902 TREE_TYPE (inner_type),
10904 fold (build1 (IMAGPART_EXPR,
10905 TREE_TYPE (inner_type),
10907 if_false_label, if_true_label);
10910 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10911 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10913 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10914 && !can_compare_p (TYPE_MODE (inner_type)))
10915 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10917 comparison = compare (exp, EQ, EQ);
10923 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10925 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10926 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10928 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10929 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10932 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10933 fold (build (NE_EXPR, TREE_TYPE (exp),
10934 fold (build1 (REALPART_EXPR,
10935 TREE_TYPE (inner_type),
10937 fold (build1 (REALPART_EXPR,
10938 TREE_TYPE (inner_type),
10940 fold (build (NE_EXPR, TREE_TYPE (exp),
10941 fold (build1 (IMAGPART_EXPR,
10942 TREE_TYPE (inner_type),
10944 fold (build1 (IMAGPART_EXPR,
10945 TREE_TYPE (inner_type),
10947 if_false_label, if_true_label);
10950 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10951 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10953 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10954 && !can_compare_p (TYPE_MODE (inner_type)))
10955 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10957 comparison = compare (exp, NE, NE);
10962 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10964 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10965 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10967 comparison = compare (exp, LT, LTU);
10971 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10973 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10974 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10976 comparison = compare (exp, LE, LEU);
10980 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10982 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10983 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10985 comparison = compare (exp, GT, GTU);
10989 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10991 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10992 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10994 comparison = compare (exp, GE, GEU);
10999 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
11001 /* This is not needed any more and causes poor code since it causes
11002 comparisons and tests from non-SI objects to have different code
11004 /* Copy to register to avoid generating bad insns by cse
11005 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
11006 if (!cse_not_expected && GET_CODE (temp) == MEM)
11007 temp = copy_to_reg (temp);
11009 do_pending_stack_adjust ();
11010 if (GET_CODE (temp) == CONST_INT)
11011 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
11012 else if (GET_CODE (temp) == LABEL_REF)
11013 comparison = const_true_rtx;
11014 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
11015 && !can_compare_p (GET_MODE (temp)))
11016 /* Note swapping the labels gives us not-equal. */
11017 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
11018 else if (GET_MODE (temp) != VOIDmode)
11019 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
11020 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
11021 GET_MODE (temp), NULL_RTX, 0);
11026 /* Do any postincrements in the expression that was tested. */
11029 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
11030 straight into a conditional jump instruction as the jump condition.
11031 Otherwise, all the work has been done already. */
11033 if (comparison == const_true_rtx)
11036 emit_jump (if_true_label);
11038 else if (comparison == const0_rtx)
11040 if (if_false_label)
11041 emit_jump (if_false_label);
11043 else if (comparison)
11044 do_jump_for_compare (comparison, if_false_label, if_true_label);
11046 if (drop_through_label)
11048 /* If do_jump produces code that might be jumped around,
11049 do any stack adjusts from that code, before the place
11050 where control merges in. */
11051 do_pending_stack_adjust ();
11052 emit_label (drop_through_label);
11056 /* Given a comparison expression EXP for values too wide to be compared
11057 with one insn, test the comparison and jump to the appropriate label.
11058 The code of EXP is ignored; we always test GT if SWAP is 0,
11059 and LT if SWAP is 1. */
11062 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
11065 rtx if_false_label, if_true_label;
11067 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
11068 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
11069 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
11070 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
11071 rtx drop_through_label = 0;
11072 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
11075 if (! if_true_label || ! if_false_label)
11076 drop_through_label = gen_label_rtx ();
11077 if (! if_true_label)
11078 if_true_label = drop_through_label;
11079 if (! if_false_label)
11080 if_false_label = drop_through_label;
11082 /* Compare a word at a time, high order first. */
11083 for (i = 0; i < nwords; i++)
11086 rtx op0_word, op1_word;
11088 if (WORDS_BIG_ENDIAN)
11090 op0_word = operand_subword_force (op0, i, mode);
11091 op1_word = operand_subword_force (op1, i, mode);
11095 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
11096 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
11099 /* All but high-order word must be compared as unsigned. */
11100 comp = compare_from_rtx (op0_word, op1_word,
11101 (unsignedp || i > 0) ? GTU : GT,
11102 unsignedp, word_mode, NULL_RTX, 0);
11103 if (comp == const_true_rtx)
11104 emit_jump (if_true_label);
11105 else if (comp != const0_rtx)
11106 do_jump_for_compare (comp, NULL_RTX, if_true_label);
11108 /* Consider lower words only if these are equal. */
11109 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
11111 if (comp == const_true_rtx)
11112 emit_jump (if_false_label);
11113 else if (comp != const0_rtx)
11114 do_jump_for_compare (comp, NULL_RTX, if_false_label);
11117 if (if_false_label)
11118 emit_jump (if_false_label);
11119 if (drop_through_label)
11120 emit_label (drop_through_label);
11123 /* Compare OP0 with OP1, word at a time, in mode MODE.
11124 UNSIGNEDP says to do unsigned comparison.
11125 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
11128 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
11129 enum machine_mode mode;
11132 rtx if_false_label, if_true_label;
11134 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
11135 rtx drop_through_label = 0;
11138 if (! if_true_label || ! if_false_label)
11139 drop_through_label = gen_label_rtx ();
11140 if (! if_true_label)
11141 if_true_label = drop_through_label;
11142 if (! if_false_label)
11143 if_false_label = drop_through_label;
11145 /* Compare a word at a time, high order first. */
11146 for (i = 0; i < nwords; i++)
11149 rtx op0_word, op1_word;
11151 if (WORDS_BIG_ENDIAN)
11153 op0_word = operand_subword_force (op0, i, mode);
11154 op1_word = operand_subword_force (op1, i, mode);
11158 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
11159 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
11162 /* All but high-order word must be compared as unsigned. */
11163 comp = compare_from_rtx (op0_word, op1_word,
11164 (unsignedp || i > 0) ? GTU : GT,
11165 unsignedp, word_mode, NULL_RTX, 0);
11166 if (comp == const_true_rtx)
11167 emit_jump (if_true_label);
11168 else if (comp != const0_rtx)
11169 do_jump_for_compare (comp, NULL_RTX, if_true_label);
11171 /* Consider lower words only if these are equal. */
11172 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
11174 if (comp == const_true_rtx)
11175 emit_jump (if_false_label);
11176 else if (comp != const0_rtx)
11177 do_jump_for_compare (comp, NULL_RTX, if_false_label);
11180 if (if_false_label)
11181 emit_jump (if_false_label);
11182 if (drop_through_label)
11183 emit_label (drop_through_label);
11186 /* Given an EQ_EXPR expression EXP for values too wide to be compared
11187 with one insn, test the comparison and jump to the appropriate label. */
11190 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
11192 rtx if_false_label, if_true_label;
11194 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11195 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11196 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
11197 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
11199 rtx drop_through_label = 0;
11201 if (! if_false_label)
11202 drop_through_label = if_false_label = gen_label_rtx ();
11204 for (i = 0; i < nwords; i++)
11206 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
11207 operand_subword_force (op1, i, mode),
11208 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
11209 word_mode, NULL_RTX, 0);
11210 if (comp == const_true_rtx)
11211 emit_jump (if_false_label);
11212 else if (comp != const0_rtx)
11213 do_jump_for_compare (comp, if_false_label, NULL_RTX);
11217 emit_jump (if_true_label);
11218 if (drop_through_label)
11219 emit_label (drop_through_label);
11222 /* Jump according to whether OP0 is 0.
11223 We assume that OP0 has an integer mode that is too wide
11224 for the available compare insns. */
11227 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
11229 rtx if_false_label, if_true_label;
11231 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
11234 rtx drop_through_label = 0;
11236 /* The fastest way of doing this comparison on almost any machine is to
11237 "or" all the words and compare the result. If all have to be loaded
11238 from memory and this is a very wide item, it's possible this may
11239 be slower, but that's highly unlikely. */
11241 part = gen_reg_rtx (word_mode);
11242 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
11243 for (i = 1; i < nwords && part != 0; i++)
11244 part = expand_binop (word_mode, ior_optab, part,
11245 operand_subword_force (op0, i, GET_MODE (op0)),
11246 part, 1, OPTAB_WIDEN);
11250 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
11253 if (comp == const_true_rtx)
11254 emit_jump (if_false_label);
11255 else if (comp == const0_rtx)
11256 emit_jump (if_true_label);
11258 do_jump_for_compare (comp, if_false_label, if_true_label);
11263 /* If we couldn't do the "or" simply, do this with a series of compares. */
11264 if (! if_false_label)
11265 drop_through_label = if_false_label = gen_label_rtx ();
11267 for (i = 0; i < nwords; i++)
11269 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
11271 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
11272 if (comp == const_true_rtx)
11273 emit_jump (if_false_label);
11274 else if (comp != const0_rtx)
11275 do_jump_for_compare (comp, if_false_label, NULL_RTX);
11279 emit_jump (if_true_label);
11281 if (drop_through_label)
11282 emit_label (drop_through_label);
11285 /* Given a comparison expression in rtl form, output conditional branches to
11286 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
11289 do_jump_for_compare (comparison, if_false_label, if_true_label)
11290 rtx comparison, if_false_label, if_true_label;
11294 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11295 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
11300 if (if_false_label)
11301 emit_jump (if_false_label);
11303 else if (if_false_label)
11305 rtx first = get_last_insn (), insn, branch;
11308 /* Output the branch with the opposite condition. Then try to invert
11309 what is generated. If more than one insn is a branch, or if the
11310 branch is not the last insn written, abort. If we can't invert
11311 the branch, emit make a true label, redirect this jump to that,
11312 emit a jump to the false label and define the true label. */
11313 /* ??? Note that we wouldn't have to do any of this nonsense if
11314 we passed both labels into a combined compare-and-branch.
11315 Ah well, jump threading does a good job of repairing the damage. */
11317 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11318 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
11323 /* Here we get the first insn that was just emitted. It used to be the
11324 case that, on some machines, emitting the branch would discard
11325 the previous compare insn and emit a replacement. This isn't
11326 done anymore, but abort if we see that FIRST is deleted. */
11329 first = get_insns ();
11330 else if (INSN_DELETED_P (first))
11333 first = NEXT_INSN (first);
11335 /* Look for multiple branches in this sequence, as might be generated
11336 for a multi-word integer comparison. */
11340 for (insn = first; insn ; insn = NEXT_INSN (insn))
11341 if (GET_CODE (insn) == JUMP_INSN)
11347 /* If we've got one branch at the end of the sequence,
11348 we can try to reverse it. */
11350 if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
11353 insn_label = XEXP (condjump_label (branch), 0);
11354 JUMP_LABEL (branch) = insn_label;
11356 if (insn_label != if_false_label)
11359 if (invert_jump (branch, if_false_label))
11363 /* Multiple branches, or reversion failed. Convert to branches
11364 around an unconditional jump. */
11366 if_true_label = gen_label_rtx ();
11367 for (insn = first; insn; insn = NEXT_INSN (insn))
11368 if (GET_CODE (insn) == JUMP_INSN)
11371 insn_label = XEXP (condjump_label (insn), 0);
11372 JUMP_LABEL (insn) = insn_label;
11374 if (insn_label == if_false_label)
11375 redirect_jump (insn, if_true_label);
11377 emit_jump (if_false_label);
11378 emit_label (if_true_label);
11382 /* Generate code for a comparison expression EXP
11383 (including code to compute the values to be compared)
11384 and set (CC0) according to the result.
11385 SIGNED_CODE should be the rtx operation for this comparison for
11386 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
11388 We force a stack adjustment unless there are currently
11389 things pushed on the stack that aren't yet used. */
11392 compare (exp, signed_code, unsigned_code)
11394 enum rtx_code signed_code, unsigned_code;
11396 register rtx op0, op1;
11397 register tree type;
11398 register enum machine_mode mode;
11400 enum rtx_code code;
11402 /* Don't crash if the comparison was erroneous. */
11403 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11404 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
11407 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11408 type = TREE_TYPE (TREE_OPERAND (exp, 0));
11409 mode = TYPE_MODE (type);
11410 unsignedp = TREE_UNSIGNED (type);
11411 code = unsignedp ? unsigned_code : signed_code;
11413 #ifdef HAVE_canonicalize_funcptr_for_compare
11414 /* If function pointers need to be "canonicalized" before they can
11415 be reliably compared, then canonicalize them. */
11416 if (HAVE_canonicalize_funcptr_for_compare
11417 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11418 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11421 rtx new_op0 = gen_reg_rtx (mode);
11423 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
11427 if (HAVE_canonicalize_funcptr_for_compare
11428 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11429 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11432 rtx new_op1 = gen_reg_rtx (mode);
11434 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
11439 return compare_from_rtx (op0, op1, code, unsignedp, mode,
11441 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
11442 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
11445 /* Like compare but expects the values to compare as two rtx's.
11446 The decision as to signed or unsigned comparison must be made by the caller.
11448 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
11451 If ALIGN is non-zero, it is the alignment of this type; if zero, the
11452 size of MODE should be used. */
11455 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
11456 register rtx op0, op1;
11457 enum rtx_code code;
11459 enum machine_mode mode;
11465 /* If one operand is constant, make it the second one. Only do this
11466 if the other operand is not constant as well. */
11468 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
11469 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
11474 code = swap_condition (code);
11477 if (flag_force_mem)
11479 op0 = force_not_mem (op0);
11480 op1 = force_not_mem (op1);
11483 do_pending_stack_adjust ();
11485 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
11486 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
11490 /* There's no need to do this now that combine.c can eliminate lots of
11491 sign extensions. This can be less efficient in certain cases on other
11494 /* If this is a signed equality comparison, we can do it as an
11495 unsigned comparison since zero-extension is cheaper than sign
11496 extension and comparisons with zero are done as unsigned. This is
11497 the case even on machines that can do fast sign extension, since
11498 zero-extension is easier to combine with other operations than
11499 sign-extension is. If we are comparing against a constant, we must
11500 convert it to what it would look like unsigned. */
11501 if ((code == EQ || code == NE) && ! unsignedp
11502 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
11504 if (GET_CODE (op1) == CONST_INT
11505 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
11506 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
11511 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
11513 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
11516 /* Generate code to calculate EXP using a store-flag instruction
11517 and return an rtx for the result. EXP is either a comparison
11518 or a TRUTH_NOT_EXPR whose operand is a comparison.
11520 If TARGET is nonzero, store the result there if convenient.
11522 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
11525 Return zero if there is no suitable set-flag instruction
11526 available on this machine.
11528 Once expand_expr has been called on the arguments of the comparison,
11529 we are committed to doing the store flag, since it is not safe to
11530 re-evaluate the expression. We emit the store-flag insn by calling
11531 emit_store_flag, but only expand the arguments if we have a reason
11532 to believe that emit_store_flag will be successful. If we think that
11533 it will, but it isn't, we have to simulate the store-flag with a
11534 set/jump/set sequence. */
11537 do_store_flag (exp, target, mode, only_cheap)
11540 enum machine_mode mode;
11543 enum rtx_code code;
11544 tree arg0, arg1, type;
11546 enum machine_mode operand_mode;
11550 enum insn_code icode;
11551 rtx subtarget = target;
11554 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
11555 result at the end. We can't simply invert the test since it would
11556 have already been inverted if it were valid. This case occurs for
11557 some floating-point comparisons. */
11559 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11560 invert = 1, exp = TREE_OPERAND (exp, 0);
11562 arg0 = TREE_OPERAND (exp, 0);
11563 arg1 = TREE_OPERAND (exp, 1);
11564 type = TREE_TYPE (arg0);
11565 operand_mode = TYPE_MODE (type);
11566 unsignedp = TREE_UNSIGNED (type);
11568 /* We won't bother with BLKmode store-flag operations because it would mean
11569 passing a lot of information to emit_store_flag. */
11570 if (operand_mode == BLKmode)
11573 /* We won't bother with store-flag operations involving function pointers
11574 when function pointers must be canonicalized before comparisons. */
11575 #ifdef HAVE_canonicalize_funcptr_for_compare
11576 if (HAVE_canonicalize_funcptr_for_compare
11577 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11578 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11580 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11581 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11582 == FUNCTION_TYPE))))
11589 /* Get the rtx comparison code to use. We know that EXP is a comparison
11590 operation of some type. Some comparisons against 1 and -1 can be
11591 converted to comparisons with zero. Do so here so that the tests
11592 below will be aware that we have a comparison with zero. These
11593 tests will not catch constants in the first operand, but constants
11594 are rarely passed as the first operand. */
11596 switch (TREE_CODE (exp))
11605 if (integer_onep (arg1))
11606 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11608 code = unsignedp ? LTU : LT;
11611 if (! unsignedp && integer_all_onesp (arg1))
11612 arg1 = integer_zero_node, code = LT;
11614 code = unsignedp ? LEU : LE;
11617 if (! unsignedp && integer_all_onesp (arg1))
11618 arg1 = integer_zero_node, code = GE;
11620 code = unsignedp ? GTU : GT;
11623 if (integer_onep (arg1))
11624 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11626 code = unsignedp ? GEU : GE;
11632 /* Put a constant second. */
11633 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11635 tem = arg0; arg0 = arg1; arg1 = tem;
11636 code = swap_condition (code);
11639 /* If this is an equality or inequality test of a single bit, we can
11640 do this by shifting the bit being tested to the low-order bit and
11641 masking the result with the constant 1. If the condition was EQ,
11642 we xor it with 1. This does not require an scc insn and is faster
11643 than an scc insn even if we have it. */
11645 if ((code == NE || code == EQ)
11646 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11647 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11649 tree inner = TREE_OPERAND (arg0, 0);
11650 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
11653 /* If INNER is a right shift of a constant and it plus BITNUM does
11654 not overflow, adjust BITNUM and INNER. */
11656 if (TREE_CODE (inner) == RSHIFT_EXPR
11657 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11658 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11659 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11660 < TYPE_PRECISION (type)))
11662 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11663 inner = TREE_OPERAND (inner, 0);
11666 /* If we are going to be able to omit the AND below, we must do our
11667 operations as unsigned. If we must use the AND, we have a choice.
11668 Normally unsigned is faster, but for some machines signed is. */
11669 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11670 #ifdef LOAD_EXTEND_OP
11671 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11677 if (subtarget == 0 || GET_CODE (subtarget) != REG
11678 || GET_MODE (subtarget) != operand_mode
11679 || ! safe_from_p (subtarget, inner, 1))
11682 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11685 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11686 size_int (bitnum), subtarget, ops_unsignedp);
11688 if (GET_MODE (op0) != mode)
11689 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11691 if ((code == EQ && ! invert) || (code == NE && invert))
11692 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11693 ops_unsignedp, OPTAB_LIB_WIDEN);
11695 /* Put the AND last so it can combine with more things. */
11696 if (bitnum != TYPE_PRECISION (type) - 1)
11697 op0 = expand_and (op0, const1_rtx, subtarget);
11702 /* Now see if we are likely to be able to do this. Return if not. */
11703 if (! can_compare_p (operand_mode))
11705 icode = setcc_gen_code[(int) code];
11706 if (icode == CODE_FOR_nothing
11707 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11709 /* We can only do this if it is one of the special cases that
11710 can be handled without an scc insn. */
11711 if ((code == LT && integer_zerop (arg1))
11712 || (! only_cheap && code == GE && integer_zerop (arg1)))
11714 else if (BRANCH_COST >= 0
11715 && ! only_cheap && (code == NE || code == EQ)
11716 && TREE_CODE (type) != REAL_TYPE
11717 && ((abs_optab->handlers[(int) operand_mode].insn_code
11718 != CODE_FOR_nothing)
11719 || (ffs_optab->handlers[(int) operand_mode].insn_code
11720 != CODE_FOR_nothing)))
11726 preexpand_calls (exp);
11727 if (subtarget == 0 || GET_CODE (subtarget) != REG
11728 || GET_MODE (subtarget) != operand_mode
11729 || ! safe_from_p (subtarget, arg1, 1))
11732 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11733 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11736 target = gen_reg_rtx (mode);
11738 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11739 because, if the emit_store_flag does anything it will succeed and
11740 OP0 and OP1 will not be used subsequently. */
11742 result = emit_store_flag (target, code,
11743 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11744 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11745 operand_mode, unsignedp, 1);
11750 result = expand_binop (mode, xor_optab, result, const1_rtx,
11751 result, 0, OPTAB_LIB_WIDEN);
11755 /* If this failed, we have to do this with set/compare/jump/set code. */
11756 if (GET_CODE (target) != REG
11757 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11758 target = gen_reg_rtx (GET_MODE (target));
11760 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11761 result = compare_from_rtx (op0, op1, code, unsignedp,
11762 operand_mode, NULL_RTX, 0);
11763 if (GET_CODE (result) == CONST_INT)
11764 return (((result == const0_rtx && ! invert)
11765 || (result != const0_rtx && invert))
11766 ? const0_rtx : const1_rtx);
11768 label = gen_label_rtx ();
11769 if (bcc_gen_fctn[(int) code] == 0)
11772 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11773 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11774 emit_label (label);
11779 /* Generate a tablejump instruction (used for switch statements). */
11781 #ifdef HAVE_tablejump
11783 /* INDEX is the value being switched on, with the lowest value
11784 in the table already subtracted.
11785 MODE is its expected mode (needed if INDEX is constant).
11786 RANGE is the length of the jump table.
11787 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11789 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11790 index value is out of range. */
11793 do_tablejump (index, mode, range, table_label, default_label)
11794 rtx index, range, table_label, default_label;
11795 enum machine_mode mode;
11797 register rtx temp, vector;
11799 /* Do an unsigned comparison (in the proper mode) between the index
11800 expression and the value which represents the length of the range.
11801 Since we just finished subtracting the lower bound of the range
11802 from the index expression, this comparison allows us to simultaneously
11803 check that the original index expression value is both greater than
11804 or equal to the minimum value of the range and less than or equal to
11805 the maximum value of the range. */
11807 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11810 /* If index is in range, it must fit in Pmode.
11811 Convert to Pmode so we can index with it. */
11813 index = convert_to_mode (Pmode, index, 1);
11815 /* Don't let a MEM slip thru, because then INDEX that comes
11816 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11817 and break_out_memory_refs will go to work on it and mess it up. */
11818 #ifdef PIC_CASE_VECTOR_ADDRESS
11819 if (flag_pic && GET_CODE (index) != REG)
11820 index = copy_to_mode_reg (Pmode, index);
11823 /* If flag_force_addr were to affect this address
11824 it could interfere with the tricky assumptions made
11825 about addresses that contain label-refs,
11826 which may be valid only very near the tablejump itself. */
11827 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11828 GET_MODE_SIZE, because this indicates how large insns are. The other
11829 uses should all be Pmode, because they are addresses. This code
11830 could fail if addresses and insns are not the same size. */
11831 index = gen_rtx_PLUS (Pmode,
11832 gen_rtx_MULT (Pmode, index,
11833 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11834 gen_rtx_LABEL_REF (Pmode, table_label));
11835 #ifdef PIC_CASE_VECTOR_ADDRESS
11837 index = PIC_CASE_VECTOR_ADDRESS (index);
11840 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11841 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11842 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11843 RTX_UNCHANGING_P (vector) = 1;
11844 convert_move (temp, vector, 0);
11846 emit_jump_insn (gen_tablejump (temp, table_label));
11848 /* If we are generating PIC code or if the table is PC-relative, the
11849 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11850 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11854 #endif /* HAVE_tablejump */