1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
31 #include "hard-reg-set.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
41 #include "typeclass.h"
45 #define CEIL(x,y) (((x) + (y) - 1) / (y))
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first */
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
65 #define STACK_PUSH_CODE PRE_INC
69 /* Assume that case vectors are not pc-relative. */
70 #ifndef CASE_VECTOR_PC_RELATIVE
71 #define CASE_VECTOR_PC_RELATIVE 0
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89 int pending_stack_adjust;
91 /* Under some ABIs, it is the caller's responsibility to pop arguments
92 pushed for function calls. A naive implementation would simply pop
93 the arguments immediately after each call. However, if several
94 function calls are made in a row, it is typically cheaper to pop
95 all the arguments after all of the calls are complete since a
96 single pop instruction can be used. Therefore, GCC attempts to
97 defer popping the arguments until absolutely necessary. (For
98 example, at the end of a conditional, the arguments must be popped,
99 since code outside the conditional won't know whether or not the
100 arguments need to be popped.)
102 When INHIBIT_DEFER_POP is non-zero, however, the compiler does not
103 attempt to defer pops. Instead, the stack is popped immediately
104 after each call. Rather then setting this variable directly, use
105 NO_DEFER_POP and OK_DEFER_POP. */
106 int inhibit_defer_pop;
108 /* Nonzero means __builtin_saveregs has already been done in this function.
109 The value is the pseudoreg containing the value __builtin_saveregs
111 static rtx saveregs_value;
113 /* Similarly for __builtin_apply_args. */
114 static rtx apply_args_value;
116 /* Don't check memory usage, since code is being emitted to check a memory
117 usage. Used when current_function_check_memory_usage is true, to avoid
118 infinite recursion. */
119 static int in_check_memory_usage;
121 /* Postincrements that still need to be expanded. */
122 static rtx pending_chain;
124 /* This structure is used by move_by_pieces to describe the move to
126 struct move_by_pieces
136 int explicit_inc_from;
143 /* This structure is used by clear_by_pieces to describe the clear to
146 struct clear_by_pieces
158 extern struct obstack permanent_obstack;
159 extern rtx arg_pointer_save_area;
161 static rtx get_push_address PROTO ((int));
163 static rtx enqueue_insn PROTO((rtx, rtx));
164 static void init_queue PROTO((void));
165 static int move_by_pieces_ninsns PROTO((unsigned int, int));
166 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
167 struct move_by_pieces *));
168 static void clear_by_pieces PROTO((rtx, int, int));
169 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
170 struct clear_by_pieces *));
171 static int is_zeros_p PROTO((tree));
172 static int mostly_zeros_p PROTO((tree));
173 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
175 static void store_constructor PROTO((tree, rtx, int));
176 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
177 enum machine_mode, int, int,
179 static enum memory_use_mode
180 get_memory_usage_from_modifier PROTO((enum expand_modifier));
181 static tree save_noncopied_parts PROTO((tree, tree));
182 static tree init_noncopied_parts PROTO((tree, tree));
183 static int safe_from_p PROTO((rtx, tree, int));
184 static int fixed_type_p PROTO((tree));
185 static rtx var_rtx PROTO((tree));
186 static int get_pointer_alignment PROTO((tree, unsigned));
187 static tree string_constant PROTO((tree, tree *));
188 static tree c_strlen PROTO((tree));
189 static rtx get_memory_rtx PROTO((tree));
190 static rtx expand_builtin PROTO((tree, rtx, rtx,
191 enum machine_mode, int));
192 static int apply_args_size PROTO((void));
193 static int apply_result_size PROTO((void));
194 static rtx result_vector PROTO((int, rtx));
195 static rtx expand_builtin_setjmp PROTO((tree, rtx));
196 static rtx expand_builtin_apply_args PROTO((void));
197 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
198 static void expand_builtin_return PROTO((rtx));
199 static rtx expand_increment PROTO((tree, int, int));
200 static void preexpand_calls PROTO((tree));
201 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
202 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
203 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
204 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
205 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
207 /* Record for each mode whether we can move a register directly to or
208 from an object of that mode in memory. If we can't, we won't try
209 to use that mode directly when accessing a field of that mode. */
211 static char direct_load[NUM_MACHINE_MODES];
212 static char direct_store[NUM_MACHINE_MODES];
214 /* If a memory-to-memory move would take MOVE_RATIO or more simple
215 move-instruction sequences, we will do a movstr or libcall instead. */
218 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
221 /* If we are optimizing for space (-Os), cut down the default move ratio */
222 #define MOVE_RATIO (optimize_size ? 3 : 15)
226 /* This macro is used to determine whether move_by_pieces should be called
227 to perform a structure copy. */
228 #ifndef MOVE_BY_PIECES_P
229 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
230 (SIZE, ALIGN) < MOVE_RATIO)
233 /* This array records the insn_code of insns to perform block moves. */
234 enum insn_code movstr_optab[NUM_MACHINE_MODES];
236 /* This array records the insn_code of insns to perform block clears. */
237 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
239 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
241 #ifndef SLOW_UNALIGNED_ACCESS
242 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
245 /* Register mappings for target machines without register windows. */
246 #ifndef INCOMING_REGNO
247 #define INCOMING_REGNO(OUT) (OUT)
249 #ifndef OUTGOING_REGNO
250 #define OUTGOING_REGNO(IN) (IN)
253 /* This is run once per compilation to set up which modes can be used
254 directly in memory and to initialize the block move optab. */
260 enum machine_mode mode;
267 /* Since we are on the permanent obstack, we must be sure we save this
268 spot AFTER we call start_sequence, since it will reuse the rtl it
270 free_point = (char *) oballoc (0);
272 /* Try indexing by frame ptr and try by stack ptr.
273 It is known that on the Convex the stack ptr isn't a valid index.
274 With luck, one or the other is valid on any machine. */
275 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
276 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
278 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
279 pat = PATTERN (insn);
281 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
282 mode = (enum machine_mode) ((int) mode + 1))
287 direct_load[(int) mode] = direct_store[(int) mode] = 0;
288 PUT_MODE (mem, mode);
289 PUT_MODE (mem1, mode);
291 /* See if there is some register that can be used in this mode and
292 directly loaded or stored from memory. */
294 if (mode != VOIDmode && mode != BLKmode)
295 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
296 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
299 if (! HARD_REGNO_MODE_OK (regno, mode))
302 reg = gen_rtx_REG (mode, regno);
305 SET_DEST (pat) = reg;
306 if (recog (pat, insn, &num_clobbers) >= 0)
307 direct_load[(int) mode] = 1;
309 SET_SRC (pat) = mem1;
310 SET_DEST (pat) = reg;
311 if (recog (pat, insn, &num_clobbers) >= 0)
312 direct_load[(int) mode] = 1;
315 SET_DEST (pat) = mem;
316 if (recog (pat, insn, &num_clobbers) >= 0)
317 direct_store[(int) mode] = 1;
320 SET_DEST (pat) = mem1;
321 if (recog (pat, insn, &num_clobbers) >= 0)
322 direct_store[(int) mode] = 1;
330 /* This is run at the start of compiling a function. */
337 pending_stack_adjust = 0;
338 inhibit_defer_pop = 0;
340 apply_args_value = 0;
344 /* Save all variables describing the current status into the structure *P.
345 This is used before starting a nested function. */
351 p->pending_chain = pending_chain;
352 p->pending_stack_adjust = pending_stack_adjust;
353 p->inhibit_defer_pop = inhibit_defer_pop;
354 p->saveregs_value = saveregs_value;
355 p->apply_args_value = apply_args_value;
356 p->forced_labels = forced_labels;
358 pending_chain = NULL_RTX;
359 pending_stack_adjust = 0;
360 inhibit_defer_pop = 0;
362 apply_args_value = 0;
366 /* Restore all variables describing the current status from the structure *P.
367 This is used after a nested function. */
370 restore_expr_status (p)
373 pending_chain = p->pending_chain;
374 pending_stack_adjust = p->pending_stack_adjust;
375 inhibit_defer_pop = p->inhibit_defer_pop;
376 saveregs_value = p->saveregs_value;
377 apply_args_value = p->apply_args_value;
378 forced_labels = p->forced_labels;
381 /* Manage the queue of increment instructions to be output
382 for POSTINCREMENT_EXPR expressions, etc. */
384 /* Queue up to increment (or change) VAR later. BODY says how:
385 BODY should be the same thing you would pass to emit_insn
386 to increment right away. It will go to emit_insn later on.
388 The value is a QUEUED expression to be used in place of VAR
389 where you want to guarantee the pre-incrementation value of VAR. */
392 enqueue_insn (var, body)
395 pending_chain = gen_rtx_QUEUED (GET_MODE (var),
396 var, NULL_RTX, NULL_RTX, body,
398 return pending_chain;
401 /* Use protect_from_queue to convert a QUEUED expression
402 into something that you can put immediately into an instruction.
403 If the queued incrementation has not happened yet,
404 protect_from_queue returns the variable itself.
405 If the incrementation has happened, protect_from_queue returns a temp
406 that contains a copy of the old value of the variable.
408 Any time an rtx which might possibly be a QUEUED is to be put
409 into an instruction, it must be passed through protect_from_queue first.
410 QUEUED expressions are not meaningful in instructions.
412 Do not pass a value through protect_from_queue and then hold
413 on to it for a while before putting it in an instruction!
414 If the queue is flushed in between, incorrect code will result. */
417 protect_from_queue (x, modify)
421 register RTX_CODE code = GET_CODE (x);
423 #if 0 /* A QUEUED can hang around after the queue is forced out. */
424 /* Shortcut for most common case. */
425 if (pending_chain == 0)
431 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
432 use of autoincrement. Make a copy of the contents of the memory
433 location rather than a copy of the address, but not if the value is
434 of mode BLKmode. Don't modify X in place since it might be
436 if (code == MEM && GET_MODE (x) != BLKmode
437 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
439 register rtx y = XEXP (x, 0);
440 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
442 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
443 MEM_COPY_ATTRIBUTES (new, x);
444 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
448 register rtx temp = gen_reg_rtx (GET_MODE (new));
449 emit_insn_before (gen_move_insn (temp, new),
453 /* Copy the address into a pseudo, so that the returned value
454 remains correct across calls to emit_queue. */
455 XEXP (new, 0) = copy_to_reg (XEXP (new, 0));
458 /* Otherwise, recursively protect the subexpressions of all
459 the kinds of rtx's that can contain a QUEUED. */
462 rtx tem = protect_from_queue (XEXP (x, 0), 0);
463 if (tem != XEXP (x, 0))
469 else if (code == PLUS || code == MULT)
471 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
472 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
473 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
482 /* If the increment has not happened, use the variable itself. Copy it
483 into a new pseudo so that the value remains correct across calls to
485 if (QUEUED_INSN (x) == 0)
486 return copy_to_reg (QUEUED_VAR (x));
487 /* If the increment has happened and a pre-increment copy exists,
489 if (QUEUED_COPY (x) != 0)
490 return QUEUED_COPY (x);
491 /* The increment has happened but we haven't set up a pre-increment copy.
492 Set one up now, and use it. */
493 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
494 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
496 return QUEUED_COPY (x);
499 /* Return nonzero if X contains a QUEUED expression:
500 if it contains anything that will be altered by a queued increment.
501 We handle only combinations of MEM, PLUS, MINUS and MULT operators
502 since memory addresses generally contain only those. */
508 register enum rtx_code code = GET_CODE (x);
514 return queued_subexp_p (XEXP (x, 0));
518 return (queued_subexp_p (XEXP (x, 0))
519 || queued_subexp_p (XEXP (x, 1)));
525 /* Perform all the pending incrementations. */
531 while ((p = pending_chain))
533 rtx body = QUEUED_BODY (p);
535 if (GET_CODE (body) == SEQUENCE)
537 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
538 emit_insn (QUEUED_BODY (p));
541 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
542 pending_chain = QUEUED_NEXT (p);
553 /* Copy data from FROM to TO, where the machine modes are not the same.
554 Both modes may be integer, or both may be floating.
555 UNSIGNEDP should be nonzero if FROM is an unsigned type.
556 This causes zero-extension instead of sign-extension. */
559 convert_move (to, from, unsignedp)
560 register rtx to, from;
563 enum machine_mode to_mode = GET_MODE (to);
564 enum machine_mode from_mode = GET_MODE (from);
565 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
566 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
570 /* rtx code for making an equivalent value. */
571 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
573 to = protect_from_queue (to, 1);
574 from = protect_from_queue (from, 0);
576 if (to_real != from_real)
579 /* If FROM is a SUBREG that indicates that we have already done at least
580 the required extension, strip it. We don't handle such SUBREGs as
583 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
584 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
585 >= GET_MODE_SIZE (to_mode))
586 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
587 from = gen_lowpart (to_mode, from), from_mode = to_mode;
589 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
592 if (to_mode == from_mode
593 || (from_mode == VOIDmode && CONSTANT_P (from)))
595 emit_move_insn (to, from);
603 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
605 /* Try converting directly if the insn is supported. */
606 if ((code = can_extend_p (to_mode, from_mode, 0))
609 emit_unop_insn (code, to, from, UNKNOWN);
614 #ifdef HAVE_trunchfqf2
615 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
617 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
621 #ifdef HAVE_trunctqfqf2
622 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
624 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
628 #ifdef HAVE_truncsfqf2
629 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
631 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
635 #ifdef HAVE_truncdfqf2
636 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
638 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
642 #ifdef HAVE_truncxfqf2
643 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
645 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
649 #ifdef HAVE_trunctfqf2
650 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
652 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
657 #ifdef HAVE_trunctqfhf2
658 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
660 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
664 #ifdef HAVE_truncsfhf2
665 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
667 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
671 #ifdef HAVE_truncdfhf2
672 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
674 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
678 #ifdef HAVE_truncxfhf2
679 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
681 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
685 #ifdef HAVE_trunctfhf2
686 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
688 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
693 #ifdef HAVE_truncsftqf2
694 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
696 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
700 #ifdef HAVE_truncdftqf2
701 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
703 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
707 #ifdef HAVE_truncxftqf2
708 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
710 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
714 #ifdef HAVE_trunctftqf2
715 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
717 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
722 #ifdef HAVE_truncdfsf2
723 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
725 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
729 #ifdef HAVE_truncxfsf2
730 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
732 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
736 #ifdef HAVE_trunctfsf2
737 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
739 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
743 #ifdef HAVE_truncxfdf2
744 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
746 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
750 #ifdef HAVE_trunctfdf2
751 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
753 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
765 libcall = extendsfdf2_libfunc;
769 libcall = extendsfxf2_libfunc;
773 libcall = extendsftf2_libfunc;
785 libcall = truncdfsf2_libfunc;
789 libcall = extenddfxf2_libfunc;
793 libcall = extenddftf2_libfunc;
805 libcall = truncxfsf2_libfunc;
809 libcall = truncxfdf2_libfunc;
821 libcall = trunctfsf2_libfunc;
825 libcall = trunctfdf2_libfunc;
837 if (libcall == (rtx) 0)
838 /* This conversion is not implemented yet. */
841 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
843 emit_move_insn (to, value);
847 /* Now both modes are integers. */
849 /* Handle expanding beyond a word. */
850 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
851 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
858 enum machine_mode lowpart_mode;
859 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
861 /* Try converting directly if the insn is supported. */
862 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
865 /* If FROM is a SUBREG, put it into a register. Do this
866 so that we always generate the same set of insns for
867 better cse'ing; if an intermediate assignment occurred,
868 we won't be doing the operation directly on the SUBREG. */
869 if (optimize > 0 && GET_CODE (from) == SUBREG)
870 from = force_reg (from_mode, from);
871 emit_unop_insn (code, to, from, equiv_code);
874 /* Next, try converting via full word. */
875 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
876 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
877 != CODE_FOR_nothing))
879 if (GET_CODE (to) == REG)
880 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
881 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
882 emit_unop_insn (code, to,
883 gen_lowpart (word_mode, to), equiv_code);
887 /* No special multiword conversion insn; do it by hand. */
890 /* Since we will turn this into a no conflict block, we must ensure
891 that the source does not overlap the target. */
893 if (reg_overlap_mentioned_p (to, from))
894 from = force_reg (from_mode, from);
896 /* Get a copy of FROM widened to a word, if necessary. */
897 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
898 lowpart_mode = word_mode;
900 lowpart_mode = from_mode;
902 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
904 lowpart = gen_lowpart (lowpart_mode, to);
905 emit_move_insn (lowpart, lowfrom);
907 /* Compute the value to put in each remaining word. */
909 fill_value = const0_rtx;
914 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
915 && STORE_FLAG_VALUE == -1)
917 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
919 fill_value = gen_reg_rtx (word_mode);
920 emit_insn (gen_slt (fill_value));
926 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
927 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
929 fill_value = convert_to_mode (word_mode, fill_value, 1);
933 /* Fill the remaining words. */
934 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
936 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
937 rtx subword = operand_subword (to, index, 1, to_mode);
942 if (fill_value != subword)
943 emit_move_insn (subword, fill_value);
946 insns = get_insns ();
949 emit_no_conflict_block (insns, to, from, NULL_RTX,
950 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
954 /* Truncating multi-word to a word or less. */
955 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
956 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
958 if (!((GET_CODE (from) == MEM
959 && ! MEM_VOLATILE_P (from)
960 && direct_load[(int) to_mode]
961 && ! mode_dependent_address_p (XEXP (from, 0)))
962 || GET_CODE (from) == REG
963 || GET_CODE (from) == SUBREG))
964 from = force_reg (from_mode, from);
965 convert_move (to, gen_lowpart (word_mode, from), 0);
969 /* Handle pointer conversion */ /* SPEE 900220 */
970 if (to_mode == PQImode)
972 if (from_mode != QImode)
973 from = convert_to_mode (QImode, from, unsignedp);
975 #ifdef HAVE_truncqipqi2
976 if (HAVE_truncqipqi2)
978 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
981 #endif /* HAVE_truncqipqi2 */
985 if (from_mode == PQImode)
987 if (to_mode != QImode)
989 from = convert_to_mode (QImode, from, unsignedp);
994 #ifdef HAVE_extendpqiqi2
995 if (HAVE_extendpqiqi2)
997 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1000 #endif /* HAVE_extendpqiqi2 */
1005 if (to_mode == PSImode)
1007 if (from_mode != SImode)
1008 from = convert_to_mode (SImode, from, unsignedp);
1010 #ifdef HAVE_truncsipsi2
1011 if (HAVE_truncsipsi2)
1013 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1016 #endif /* HAVE_truncsipsi2 */
1020 if (from_mode == PSImode)
1022 if (to_mode != SImode)
1024 from = convert_to_mode (SImode, from, unsignedp);
1029 #ifdef HAVE_extendpsisi2
1030 if (HAVE_extendpsisi2)
1032 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1035 #endif /* HAVE_extendpsisi2 */
1040 if (to_mode == PDImode)
1042 if (from_mode != DImode)
1043 from = convert_to_mode (DImode, from, unsignedp);
1045 #ifdef HAVE_truncdipdi2
1046 if (HAVE_truncdipdi2)
1048 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1051 #endif /* HAVE_truncdipdi2 */
1055 if (from_mode == PDImode)
1057 if (to_mode != DImode)
1059 from = convert_to_mode (DImode, from, unsignedp);
1064 #ifdef HAVE_extendpdidi2
1065 if (HAVE_extendpdidi2)
1067 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1070 #endif /* HAVE_extendpdidi2 */
1075 /* Now follow all the conversions between integers
1076 no more than a word long. */
1078 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1079 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1080 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1081 GET_MODE_BITSIZE (from_mode)))
1083 if (!((GET_CODE (from) == MEM
1084 && ! MEM_VOLATILE_P (from)
1085 && direct_load[(int) to_mode]
1086 && ! mode_dependent_address_p (XEXP (from, 0)))
1087 || GET_CODE (from) == REG
1088 || GET_CODE (from) == SUBREG))
1089 from = force_reg (from_mode, from);
1090 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1091 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1092 from = copy_to_reg (from);
1093 emit_move_insn (to, gen_lowpart (to_mode, from));
1097 /* Handle extension. */
1098 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1100 /* Convert directly if that works. */
1101 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1102 != CODE_FOR_nothing)
1104 emit_unop_insn (code, to, from, equiv_code);
1109 enum machine_mode intermediate;
1113 /* Search for a mode to convert via. */
1114 for (intermediate = from_mode; intermediate != VOIDmode;
1115 intermediate = GET_MODE_WIDER_MODE (intermediate))
1116 if (((can_extend_p (to_mode, intermediate, unsignedp)
1117 != CODE_FOR_nothing)
1118 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1119 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1120 && (can_extend_p (intermediate, from_mode, unsignedp)
1121 != CODE_FOR_nothing))
1123 convert_move (to, convert_to_mode (intermediate, from,
1124 unsignedp), unsignedp);
1128 /* No suitable intermediate mode.
1129 Generate what we need with shifts. */
1130 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1131 - GET_MODE_BITSIZE (from_mode), 0);
1132 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1133 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1135 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1138 emit_move_insn (to, tmp);
1143 /* Support special truncate insns for certain modes. */
1145 if (from_mode == DImode && to_mode == SImode)
1147 #ifdef HAVE_truncdisi2
1148 if (HAVE_truncdisi2)
1150 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1154 convert_move (to, force_reg (from_mode, from), unsignedp);
1158 if (from_mode == DImode && to_mode == HImode)
1160 #ifdef HAVE_truncdihi2
1161 if (HAVE_truncdihi2)
1163 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1167 convert_move (to, force_reg (from_mode, from), unsignedp);
1171 if (from_mode == DImode && to_mode == QImode)
1173 #ifdef HAVE_truncdiqi2
1174 if (HAVE_truncdiqi2)
1176 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1180 convert_move (to, force_reg (from_mode, from), unsignedp);
1184 if (from_mode == SImode && to_mode == HImode)
1186 #ifdef HAVE_truncsihi2
1187 if (HAVE_truncsihi2)
1189 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1193 convert_move (to, force_reg (from_mode, from), unsignedp);
1197 if (from_mode == SImode && to_mode == QImode)
1199 #ifdef HAVE_truncsiqi2
1200 if (HAVE_truncsiqi2)
1202 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1206 convert_move (to, force_reg (from_mode, from), unsignedp);
1210 if (from_mode == HImode && to_mode == QImode)
1212 #ifdef HAVE_trunchiqi2
1213 if (HAVE_trunchiqi2)
1215 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1219 convert_move (to, force_reg (from_mode, from), unsignedp);
1223 if (from_mode == TImode && to_mode == DImode)
1225 #ifdef HAVE_trunctidi2
1226 if (HAVE_trunctidi2)
1228 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1232 convert_move (to, force_reg (from_mode, from), unsignedp);
1236 if (from_mode == TImode && to_mode == SImode)
1238 #ifdef HAVE_trunctisi2
1239 if (HAVE_trunctisi2)
1241 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1245 convert_move (to, force_reg (from_mode, from), unsignedp);
1249 if (from_mode == TImode && to_mode == HImode)
1251 #ifdef HAVE_trunctihi2
1252 if (HAVE_trunctihi2)
1254 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1258 convert_move (to, force_reg (from_mode, from), unsignedp);
1262 if (from_mode == TImode && to_mode == QImode)
1264 #ifdef HAVE_trunctiqi2
1265 if (HAVE_trunctiqi2)
1267 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1271 convert_move (to, force_reg (from_mode, from), unsignedp);
1275 /* Handle truncation of volatile memrefs, and so on;
1276 the things that couldn't be truncated directly,
1277 and for which there was no special instruction. */
1278 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1280 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1281 emit_move_insn (to, temp);
1285 /* Mode combination is not recognized. */
1289 /* Return an rtx for a value that would result
1290 from converting X to mode MODE.
1291 Both X and MODE may be floating, or both integer.
1292 UNSIGNEDP is nonzero if X is an unsigned value.
1293 This can be done by referring to a part of X in place
1294 or by copying to a new temporary with conversion.
1296 This function *must not* call protect_from_queue
1297 except when putting X into an insn (in which case convert_move does it). */
1300 convert_to_mode (mode, x, unsignedp)
1301 enum machine_mode mode;
1305 return convert_modes (mode, VOIDmode, x, unsignedp);
1308 /* Return an rtx for a value that would result
1309 from converting X from mode OLDMODE to mode MODE.
1310 Both modes may be floating, or both integer.
1311 UNSIGNEDP is nonzero if X is an unsigned value.
1313 This can be done by referring to a part of X in place
1314 or by copying to a new temporary with conversion.
1316 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1318 This function *must not* call protect_from_queue
1319 except when putting X into an insn (in which case convert_move does it). */
1322 convert_modes (mode, oldmode, x, unsignedp)
1323 enum machine_mode mode, oldmode;
1329 /* If FROM is a SUBREG that indicates that we have already done at least
1330 the required extension, strip it. */
1332 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1333 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1334 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1335 x = gen_lowpart (mode, x);
1337 if (GET_MODE (x) != VOIDmode)
1338 oldmode = GET_MODE (x);
1340 if (mode == oldmode)
1343 /* There is one case that we must handle specially: If we are converting
1344 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1345 we are to interpret the constant as unsigned, gen_lowpart will do
1346 the wrong if the constant appears negative. What we want to do is
1347 make the high-order word of the constant zero, not all ones. */
1349 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1350 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1351 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1353 HOST_WIDE_INT val = INTVAL (x);
1355 if (oldmode != VOIDmode
1356 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1358 int width = GET_MODE_BITSIZE (oldmode);
1360 /* We need to zero extend VAL. */
1361 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1364 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1367 /* We can do this with a gen_lowpart if both desired and current modes
1368 are integer, and this is either a constant integer, a register, or a
1369 non-volatile MEM. Except for the constant case where MODE is no
1370 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1372 if ((GET_CODE (x) == CONST_INT
1373 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1374 || (GET_MODE_CLASS (mode) == MODE_INT
1375 && GET_MODE_CLASS (oldmode) == MODE_INT
1376 && (GET_CODE (x) == CONST_DOUBLE
1377 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1378 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1379 && direct_load[(int) mode])
1380 || (GET_CODE (x) == REG
1381 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1382 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1384 /* ?? If we don't know OLDMODE, we have to assume here that
1385 X does not need sign- or zero-extension. This may not be
1386 the case, but it's the best we can do. */
1387 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1388 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1390 HOST_WIDE_INT val = INTVAL (x);
1391 int width = GET_MODE_BITSIZE (oldmode);
1393 /* We must sign or zero-extend in this case. Start by
1394 zero-extending, then sign extend if we need to. */
1395 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1397 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1398 val |= (HOST_WIDE_INT) (-1) << width;
1400 return GEN_INT (val);
1403 return gen_lowpart (mode, x);
1406 temp = gen_reg_rtx (mode);
1407 convert_move (temp, x, unsignedp);
1412 /* This macro is used to determine what the largest unit size that
1413 move_by_pieces can use is. */
1415 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1416 move efficiently, as opposed to MOVE_MAX which is the maximum
1417 number of bhytes we can move with a single instruction. */
1419 #ifndef MOVE_MAX_PIECES
1420 #define MOVE_MAX_PIECES MOVE_MAX
1423 /* Generate several move instructions to copy LEN bytes
1424 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1425 The caller must pass FROM and TO
1426 through protect_from_queue before calling.
1427 ALIGN (in bytes) is maximum alignment we can assume. */
1430 move_by_pieces (to, from, len, align)
1434 struct move_by_pieces data;
1435 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1436 int max_size = MOVE_MAX_PIECES + 1;
1437 enum machine_mode mode = VOIDmode, tmode;
1438 enum insn_code icode;
1441 data.to_addr = to_addr;
1442 data.from_addr = from_addr;
1446 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1447 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1449 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1450 || GET_CODE (from_addr) == POST_INC
1451 || GET_CODE (from_addr) == POST_DEC);
1453 data.explicit_inc_from = 0;
1454 data.explicit_inc_to = 0;
1456 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1457 if (data.reverse) data.offset = len;
1460 data.to_struct = MEM_IN_STRUCT_P (to);
1461 data.from_struct = MEM_IN_STRUCT_P (from);
1463 /* If copying requires more than two move insns,
1464 copy addresses to registers (to make displacements shorter)
1465 and use post-increment if available. */
1466 if (!(data.autinc_from && data.autinc_to)
1467 && move_by_pieces_ninsns (len, align) > 2)
1469 /* Find the mode of the largest move... */
1470 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1471 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1472 if (GET_MODE_SIZE (tmode) < max_size)
1475 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1477 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1478 data.autinc_from = 1;
1479 data.explicit_inc_from = -1;
1481 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1483 data.from_addr = copy_addr_to_reg (from_addr);
1484 data.autinc_from = 1;
1485 data.explicit_inc_from = 1;
1487 if (!data.autinc_from && CONSTANT_P (from_addr))
1488 data.from_addr = copy_addr_to_reg (from_addr);
1489 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1491 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1493 data.explicit_inc_to = -1;
1495 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1497 data.to_addr = copy_addr_to_reg (to_addr);
1499 data.explicit_inc_to = 1;
1501 if (!data.autinc_to && CONSTANT_P (to_addr))
1502 data.to_addr = copy_addr_to_reg (to_addr);
1505 if (! SLOW_UNALIGNED_ACCESS
1506 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1509 /* First move what we can in the largest integer mode, then go to
1510 successively smaller modes. */
1512 while (max_size > 1)
1514 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1515 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1516 if (GET_MODE_SIZE (tmode) < max_size)
1519 if (mode == VOIDmode)
1522 icode = mov_optab->handlers[(int) mode].insn_code;
1523 if (icode != CODE_FOR_nothing
1524 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1525 GET_MODE_SIZE (mode)))
1526 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1528 max_size = GET_MODE_SIZE (mode);
1531 /* The code above should have handled everything. */
1536 /* Return number of insns required to move L bytes by pieces.
1537 ALIGN (in bytes) is maximum alignment we can assume. */
1540 move_by_pieces_ninsns (l, align)
1544 register int n_insns = 0;
1545 int max_size = MOVE_MAX + 1;
1547 if (! SLOW_UNALIGNED_ACCESS
1548 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1551 while (max_size > 1)
1553 enum machine_mode mode = VOIDmode, tmode;
1554 enum insn_code icode;
1556 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1557 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1558 if (GET_MODE_SIZE (tmode) < max_size)
1561 if (mode == VOIDmode)
1564 icode = mov_optab->handlers[(int) mode].insn_code;
1565 if (icode != CODE_FOR_nothing
1566 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1567 GET_MODE_SIZE (mode)))
1568 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1570 max_size = GET_MODE_SIZE (mode);
1576 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1577 with move instructions for mode MODE. GENFUN is the gen_... function
1578 to make a move insn for that mode. DATA has all the other info. */
1581 move_by_pieces_1 (genfun, mode, data)
1582 rtx (*genfun) PROTO ((rtx, ...));
1583 enum machine_mode mode;
1584 struct move_by_pieces *data;
1586 register int size = GET_MODE_SIZE (mode);
1587 register rtx to1, from1;
1589 while (data->len >= size)
1591 if (data->reverse) data->offset -= size;
1593 to1 = (data->autinc_to
1594 ? gen_rtx_MEM (mode, data->to_addr)
1595 : copy_rtx (change_address (data->to, mode,
1596 plus_constant (data->to_addr,
1598 MEM_IN_STRUCT_P (to1) = data->to_struct;
1601 = (data->autinc_from
1602 ? gen_rtx_MEM (mode, data->from_addr)
1603 : copy_rtx (change_address (data->from, mode,
1604 plus_constant (data->from_addr,
1606 MEM_IN_STRUCT_P (from1) = data->from_struct;
1608 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1609 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1610 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1611 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1613 emit_insn ((*genfun) (to1, from1));
1614 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1615 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1616 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1617 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1619 if (! data->reverse) data->offset += size;
1625 /* Emit code to move a block Y to a block X.
1626 This may be done with string-move instructions,
1627 with multiple scalar move instructions, or with a library call.
1629 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1631 SIZE is an rtx that says how long they are.
1632 ALIGN is the maximum alignment we can assume they have,
1635 Return the address of the new block, if memcpy is called and returns it,
1639 emit_block_move (x, y, size, align)
1645 #ifdef TARGET_MEM_FUNCTIONS
1647 tree call_expr, arg_list;
1650 if (GET_MODE (x) != BLKmode)
1653 if (GET_MODE (y) != BLKmode)
1656 x = protect_from_queue (x, 1);
1657 y = protect_from_queue (y, 0);
1658 size = protect_from_queue (size, 0);
1660 if (GET_CODE (x) != MEM)
1662 if (GET_CODE (y) != MEM)
1667 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1668 move_by_pieces (x, y, INTVAL (size), align);
1671 /* Try the most limited insn first, because there's no point
1672 including more than one in the machine description unless
1673 the more limited one has some advantage. */
1675 rtx opalign = GEN_INT (align);
1676 enum machine_mode mode;
1678 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1679 mode = GET_MODE_WIDER_MODE (mode))
1681 enum insn_code code = movstr_optab[(int) mode];
1683 if (code != CODE_FOR_nothing
1684 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1685 here because if SIZE is less than the mode mask, as it is
1686 returned by the macro, it will definitely be less than the
1687 actual mode mask. */
1688 && ((GET_CODE (size) == CONST_INT
1689 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1690 <= (GET_MODE_MASK (mode) >> 1)))
1691 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1692 && (insn_operand_predicate[(int) code][0] == 0
1693 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1694 && (insn_operand_predicate[(int) code][1] == 0
1695 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1696 && (insn_operand_predicate[(int) code][3] == 0
1697 || (*insn_operand_predicate[(int) code][3]) (opalign,
1701 rtx last = get_last_insn ();
1704 op2 = convert_to_mode (mode, size, 1);
1705 if (insn_operand_predicate[(int) code][2] != 0
1706 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1707 op2 = copy_to_mode_reg (mode, op2);
1709 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1716 delete_insns_since (last);
1720 /* X, Y, or SIZE may have been passed through protect_from_queue.
1722 It is unsafe to save the value generated by protect_from_queue
1723 and reuse it later. Consider what happens if emit_queue is
1724 called before the return value from protect_from_queue is used.
1726 Expansion of the CALL_EXPR below will call emit_queue before
1727 we are finished emitting RTL for argument setup. So if we are
1728 not careful we could get the wrong value for an argument.
1730 To avoid this problem we go ahead and emit code to copy X, Y &
1731 SIZE into new pseudos. We can then place those new pseudos
1732 into an RTL_EXPR and use them later, even after a call to
1735 Note this is not strictly needed for library calls since they
1736 do not call emit_queue before loading their arguments. However,
1737 we may need to have library calls call emit_queue in the future
1738 since failing to do so could cause problems for targets which
1739 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1740 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1741 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1743 #ifdef TARGET_MEM_FUNCTIONS
1744 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1746 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1747 TREE_UNSIGNED (integer_type_node));
1748 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1751 #ifdef TARGET_MEM_FUNCTIONS
1752 /* It is incorrect to use the libcall calling conventions to call
1753 memcpy in this context.
1755 This could be a user call to memcpy and the user may wish to
1756 examine the return value from memcpy.
1758 For targets where libcalls and normal calls have different conventions
1759 for returning pointers, we could end up generating incorrect code.
1761 So instead of using a libcall sequence we build up a suitable
1762 CALL_EXPR and expand the call in the normal fashion. */
1763 if (fn == NULL_TREE)
1767 /* This was copied from except.c, I don't know if all this is
1768 necessary in this context or not. */
1769 fn = get_identifier ("memcpy");
1770 push_obstacks_nochange ();
1771 end_temporary_allocation ();
1772 fntype = build_pointer_type (void_type_node);
1773 fntype = build_function_type (fntype, NULL_TREE);
1774 fn = build_decl (FUNCTION_DECL, fn, fntype);
1775 DECL_EXTERNAL (fn) = 1;
1776 TREE_PUBLIC (fn) = 1;
1777 DECL_ARTIFICIAL (fn) = 1;
1778 make_decl_rtl (fn, NULL_PTR, 1);
1779 assemble_external (fn);
1783 /* We need to make an argument list for the function call.
1785 memcpy has three arguments, the first two are void * addresses and
1786 the last is a size_t byte count for the copy. */
1788 = build_tree_list (NULL_TREE,
1789 make_tree (build_pointer_type (void_type_node), x));
1790 TREE_CHAIN (arg_list)
1791 = build_tree_list (NULL_TREE,
1792 make_tree (build_pointer_type (void_type_node), y));
1793 TREE_CHAIN (TREE_CHAIN (arg_list))
1794 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1795 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1797 /* Now we have to build up the CALL_EXPR itself. */
1798 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1799 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1800 call_expr, arg_list, NULL_TREE);
1801 TREE_SIDE_EFFECTS (call_expr) = 1;
1803 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1805 emit_library_call (bcopy_libfunc, 0,
1806 VOIDmode, 3, y, Pmode, x, Pmode,
1807 convert_to_mode (TYPE_MODE (integer_type_node), size,
1808 TREE_UNSIGNED (integer_type_node)),
1809 TYPE_MODE (integer_type_node));
1816 /* Copy all or part of a value X into registers starting at REGNO.
1817 The number of registers to be filled is NREGS. */
1820 move_block_to_reg (regno, x, nregs, mode)
1824 enum machine_mode mode;
1827 #ifdef HAVE_load_multiple
1835 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1836 x = validize_mem (force_const_mem (mode, x));
1838 /* See if the machine can do this with a load multiple insn. */
1839 #ifdef HAVE_load_multiple
1840 if (HAVE_load_multiple)
1842 last = get_last_insn ();
1843 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1851 delete_insns_since (last);
1855 for (i = 0; i < nregs; i++)
1856 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1857 operand_subword_force (x, i, mode));
1860 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1861 The number of registers to be filled is NREGS. SIZE indicates the number
1862 of bytes in the object X. */
1866 move_block_from_reg (regno, x, nregs, size)
1873 #ifdef HAVE_store_multiple
1877 enum machine_mode mode;
1879 /* If SIZE is that of a mode no bigger than a word, just use that
1880 mode's store operation. */
1881 if (size <= UNITS_PER_WORD
1882 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1884 emit_move_insn (change_address (x, mode, NULL),
1885 gen_rtx_REG (mode, regno));
1889 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1890 to the left before storing to memory. Note that the previous test
1891 doesn't handle all cases (e.g. SIZE == 3). */
1892 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1894 rtx tem = operand_subword (x, 0, 1, BLKmode);
1900 shift = expand_shift (LSHIFT_EXPR, word_mode,
1901 gen_rtx_REG (word_mode, regno),
1902 build_int_2 ((UNITS_PER_WORD - size)
1903 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1904 emit_move_insn (tem, shift);
1908 /* See if the machine can do this with a store multiple insn. */
1909 #ifdef HAVE_store_multiple
1910 if (HAVE_store_multiple)
1912 last = get_last_insn ();
1913 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1921 delete_insns_since (last);
1925 for (i = 0; i < nregs; i++)
1927 rtx tem = operand_subword (x, i, 1, BLKmode);
1932 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1936 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1937 registers represented by a PARALLEL. SSIZE represents the total size of
1938 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1940 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1941 the balance will be in what would be the low-order memory addresses, i.e.
1942 left justified for big endian, right justified for little endian. This
1943 happens to be true for the targets currently using this support. If this
1944 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1948 emit_group_load (dst, orig_src, ssize, align)
1955 if (GET_CODE (dst) != PARALLEL)
1958 /* Check for a NULL entry, used to indicate that the parameter goes
1959 both on the stack and in registers. */
1960 if (XEXP (XVECEXP (dst, 0, 0), 0))
1965 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1967 /* If we won't be loading directly from memory, protect the real source
1968 from strange tricks we might play. */
1970 if (GET_CODE (src) != MEM)
1972 src = gen_reg_rtx (GET_MODE (orig_src));
1973 emit_move_insn (src, orig_src);
1976 /* Process the pieces. */
1977 for (i = start; i < XVECLEN (dst, 0); i++)
1979 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1980 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1981 int bytelen = GET_MODE_SIZE (mode);
1984 /* Handle trailing fragments that run over the size of the struct. */
1985 if (ssize >= 0 && bytepos + bytelen > ssize)
1987 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1988 bytelen = ssize - bytepos;
1993 /* Optimize the access just a bit. */
1994 if (GET_CODE (src) == MEM
1995 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1996 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1997 && bytelen == GET_MODE_SIZE (mode))
1999 tmps[i] = gen_reg_rtx (mode);
2000 emit_move_insn (tmps[i],
2001 change_address (src, mode,
2002 plus_constant (XEXP (src, 0),
2007 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
2008 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
2009 mode, mode, align, ssize);
2012 if (BYTES_BIG_ENDIAN && shift)
2014 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2015 tmps[i], 0, OPTAB_WIDEN);
2020 /* Copy the extracted pieces into the proper (probable) hard regs. */
2021 for (i = start; i < XVECLEN (dst, 0); i++)
2022 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2025 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2026 registers represented by a PARALLEL. SSIZE represents the total size of
2027 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2030 emit_group_store (orig_dst, src, ssize, align)
2037 if (GET_CODE (src) != PARALLEL)
2040 /* Check for a NULL entry, used to indicate that the parameter goes
2041 both on the stack and in registers. */
2042 if (XEXP (XVECEXP (src, 0, 0), 0))
2047 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2049 /* Copy the (probable) hard regs into pseudos. */
2050 for (i = start; i < XVECLEN (src, 0); i++)
2052 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2053 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2054 emit_move_insn (tmps[i], reg);
2058 /* If we won't be storing directly into memory, protect the real destination
2059 from strange tricks we might play. */
2061 if (GET_CODE (dst) == PARALLEL)
2065 /* We can get a PARALLEL dst if there is a conditional expression in
2066 a return statement. In that case, the dst and src are the same,
2067 so no action is necessary. */
2068 if (rtx_equal_p (dst, src))
2071 /* It is unclear if we can ever reach here, but we may as well handle
2072 it. Allocate a temporary, and split this into a store/load to/from
2075 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2076 emit_group_store (temp, src, ssize, align);
2077 emit_group_load (dst, temp, ssize, align);
2080 else if (GET_CODE (dst) != MEM)
2082 dst = gen_reg_rtx (GET_MODE (orig_dst));
2083 /* Make life a bit easier for combine. */
2084 emit_move_insn (dst, const0_rtx);
2086 else if (! MEM_IN_STRUCT_P (dst))
2088 /* store_bit_field requires that memory operations have
2089 mem_in_struct_p set; we might not. */
2091 dst = copy_rtx (orig_dst);
2092 MEM_SET_IN_STRUCT_P (dst, 1);
2095 /* Process the pieces. */
2096 for (i = start; i < XVECLEN (src, 0); i++)
2098 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2099 enum machine_mode mode = GET_MODE (tmps[i]);
2100 int bytelen = GET_MODE_SIZE (mode);
2102 /* Handle trailing fragments that run over the size of the struct. */
2103 if (ssize >= 0 && bytepos + bytelen > ssize)
2105 if (BYTES_BIG_ENDIAN)
2107 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2108 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2109 tmps[i], 0, OPTAB_WIDEN);
2111 bytelen = ssize - bytepos;
2114 /* Optimize the access just a bit. */
2115 if (GET_CODE (dst) == MEM
2116 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2117 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2118 && bytelen == GET_MODE_SIZE (mode))
2120 emit_move_insn (change_address (dst, mode,
2121 plus_constant (XEXP (dst, 0),
2127 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2128 mode, tmps[i], align, ssize);
2133 /* Copy from the pseudo into the (probable) hard reg. */
2134 if (GET_CODE (dst) == REG)
2135 emit_move_insn (orig_dst, dst);
2138 /* Generate code to copy a BLKmode object of TYPE out of a
2139 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2140 is null, a stack temporary is created. TGTBLK is returned.
2142 The primary purpose of this routine is to handle functions
2143 that return BLKmode structures in registers. Some machines
2144 (the PA for example) want to return all small structures
2145 in registers regardless of the structure's alignment.
2149 copy_blkmode_from_reg(tgtblk,srcreg,type)
2154 int bytes = int_size_in_bytes (type);
2155 rtx src = NULL, dst = NULL;
2156 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2157 int bitpos, xbitpos, big_endian_correction = 0;
2161 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2162 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2163 preserve_temp_slots (tgtblk);
2166 /* This code assumes srcreg is at least a full word. If it isn't,
2167 copy it into a new pseudo which is a full word. */
2168 if (GET_MODE (srcreg) != BLKmode
2169 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2170 srcreg = convert_to_mode (word_mode, srcreg,
2171 TREE_UNSIGNED (type));
2173 /* Structures whose size is not a multiple of a word are aligned
2174 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2175 machine, this means we must skip the empty high order bytes when
2176 calculating the bit offset. */
2177 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2178 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2181 /* Copy the structure BITSIZE bites at a time.
2183 We could probably emit more efficient code for machines
2184 which do not use strict alignment, but it doesn't seem
2185 worth the effort at the current time. */
2186 for (bitpos = 0, xbitpos = big_endian_correction;
2187 bitpos < bytes * BITS_PER_UNIT;
2188 bitpos += bitsize, xbitpos += bitsize)
2191 /* We need a new source operand each time xbitpos is on a
2192 word boundary and when xbitpos == big_endian_correction
2193 (the first time through). */
2194 if (xbitpos % BITS_PER_WORD == 0
2195 || xbitpos == big_endian_correction)
2196 src = operand_subword_force (srcreg,
2197 xbitpos / BITS_PER_WORD,
2200 /* We need a new destination operand each time bitpos is on
2202 if (bitpos % BITS_PER_WORD == 0)
2203 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2205 /* Use xbitpos for the source extraction (right justified) and
2206 xbitpos for the destination store (left justified). */
2207 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2208 extract_bit_field (src, bitsize,
2209 xbitpos % BITS_PER_WORD, 1,
2210 NULL_RTX, word_mode,
2212 bitsize / BITS_PER_UNIT,
2214 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2220 /* Add a USE expression for REG to the (possibly empty) list pointed
2221 to by CALL_FUSAGE. REG must denote a hard register. */
2224 use_reg (call_fusage, reg)
2225 rtx *call_fusage, reg;
2227 if (GET_CODE (reg) != REG
2228 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2232 = gen_rtx_EXPR_LIST (VOIDmode,
2233 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2236 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2237 starting at REGNO. All of these registers must be hard registers. */
2240 use_regs (call_fusage, regno, nregs)
2247 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2250 for (i = 0; i < nregs; i++)
2251 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2254 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2255 PARALLEL REGS. This is for calls that pass values in multiple
2256 non-contiguous locations. The Irix 6 ABI has examples of this. */
2259 use_group_regs (call_fusage, regs)
2265 for (i = 0; i < XVECLEN (regs, 0); i++)
2267 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2269 /* A NULL entry means the parameter goes both on the stack and in
2270 registers. This can also be a MEM for targets that pass values
2271 partially on the stack and partially in registers. */
2272 if (reg != 0 && GET_CODE (reg) == REG)
2273 use_reg (call_fusage, reg);
2277 /* Generate several move instructions to clear LEN bytes of block TO.
2278 (A MEM rtx with BLKmode). The caller must pass TO through
2279 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2283 clear_by_pieces (to, len, align)
2287 struct clear_by_pieces data;
2288 rtx to_addr = XEXP (to, 0);
2289 int max_size = MOVE_MAX_PIECES + 1;
2290 enum machine_mode mode = VOIDmode, tmode;
2291 enum insn_code icode;
2294 data.to_addr = to_addr;
2297 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2298 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2300 data.explicit_inc_to = 0;
2302 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2303 if (data.reverse) data.offset = len;
2306 data.to_struct = MEM_IN_STRUCT_P (to);
2308 /* If copying requires more than two move insns,
2309 copy addresses to registers (to make displacements shorter)
2310 and use post-increment if available. */
2312 && move_by_pieces_ninsns (len, align) > 2)
2314 /* Determine the main mode we'll be using */
2315 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2316 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2317 if (GET_MODE_SIZE (tmode) < max_size)
2320 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2322 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2324 data.explicit_inc_to = -1;
2326 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2328 data.to_addr = copy_addr_to_reg (to_addr);
2330 data.explicit_inc_to = 1;
2332 if (!data.autinc_to && CONSTANT_P (to_addr))
2333 data.to_addr = copy_addr_to_reg (to_addr);
2336 if (! SLOW_UNALIGNED_ACCESS
2337 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2340 /* First move what we can in the largest integer mode, then go to
2341 successively smaller modes. */
2343 while (max_size > 1)
2345 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2346 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2347 if (GET_MODE_SIZE (tmode) < max_size)
2350 if (mode == VOIDmode)
2353 icode = mov_optab->handlers[(int) mode].insn_code;
2354 if (icode != CODE_FOR_nothing
2355 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2356 GET_MODE_SIZE (mode)))
2357 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2359 max_size = GET_MODE_SIZE (mode);
2362 /* The code above should have handled everything. */
2367 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2368 with move instructions for mode MODE. GENFUN is the gen_... function
2369 to make a move insn for that mode. DATA has all the other info. */
2372 clear_by_pieces_1 (genfun, mode, data)
2373 rtx (*genfun) PROTO ((rtx, ...));
2374 enum machine_mode mode;
2375 struct clear_by_pieces *data;
2377 register int size = GET_MODE_SIZE (mode);
2380 while (data->len >= size)
2382 if (data->reverse) data->offset -= size;
2384 to1 = (data->autinc_to
2385 ? gen_rtx_MEM (mode, data->to_addr)
2386 : copy_rtx (change_address (data->to, mode,
2387 plus_constant (data->to_addr,
2389 MEM_IN_STRUCT_P (to1) = data->to_struct;
2391 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2392 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2394 emit_insn ((*genfun) (to1, const0_rtx));
2395 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2396 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2398 if (! data->reverse) data->offset += size;
2404 /* Write zeros through the storage of OBJECT.
2405 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2406 the maximum alignment we can is has, measured in bytes.
2408 If we call a function that returns the length of the block, return it. */
2411 clear_storage (object, size, align)
2416 #ifdef TARGET_MEM_FUNCTIONS
2418 tree call_expr, arg_list;
2422 if (GET_MODE (object) == BLKmode)
2424 object = protect_from_queue (object, 1);
2425 size = protect_from_queue (size, 0);
2427 if (GET_CODE (size) == CONST_INT
2428 && MOVE_BY_PIECES_P (INTVAL (size), align))
2429 clear_by_pieces (object, INTVAL (size), align);
2433 /* Try the most limited insn first, because there's no point
2434 including more than one in the machine description unless
2435 the more limited one has some advantage. */
2437 rtx opalign = GEN_INT (align);
2438 enum machine_mode mode;
2440 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2441 mode = GET_MODE_WIDER_MODE (mode))
2443 enum insn_code code = clrstr_optab[(int) mode];
2445 if (code != CODE_FOR_nothing
2446 /* We don't need MODE to be narrower than
2447 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2448 the mode mask, as it is returned by the macro, it will
2449 definitely be less than the actual mode mask. */
2450 && ((GET_CODE (size) == CONST_INT
2451 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2452 <= (GET_MODE_MASK (mode) >> 1)))
2453 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2454 && (insn_operand_predicate[(int) code][0] == 0
2455 || (*insn_operand_predicate[(int) code][0]) (object,
2457 && (insn_operand_predicate[(int) code][2] == 0
2458 || (*insn_operand_predicate[(int) code][2]) (opalign,
2462 rtx last = get_last_insn ();
2465 op1 = convert_to_mode (mode, size, 1);
2466 if (insn_operand_predicate[(int) code][1] != 0
2467 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2469 op1 = copy_to_mode_reg (mode, op1);
2471 pat = GEN_FCN ((int) code) (object, op1, opalign);
2478 delete_insns_since (last);
2482 /* OBJECT or SIZE may have been passed through protect_from_queue.
2484 It is unsafe to save the value generated by protect_from_queue
2485 and reuse it later. Consider what happens if emit_queue is
2486 called before the return value from protect_from_queue is used.
2488 Expansion of the CALL_EXPR below will call emit_queue before
2489 we are finished emitting RTL for argument setup. So if we are
2490 not careful we could get the wrong value for an argument.
2492 To avoid this problem we go ahead and emit code to copy OBJECT
2493 and SIZE into new pseudos. We can then place those new pseudos
2494 into an RTL_EXPR and use them later, even after a call to
2497 Note this is not strictly needed for library calls since they
2498 do not call emit_queue before loading their arguments. However,
2499 we may need to have library calls call emit_queue in the future
2500 since failing to do so could cause problems for targets which
2501 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2502 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2504 #ifdef TARGET_MEM_FUNCTIONS
2505 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2507 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2508 TREE_UNSIGNED (integer_type_node));
2509 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2513 #ifdef TARGET_MEM_FUNCTIONS
2514 /* It is incorrect to use the libcall calling conventions to call
2515 memset in this context.
2517 This could be a user call to memset and the user may wish to
2518 examine the return value from memset.
2520 For targets where libcalls and normal calls have different
2521 conventions for returning pointers, we could end up generating
2524 So instead of using a libcall sequence we build up a suitable
2525 CALL_EXPR and expand the call in the normal fashion. */
2526 if (fn == NULL_TREE)
2530 /* This was copied from except.c, I don't know if all this is
2531 necessary in this context or not. */
2532 fn = get_identifier ("memset");
2533 push_obstacks_nochange ();
2534 end_temporary_allocation ();
2535 fntype = build_pointer_type (void_type_node);
2536 fntype = build_function_type (fntype, NULL_TREE);
2537 fn = build_decl (FUNCTION_DECL, fn, fntype);
2538 DECL_EXTERNAL (fn) = 1;
2539 TREE_PUBLIC (fn) = 1;
2540 DECL_ARTIFICIAL (fn) = 1;
2541 make_decl_rtl (fn, NULL_PTR, 1);
2542 assemble_external (fn);
2546 /* We need to make an argument list for the function call.
2548 memset has three arguments, the first is a void * addresses, the
2549 second a integer with the initialization value, the last is a
2550 size_t byte count for the copy. */
2552 = build_tree_list (NULL_TREE,
2553 make_tree (build_pointer_type (void_type_node),
2555 TREE_CHAIN (arg_list)
2556 = build_tree_list (NULL_TREE,
2557 make_tree (integer_type_node, const0_rtx));
2558 TREE_CHAIN (TREE_CHAIN (arg_list))
2559 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2560 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2562 /* Now we have to build up the CALL_EXPR itself. */
2563 call_expr = build1 (ADDR_EXPR,
2564 build_pointer_type (TREE_TYPE (fn)), fn);
2565 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2566 call_expr, arg_list, NULL_TREE);
2567 TREE_SIDE_EFFECTS (call_expr) = 1;
2569 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2571 emit_library_call (bzero_libfunc, 0,
2572 VOIDmode, 2, object, Pmode, size,
2573 TYPE_MODE (integer_type_node));
2578 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2583 /* Generate code to copy Y into X.
2584 Both Y and X must have the same mode, except that
2585 Y can be a constant with VOIDmode.
2586 This mode cannot be BLKmode; use emit_block_move for that.
2588 Return the last instruction emitted. */
2591 emit_move_insn (x, y)
2594 enum machine_mode mode = GET_MODE (x);
2596 x = protect_from_queue (x, 1);
2597 y = protect_from_queue (y, 0);
2599 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2602 /* Never force constant_p_rtx to memory. */
2603 if (GET_CODE (y) == CONSTANT_P_RTX)
2605 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2606 y = force_const_mem (mode, y);
2608 /* If X or Y are memory references, verify that their addresses are valid
2610 if (GET_CODE (x) == MEM
2611 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2612 && ! push_operand (x, GET_MODE (x)))
2614 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2615 x = change_address (x, VOIDmode, XEXP (x, 0));
2617 if (GET_CODE (y) == MEM
2618 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2620 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2621 y = change_address (y, VOIDmode, XEXP (y, 0));
2623 if (mode == BLKmode)
2626 return emit_move_insn_1 (x, y);
2629 /* Low level part of emit_move_insn.
2630 Called just like emit_move_insn, but assumes X and Y
2631 are basically valid. */
2634 emit_move_insn_1 (x, y)
2637 enum machine_mode mode = GET_MODE (x);
2638 enum machine_mode submode;
2639 enum mode_class class = GET_MODE_CLASS (mode);
2642 if (mode >= MAX_MACHINE_MODE)
2645 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2647 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2649 /* Expand complex moves by moving real part and imag part, if possible. */
2650 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2651 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2653 (class == MODE_COMPLEX_INT
2654 ? MODE_INT : MODE_FLOAT),
2656 && (mov_optab->handlers[(int) submode].insn_code
2657 != CODE_FOR_nothing))
2659 /* Don't split destination if it is a stack push. */
2660 int stack = push_operand (x, GET_MODE (x));
2662 /* If this is a stack, push the highpart first, so it
2663 will be in the argument order.
2665 In that case, change_address is used only to convert
2666 the mode, not to change the address. */
2669 /* Note that the real part always precedes the imag part in memory
2670 regardless of machine's endianness. */
2671 #ifdef STACK_GROWS_DOWNWARD
2672 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2673 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2674 gen_imagpart (submode, y)));
2675 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2676 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2677 gen_realpart (submode, y)));
2679 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2680 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2681 gen_realpart (submode, y)));
2682 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2683 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2684 gen_imagpart (submode, y)));
2689 rtx realpart_x, realpart_y;
2690 rtx imagpart_x, imagpart_y;
2692 /* If this is a complex value with each part being smaller than a
2693 word, the usual calling sequence will likely pack the pieces into
2694 a single register. Unfortunately, SUBREG of hard registers only
2695 deals in terms of words, so we have a problem converting input
2696 arguments to the CONCAT of two registers that is used elsewhere
2697 for complex values. If this is before reload, we can copy it into
2698 memory and reload. FIXME, we should see about using extract and
2699 insert on integer registers, but complex short and complex char
2700 variables should be rarely used. */
2701 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2702 && (reload_in_progress | reload_completed) == 0)
2704 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2705 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2707 if (packed_dest_p || packed_src_p)
2709 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2710 ? MODE_FLOAT : MODE_INT);
2712 enum machine_mode reg_mode =
2713 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2715 if (reg_mode != BLKmode)
2717 rtx mem = assign_stack_temp (reg_mode,
2718 GET_MODE_SIZE (mode), 0);
2720 rtx cmem = change_address (mem, mode, NULL_RTX);
2722 current_function_cannot_inline
2723 = "function using short complex types cannot be inline";
2727 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2728 emit_move_insn_1 (cmem, y);
2729 return emit_move_insn_1 (sreg, mem);
2733 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2734 emit_move_insn_1 (mem, sreg);
2735 return emit_move_insn_1 (x, cmem);
2741 realpart_x = gen_realpart (submode, x);
2742 realpart_y = gen_realpart (submode, y);
2743 imagpart_x = gen_imagpart (submode, x);
2744 imagpart_y = gen_imagpart (submode, y);
2746 /* Show the output dies here. This is necessary for SUBREGs
2747 of pseudos since we cannot track their lifetimes correctly;
2748 hard regs shouldn't appear here except as return values.
2749 We never want to emit such a clobber after reload. */
2751 && ! (reload_in_progress || reload_completed)
2752 && (GET_CODE (realpart_x) == SUBREG
2753 || GET_CODE (imagpart_x) == SUBREG))
2755 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2758 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2759 (realpart_x, realpart_y));
2760 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2761 (imagpart_x, imagpart_y));
2764 return get_last_insn ();
2767 /* This will handle any multi-word mode that lacks a move_insn pattern.
2768 However, you will get better code if you define such patterns,
2769 even if they must turn into multiple assembler instructions. */
2770 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2776 #ifdef PUSH_ROUNDING
2778 /* If X is a push on the stack, do the push now and replace
2779 X with a reference to the stack pointer. */
2780 if (push_operand (x, GET_MODE (x)))
2782 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2783 x = change_address (x, VOIDmode, stack_pointer_rtx);
2791 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2794 rtx xpart = operand_subword (x, i, 1, mode);
2795 rtx ypart = operand_subword (y, i, 1, mode);
2797 /* If we can't get a part of Y, put Y into memory if it is a
2798 constant. Otherwise, force it into a register. If we still
2799 can't get a part of Y, abort. */
2800 if (ypart == 0 && CONSTANT_P (y))
2802 y = force_const_mem (mode, y);
2803 ypart = operand_subword (y, i, 1, mode);
2805 else if (ypart == 0)
2806 ypart = operand_subword_force (y, i, mode);
2808 if (xpart == 0 || ypart == 0)
2811 need_clobber |= (GET_CODE (xpart) == SUBREG);
2813 last_insn = emit_move_insn (xpart, ypart);
2816 seq = gen_sequence ();
2819 /* Show the output dies here. This is necessary for SUBREGs
2820 of pseudos since we cannot track their lifetimes correctly;
2821 hard regs shouldn't appear here except as return values.
2822 We never want to emit such a clobber after reload. */
2824 && ! (reload_in_progress || reload_completed)
2825 && need_clobber != 0)
2827 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2838 /* Pushing data onto the stack. */
2840 /* Push a block of length SIZE (perhaps variable)
2841 and return an rtx to address the beginning of the block.
2842 Note that it is not possible for the value returned to be a QUEUED.
2843 The value may be virtual_outgoing_args_rtx.
2845 EXTRA is the number of bytes of padding to push in addition to SIZE.
2846 BELOW nonzero means this padding comes at low addresses;
2847 otherwise, the padding comes at high addresses. */
2850 push_block (size, extra, below)
2856 size = convert_modes (Pmode, ptr_mode, size, 1);
2857 if (CONSTANT_P (size))
2858 anti_adjust_stack (plus_constant (size, extra));
2859 else if (GET_CODE (size) == REG && extra == 0)
2860 anti_adjust_stack (size);
2863 rtx temp = copy_to_mode_reg (Pmode, size);
2865 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2866 temp, 0, OPTAB_LIB_WIDEN);
2867 anti_adjust_stack (temp);
2870 #if defined (STACK_GROWS_DOWNWARD) \
2871 || (defined (ARGS_GROW_DOWNWARD) \
2872 && !defined (ACCUMULATE_OUTGOING_ARGS))
2874 /* Return the lowest stack address when STACK or ARGS grow downward and
2875 we are not aaccumulating outgoing arguments (the c4x port uses such
2877 temp = virtual_outgoing_args_rtx;
2878 if (extra != 0 && below)
2879 temp = plus_constant (temp, extra);
2881 if (GET_CODE (size) == CONST_INT)
2882 temp = plus_constant (virtual_outgoing_args_rtx,
2883 - INTVAL (size) - (below ? 0 : extra));
2884 else if (extra != 0 && !below)
2885 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2886 negate_rtx (Pmode, plus_constant (size, extra)));
2888 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2889 negate_rtx (Pmode, size));
2892 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2898 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2901 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2902 block of SIZE bytes. */
2905 get_push_address (size)
2910 if (STACK_PUSH_CODE == POST_DEC)
2911 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2912 else if (STACK_PUSH_CODE == POST_INC)
2913 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2915 temp = stack_pointer_rtx;
2917 return copy_to_reg (temp);
2920 /* Generate code to push X onto the stack, assuming it has mode MODE and
2922 MODE is redundant except when X is a CONST_INT (since they don't
2924 SIZE is an rtx for the size of data to be copied (in bytes),
2925 needed only if X is BLKmode.
2927 ALIGN (in bytes) is maximum alignment we can assume.
2929 If PARTIAL and REG are both nonzero, then copy that many of the first
2930 words of X into registers starting with REG, and push the rest of X.
2931 The amount of space pushed is decreased by PARTIAL words,
2932 rounded *down* to a multiple of PARM_BOUNDARY.
2933 REG must be a hard register in this case.
2934 If REG is zero but PARTIAL is not, take any all others actions for an
2935 argument partially in registers, but do not actually load any
2938 EXTRA is the amount in bytes of extra space to leave next to this arg.
2939 This is ignored if an argument block has already been allocated.
2941 On a machine that lacks real push insns, ARGS_ADDR is the address of
2942 the bottom of the argument block for this call. We use indexing off there
2943 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2944 argument block has not been preallocated.
2946 ARGS_SO_FAR is the size of args previously pushed for this call.
2948 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2949 for arguments passed in registers. If nonzero, it will be the number
2950 of bytes required. */
2953 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2954 args_addr, args_so_far, reg_parm_stack_space)
2956 enum machine_mode mode;
2965 int reg_parm_stack_space;
2968 enum direction stack_direction
2969 #ifdef STACK_GROWS_DOWNWARD
2975 /* Decide where to pad the argument: `downward' for below,
2976 `upward' for above, or `none' for don't pad it.
2977 Default is below for small data on big-endian machines; else above. */
2978 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2980 /* Invert direction if stack is post-update. */
2981 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2982 if (where_pad != none)
2983 where_pad = (where_pad == downward ? upward : downward);
2985 xinner = x = protect_from_queue (x, 0);
2987 if (mode == BLKmode)
2989 /* Copy a block into the stack, entirely or partially. */
2992 int used = partial * UNITS_PER_WORD;
2993 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3001 /* USED is now the # of bytes we need not copy to the stack
3002 because registers will take care of them. */
3005 xinner = change_address (xinner, BLKmode,
3006 plus_constant (XEXP (xinner, 0), used));
3008 /* If the partial register-part of the arg counts in its stack size,
3009 skip the part of stack space corresponding to the registers.
3010 Otherwise, start copying to the beginning of the stack space,
3011 by setting SKIP to 0. */
3012 skip = (reg_parm_stack_space == 0) ? 0 : used;
3014 #ifdef PUSH_ROUNDING
3015 /* Do it with several push insns if that doesn't take lots of insns
3016 and if there is no difficulty with push insns that skip bytes
3017 on the stack for alignment purposes. */
3019 && GET_CODE (size) == CONST_INT
3021 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3022 /* Here we avoid the case of a structure whose weak alignment
3023 forces many pushes of a small amount of data,
3024 and such small pushes do rounding that causes trouble. */
3025 && ((! SLOW_UNALIGNED_ACCESS)
3026 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
3027 || PUSH_ROUNDING (align) == align)
3028 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3030 /* Push padding now if padding above and stack grows down,
3031 or if padding below and stack grows up.
3032 But if space already allocated, this has already been done. */
3033 if (extra && args_addr == 0
3034 && where_pad != none && where_pad != stack_direction)
3035 anti_adjust_stack (GEN_INT (extra));
3037 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3038 INTVAL (size) - used, align);
3040 if (current_function_check_memory_usage && ! in_check_memory_usage)
3044 in_check_memory_usage = 1;
3045 temp = get_push_address (INTVAL(size) - used);
3046 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3047 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3049 XEXP (xinner, 0), Pmode,
3050 GEN_INT (INTVAL(size) - used),
3051 TYPE_MODE (sizetype));
3053 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3055 GEN_INT (INTVAL(size) - used),
3056 TYPE_MODE (sizetype),
3057 GEN_INT (MEMORY_USE_RW),
3058 TYPE_MODE (integer_type_node));
3059 in_check_memory_usage = 0;
3063 #endif /* PUSH_ROUNDING */
3065 /* Otherwise make space on the stack and copy the data
3066 to the address of that space. */
3068 /* Deduct words put into registers from the size we must copy. */
3071 if (GET_CODE (size) == CONST_INT)
3072 size = GEN_INT (INTVAL (size) - used);
3074 size = expand_binop (GET_MODE (size), sub_optab, size,
3075 GEN_INT (used), NULL_RTX, 0,
3079 /* Get the address of the stack space.
3080 In this case, we do not deal with EXTRA separately.
3081 A single stack adjust will do. */
3084 temp = push_block (size, extra, where_pad == downward);
3087 else if (GET_CODE (args_so_far) == CONST_INT)
3088 temp = memory_address (BLKmode,
3089 plus_constant (args_addr,
3090 skip + INTVAL (args_so_far)));
3092 temp = memory_address (BLKmode,
3093 plus_constant (gen_rtx_PLUS (Pmode,
3097 if (current_function_check_memory_usage && ! in_check_memory_usage)
3101 in_check_memory_usage = 1;
3102 target = copy_to_reg (temp);
3103 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3104 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3106 XEXP (xinner, 0), Pmode,
3107 size, TYPE_MODE (sizetype));
3109 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3111 size, TYPE_MODE (sizetype),
3112 GEN_INT (MEMORY_USE_RW),
3113 TYPE_MODE (integer_type_node));
3114 in_check_memory_usage = 0;
3117 /* TEMP is the address of the block. Copy the data there. */
3118 if (GET_CODE (size) == CONST_INT
3119 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
3121 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3122 INTVAL (size), align);
3127 rtx opalign = GEN_INT (align);
3128 enum machine_mode mode;
3129 rtx target = gen_rtx_MEM (BLKmode, temp);
3131 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3133 mode = GET_MODE_WIDER_MODE (mode))
3135 enum insn_code code = movstr_optab[(int) mode];
3137 if (code != CODE_FOR_nothing
3138 && ((GET_CODE (size) == CONST_INT
3139 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3140 <= (GET_MODE_MASK (mode) >> 1)))
3141 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3142 && (insn_operand_predicate[(int) code][0] == 0
3143 || ((*insn_operand_predicate[(int) code][0])
3145 && (insn_operand_predicate[(int) code][1] == 0
3146 || ((*insn_operand_predicate[(int) code][1])
3148 && (insn_operand_predicate[(int) code][3] == 0
3149 || ((*insn_operand_predicate[(int) code][3])
3150 (opalign, VOIDmode))))
3152 rtx op2 = convert_to_mode (mode, size, 1);
3153 rtx last = get_last_insn ();
3156 if (insn_operand_predicate[(int) code][2] != 0
3157 && ! ((*insn_operand_predicate[(int) code][2])
3159 op2 = copy_to_mode_reg (mode, op2);
3161 pat = GEN_FCN ((int) code) (target, xinner,
3169 delete_insns_since (last);
3174 #ifndef ACCUMULATE_OUTGOING_ARGS
3175 /* If the source is referenced relative to the stack pointer,
3176 copy it to another register to stabilize it. We do not need
3177 to do this if we know that we won't be changing sp. */
3179 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3180 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3181 temp = copy_to_reg (temp);
3184 /* Make inhibit_defer_pop nonzero around the library call
3185 to force it to pop the bcopy-arguments right away. */
3187 #ifdef TARGET_MEM_FUNCTIONS
3188 emit_library_call (memcpy_libfunc, 0,
3189 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3190 convert_to_mode (TYPE_MODE (sizetype),
3191 size, TREE_UNSIGNED (sizetype)),
3192 TYPE_MODE (sizetype));
3194 emit_library_call (bcopy_libfunc, 0,
3195 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3196 convert_to_mode (TYPE_MODE (integer_type_node),
3198 TREE_UNSIGNED (integer_type_node)),
3199 TYPE_MODE (integer_type_node));
3204 else if (partial > 0)
3206 /* Scalar partly in registers. */
3208 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3211 /* # words of start of argument
3212 that we must make space for but need not store. */
3213 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3214 int args_offset = INTVAL (args_so_far);
3217 /* Push padding now if padding above and stack grows down,
3218 or if padding below and stack grows up.
3219 But if space already allocated, this has already been done. */
3220 if (extra && args_addr == 0
3221 && where_pad != none && where_pad != stack_direction)
3222 anti_adjust_stack (GEN_INT (extra));
3224 /* If we make space by pushing it, we might as well push
3225 the real data. Otherwise, we can leave OFFSET nonzero
3226 and leave the space uninitialized. */
3230 /* Now NOT_STACK gets the number of words that we don't need to
3231 allocate on the stack. */
3232 not_stack = partial - offset;
3234 /* If the partial register-part of the arg counts in its stack size,
3235 skip the part of stack space corresponding to the registers.
3236 Otherwise, start copying to the beginning of the stack space,
3237 by setting SKIP to 0. */
3238 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3240 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3241 x = validize_mem (force_const_mem (mode, x));
3243 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3244 SUBREGs of such registers are not allowed. */
3245 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3246 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3247 x = copy_to_reg (x);
3249 /* Loop over all the words allocated on the stack for this arg. */
3250 /* We can do it by words, because any scalar bigger than a word
3251 has a size a multiple of a word. */
3252 #ifndef PUSH_ARGS_REVERSED
3253 for (i = not_stack; i < size; i++)
3255 for (i = size - 1; i >= not_stack; i--)
3257 if (i >= not_stack + offset)
3258 emit_push_insn (operand_subword_force (x, i, mode),
3259 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3261 GEN_INT (args_offset + ((i - not_stack + skip)
3263 reg_parm_stack_space);
3268 rtx target = NULL_RTX;
3270 /* Push padding now if padding above and stack grows down,
3271 or if padding below and stack grows up.
3272 But if space already allocated, this has already been done. */
3273 if (extra && args_addr == 0
3274 && where_pad != none && where_pad != stack_direction)
3275 anti_adjust_stack (GEN_INT (extra));
3277 #ifdef PUSH_ROUNDING
3279 addr = gen_push_operand ();
3283 if (GET_CODE (args_so_far) == CONST_INT)
3285 = memory_address (mode,
3286 plus_constant (args_addr,
3287 INTVAL (args_so_far)));
3289 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3294 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3296 if (current_function_check_memory_usage && ! in_check_memory_usage)
3298 in_check_memory_usage = 1;
3300 target = get_push_address (GET_MODE_SIZE (mode));
3302 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3303 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3306 GEN_INT (GET_MODE_SIZE (mode)),
3307 TYPE_MODE (sizetype));
3309 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3311 GEN_INT (GET_MODE_SIZE (mode)),
3312 TYPE_MODE (sizetype),
3313 GEN_INT (MEMORY_USE_RW),
3314 TYPE_MODE (integer_type_node));
3315 in_check_memory_usage = 0;
3320 /* If part should go in registers, copy that part
3321 into the appropriate registers. Do this now, at the end,
3322 since mem-to-mem copies above may do function calls. */
3323 if (partial > 0 && reg != 0)
3325 /* Handle calls that pass values in multiple non-contiguous locations.
3326 The Irix 6 ABI has examples of this. */
3327 if (GET_CODE (reg) == PARALLEL)
3328 emit_group_load (reg, x, -1, align); /* ??? size? */
3330 move_block_to_reg (REGNO (reg), x, partial, mode);
3333 if (extra && args_addr == 0 && where_pad == stack_direction)
3334 anti_adjust_stack (GEN_INT (extra));
3337 /* Expand an assignment that stores the value of FROM into TO.
3338 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3339 (This may contain a QUEUED rtx;
3340 if the value is constant, this rtx is a constant.)
3341 Otherwise, the returned value is NULL_RTX.
3343 SUGGEST_REG is no longer actually used.
3344 It used to mean, copy the value through a register
3345 and return that register, if that is possible.
3346 We now use WANT_VALUE to decide whether to do this. */
3349 expand_assignment (to, from, want_value, suggest_reg)
3354 register rtx to_rtx = 0;
3357 /* Don't crash if the lhs of the assignment was erroneous. */
3359 if (TREE_CODE (to) == ERROR_MARK)
3361 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3362 return want_value ? result : NULL_RTX;
3365 /* Assignment of a structure component needs special treatment
3366 if the structure component's rtx is not simply a MEM.
3367 Assignment of an array element at a constant index, and assignment of
3368 an array element in an unaligned packed structure field, has the same
3371 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3372 || TREE_CODE (to) == ARRAY_REF)
3374 enum machine_mode mode1;
3384 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3385 &unsignedp, &volatilep, &alignment);
3387 /* If we are going to use store_bit_field and extract_bit_field,
3388 make sure to_rtx will be safe for multiple use. */
3390 if (mode1 == VOIDmode && want_value)
3391 tem = stabilize_reference (tem);
3393 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3396 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3398 if (GET_CODE (to_rtx) != MEM)
3401 if (GET_MODE (offset_rtx) != ptr_mode)
3403 #ifdef POINTERS_EXTEND_UNSIGNED
3404 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3406 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3410 /* A constant address in TO_RTX can have VOIDmode, we must not try
3411 to call force_reg for that case. Avoid that case. */
3412 if (GET_CODE (to_rtx) == MEM
3413 && GET_MODE (to_rtx) == BLKmode
3414 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3416 && (bitpos % bitsize) == 0
3417 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3418 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3420 rtx temp = change_address (to_rtx, mode1,
3421 plus_constant (XEXP (to_rtx, 0),
3424 if (GET_CODE (XEXP (temp, 0)) == REG)
3427 to_rtx = change_address (to_rtx, mode1,
3428 force_reg (GET_MODE (XEXP (temp, 0)),
3433 to_rtx = change_address (to_rtx, VOIDmode,
3434 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3435 force_reg (ptr_mode, offset_rtx)));
3439 if (GET_CODE (to_rtx) == MEM)
3441 /* When the offset is zero, to_rtx is the address of the
3442 structure we are storing into, and hence may be shared.
3443 We must make a new MEM before setting the volatile bit. */
3445 to_rtx = copy_rtx (to_rtx);
3447 MEM_VOLATILE_P (to_rtx) = 1;
3449 #if 0 /* This was turned off because, when a field is volatile
3450 in an object which is not volatile, the object may be in a register,
3451 and then we would abort over here. */
3457 if (TREE_CODE (to) == COMPONENT_REF
3458 && TREE_READONLY (TREE_OPERAND (to, 1)))
3461 to_rtx = copy_rtx (to_rtx);
3463 RTX_UNCHANGING_P (to_rtx) = 1;
3466 /* Check the access. */
3467 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3472 enum machine_mode best_mode;
3474 best_mode = get_best_mode (bitsize, bitpos,
3475 TYPE_ALIGN (TREE_TYPE (tem)),
3477 if (best_mode == VOIDmode)
3480 best_mode_size = GET_MODE_BITSIZE (best_mode);
3481 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3482 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3483 size *= GET_MODE_SIZE (best_mode);
3485 /* Check the access right of the pointer. */
3487 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3489 GEN_INT (size), TYPE_MODE (sizetype),
3490 GEN_INT (MEMORY_USE_WO),
3491 TYPE_MODE (integer_type_node));
3494 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3496 /* Spurious cast makes HPUX compiler happy. */
3497 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3500 /* Required alignment of containing datum. */
3502 int_size_in_bytes (TREE_TYPE (tem)),
3503 get_alias_set (to));
3504 preserve_temp_slots (result);
3508 /* If the value is meaningful, convert RESULT to the proper mode.
3509 Otherwise, return nothing. */
3510 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3511 TYPE_MODE (TREE_TYPE (from)),
3513 TREE_UNSIGNED (TREE_TYPE (to)))
3517 /* If the rhs is a function call and its value is not an aggregate,
3518 call the function before we start to compute the lhs.
3519 This is needed for correct code for cases such as
3520 val = setjmp (buf) on machines where reference to val
3521 requires loading up part of an address in a separate insn.
3523 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3524 a promoted variable where the zero- or sign- extension needs to be done.
3525 Handling this in the normal way is safe because no computation is done
3527 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3528 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3529 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3534 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3536 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3538 /* Handle calls that return values in multiple non-contiguous locations.
3539 The Irix 6 ABI has examples of this. */
3540 if (GET_CODE (to_rtx) == PARALLEL)
3541 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3542 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3543 else if (GET_MODE (to_rtx) == BLKmode)
3544 emit_block_move (to_rtx, value, expr_size (from),
3545 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3548 #ifdef POINTERS_EXTEND_UNSIGNED
3549 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3550 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3551 value = convert_memory_address (GET_MODE (to_rtx), value);
3553 emit_move_insn (to_rtx, value);
3555 preserve_temp_slots (to_rtx);
3558 return want_value ? to_rtx : NULL_RTX;
3561 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3562 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3566 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3567 if (GET_CODE (to_rtx) == MEM)
3568 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3571 /* Don't move directly into a return register. */
3572 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3577 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3578 emit_move_insn (to_rtx, temp);
3579 preserve_temp_slots (to_rtx);
3582 return want_value ? to_rtx : NULL_RTX;
3585 /* In case we are returning the contents of an object which overlaps
3586 the place the value is being stored, use a safe function when copying
3587 a value through a pointer into a structure value return block. */
3588 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3589 && current_function_returns_struct
3590 && !current_function_returns_pcc_struct)
3595 size = expr_size (from);
3596 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3597 EXPAND_MEMORY_USE_DONT);
3599 /* Copy the rights of the bitmap. */
3600 if (current_function_check_memory_usage)
3601 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3602 XEXP (to_rtx, 0), Pmode,
3603 XEXP (from_rtx, 0), Pmode,
3604 convert_to_mode (TYPE_MODE (sizetype),
3605 size, TREE_UNSIGNED (sizetype)),
3606 TYPE_MODE (sizetype));
3608 #ifdef TARGET_MEM_FUNCTIONS
3609 emit_library_call (memcpy_libfunc, 0,
3610 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3611 XEXP (from_rtx, 0), Pmode,
3612 convert_to_mode (TYPE_MODE (sizetype),
3613 size, TREE_UNSIGNED (sizetype)),
3614 TYPE_MODE (sizetype));
3616 emit_library_call (bcopy_libfunc, 0,
3617 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3618 XEXP (to_rtx, 0), Pmode,
3619 convert_to_mode (TYPE_MODE (integer_type_node),
3620 size, TREE_UNSIGNED (integer_type_node)),
3621 TYPE_MODE (integer_type_node));
3624 preserve_temp_slots (to_rtx);
3627 return want_value ? to_rtx : NULL_RTX;
3630 /* Compute FROM and store the value in the rtx we got. */
3633 result = store_expr (from, to_rtx, want_value);
3634 preserve_temp_slots (result);
3637 return want_value ? result : NULL_RTX;
3640 /* Generate code for computing expression EXP,
3641 and storing the value into TARGET.
3642 TARGET may contain a QUEUED rtx.
3644 If WANT_VALUE is nonzero, return a copy of the value
3645 not in TARGET, so that we can be sure to use the proper
3646 value in a containing expression even if TARGET has something
3647 else stored in it. If possible, we copy the value through a pseudo
3648 and return that pseudo. Or, if the value is constant, we try to
3649 return the constant. In some cases, we return a pseudo
3650 copied *from* TARGET.
3652 If the mode is BLKmode then we may return TARGET itself.
3653 It turns out that in BLKmode it doesn't cause a problem.
3654 because C has no operators that could combine two different
3655 assignments into the same BLKmode object with different values
3656 with no sequence point. Will other languages need this to
3659 If WANT_VALUE is 0, we return NULL, to make sure
3660 to catch quickly any cases where the caller uses the value
3661 and fails to set WANT_VALUE. */
3664 store_expr (exp, target, want_value)
3666 register rtx target;
3670 int dont_return_target = 0;
3672 if (TREE_CODE (exp) == COMPOUND_EXPR)
3674 /* Perform first part of compound expression, then assign from second
3676 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3678 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3680 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3682 /* For conditional expression, get safe form of the target. Then
3683 test the condition, doing the appropriate assignment on either
3684 side. This avoids the creation of unnecessary temporaries.
3685 For non-BLKmode, it is more efficient not to do this. */
3687 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3690 target = protect_from_queue (target, 1);
3692 do_pending_stack_adjust ();
3694 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3695 start_cleanup_deferral ();
3696 store_expr (TREE_OPERAND (exp, 1), target, 0);
3697 end_cleanup_deferral ();
3699 emit_jump_insn (gen_jump (lab2));
3702 start_cleanup_deferral ();
3703 store_expr (TREE_OPERAND (exp, 2), target, 0);
3704 end_cleanup_deferral ();
3709 return want_value ? target : NULL_RTX;
3711 else if (queued_subexp_p (target))
3712 /* If target contains a postincrement, let's not risk
3713 using it as the place to generate the rhs. */
3715 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3717 /* Expand EXP into a new pseudo. */
3718 temp = gen_reg_rtx (GET_MODE (target));
3719 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3722 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3724 /* If target is volatile, ANSI requires accessing the value
3725 *from* the target, if it is accessed. So make that happen.
3726 In no case return the target itself. */
3727 if (! MEM_VOLATILE_P (target) && want_value)
3728 dont_return_target = 1;
3730 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3731 && GET_MODE (target) != BLKmode)
3732 /* If target is in memory and caller wants value in a register instead,
3733 arrange that. Pass TARGET as target for expand_expr so that,
3734 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3735 We know expand_expr will not use the target in that case.
3736 Don't do this if TARGET is volatile because we are supposed
3737 to write it and then read it. */
3739 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3740 GET_MODE (target), 0);
3741 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3742 temp = copy_to_reg (temp);
3743 dont_return_target = 1;
3745 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3746 /* If this is an scalar in a register that is stored in a wider mode
3747 than the declared mode, compute the result into its declared mode
3748 and then convert to the wider mode. Our value is the computed
3751 /* If we don't want a value, we can do the conversion inside EXP,
3752 which will often result in some optimizations. Do the conversion
3753 in two steps: first change the signedness, if needed, then
3754 the extend. But don't do this if the type of EXP is a subtype
3755 of something else since then the conversion might involve
3756 more than just converting modes. */
3757 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3758 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3760 if (TREE_UNSIGNED (TREE_TYPE (exp))
3761 != SUBREG_PROMOTED_UNSIGNED_P (target))
3764 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3768 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3769 SUBREG_PROMOTED_UNSIGNED_P (target)),
3773 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3775 /* If TEMP is a volatile MEM and we want a result value, make
3776 the access now so it gets done only once. Likewise if
3777 it contains TARGET. */
3778 if (GET_CODE (temp) == MEM && want_value
3779 && (MEM_VOLATILE_P (temp)
3780 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3781 temp = copy_to_reg (temp);
3783 /* If TEMP is a VOIDmode constant, use convert_modes to make
3784 sure that we properly convert it. */
3785 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3786 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3787 TYPE_MODE (TREE_TYPE (exp)), temp,
3788 SUBREG_PROMOTED_UNSIGNED_P (target));
3790 convert_move (SUBREG_REG (target), temp,
3791 SUBREG_PROMOTED_UNSIGNED_P (target));
3792 return want_value ? temp : NULL_RTX;
3796 temp = expand_expr (exp, target, GET_MODE (target), 0);
3797 /* Return TARGET if it's a specified hardware register.
3798 If TARGET is a volatile mem ref, either return TARGET
3799 or return a reg copied *from* TARGET; ANSI requires this.
3801 Otherwise, if TEMP is not TARGET, return TEMP
3802 if it is constant (for efficiency),
3803 or if we really want the correct value. */
3804 if (!(target && GET_CODE (target) == REG
3805 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3806 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3807 && ! rtx_equal_p (temp, target)
3808 && (CONSTANT_P (temp) || want_value))
3809 dont_return_target = 1;
3812 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3813 the same as that of TARGET, adjust the constant. This is needed, for
3814 example, in case it is a CONST_DOUBLE and we want only a word-sized
3816 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3817 && TREE_CODE (exp) != ERROR_MARK
3818 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3819 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3820 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3822 if (current_function_check_memory_usage
3823 && GET_CODE (target) == MEM
3824 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3826 if (GET_CODE (temp) == MEM)
3827 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3828 XEXP (target, 0), Pmode,
3829 XEXP (temp, 0), Pmode,
3830 expr_size (exp), TYPE_MODE (sizetype));
3832 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3833 XEXP (target, 0), Pmode,
3834 expr_size (exp), TYPE_MODE (sizetype),
3835 GEN_INT (MEMORY_USE_WO),
3836 TYPE_MODE (integer_type_node));
3839 /* If value was not generated in the target, store it there.
3840 Convert the value to TARGET's type first if nec. */
3841 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3842 one or both of them are volatile memory refs, we have to distinguish
3844 - expand_expr has used TARGET. In this case, we must not generate
3845 another copy. This can be detected by TARGET being equal according
3847 - expand_expr has not used TARGET - that means that the source just
3848 happens to have the same RTX form. Since temp will have been created
3849 by expand_expr, it will compare unequal according to == .
3850 We must generate a copy in this case, to reach the correct number
3851 of volatile memory references. */
3853 if ((! rtx_equal_p (temp, target)
3854 || (temp != target && (side_effects_p (temp)
3855 || side_effects_p (target))))
3856 && TREE_CODE (exp) != ERROR_MARK)
3858 target = protect_from_queue (target, 1);
3859 if (GET_MODE (temp) != GET_MODE (target)
3860 && GET_MODE (temp) != VOIDmode)
3862 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3863 if (dont_return_target)
3865 /* In this case, we will return TEMP,
3866 so make sure it has the proper mode.
3867 But don't forget to store the value into TARGET. */
3868 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3869 emit_move_insn (target, temp);
3872 convert_move (target, temp, unsignedp);
3875 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3877 /* Handle copying a string constant into an array.
3878 The string constant may be shorter than the array.
3879 So copy just the string's actual length, and clear the rest. */
3883 /* Get the size of the data type of the string,
3884 which is actually the size of the target. */
3885 size = expr_size (exp);
3886 if (GET_CODE (size) == CONST_INT
3887 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3888 emit_block_move (target, temp, size,
3889 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3892 /* Compute the size of the data to copy from the string. */
3894 = size_binop (MIN_EXPR,
3895 make_tree (sizetype, size),
3897 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3898 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3902 /* Copy that much. */
3903 emit_block_move (target, temp, copy_size_rtx,
3904 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3906 /* Figure out how much is left in TARGET that we have to clear.
3907 Do all calculations in ptr_mode. */
3909 addr = XEXP (target, 0);
3910 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3912 if (GET_CODE (copy_size_rtx) == CONST_INT)
3914 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3915 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3919 addr = force_reg (ptr_mode, addr);
3920 addr = expand_binop (ptr_mode, add_optab, addr,
3921 copy_size_rtx, NULL_RTX, 0,
3924 size = expand_binop (ptr_mode, sub_optab, size,
3925 copy_size_rtx, NULL_RTX, 0,
3928 label = gen_label_rtx ();
3929 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3930 GET_MODE (size), 0, 0, label);
3933 if (size != const0_rtx)
3935 /* Be sure we can write on ADDR. */
3936 if (current_function_check_memory_usage)
3937 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3939 size, TYPE_MODE (sizetype),
3940 GEN_INT (MEMORY_USE_WO),
3941 TYPE_MODE (integer_type_node));
3942 #ifdef TARGET_MEM_FUNCTIONS
3943 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3945 const0_rtx, TYPE_MODE (integer_type_node),
3946 convert_to_mode (TYPE_MODE (sizetype),
3948 TREE_UNSIGNED (sizetype)),
3949 TYPE_MODE (sizetype));
3951 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3953 convert_to_mode (TYPE_MODE (integer_type_node),
3955 TREE_UNSIGNED (integer_type_node)),
3956 TYPE_MODE (integer_type_node));
3964 /* Handle calls that return values in multiple non-contiguous locations.
3965 The Irix 6 ABI has examples of this. */
3966 else if (GET_CODE (target) == PARALLEL)
3967 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3968 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3969 else if (GET_MODE (temp) == BLKmode)
3970 emit_block_move (target, temp, expr_size (exp),
3971 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3973 emit_move_insn (target, temp);
3976 /* If we don't want a value, return NULL_RTX. */
3980 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3981 ??? The latter test doesn't seem to make sense. */
3982 else if (dont_return_target && GET_CODE (temp) != MEM)
3985 /* Return TARGET itself if it is a hard register. */
3986 else if (want_value && GET_MODE (target) != BLKmode
3987 && ! (GET_CODE (target) == REG
3988 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3989 return copy_to_reg (target);
3995 /* Return 1 if EXP just contains zeros. */
4003 switch (TREE_CODE (exp))
4007 case NON_LVALUE_EXPR:
4008 return is_zeros_p (TREE_OPERAND (exp, 0));
4011 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
4015 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4018 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4021 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4022 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4023 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4024 if (! is_zeros_p (TREE_VALUE (elt)))
4034 /* Return 1 if EXP contains mostly (3/4) zeros. */
4037 mostly_zeros_p (exp)
4040 if (TREE_CODE (exp) == CONSTRUCTOR)
4042 int elts = 0, zeros = 0;
4043 tree elt = CONSTRUCTOR_ELTS (exp);
4044 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4046 /* If there are no ranges of true bits, it is all zero. */
4047 return elt == NULL_TREE;
4049 for (; elt; elt = TREE_CHAIN (elt))
4051 /* We do not handle the case where the index is a RANGE_EXPR,
4052 so the statistic will be somewhat inaccurate.
4053 We do make a more accurate count in store_constructor itself,
4054 so since this function is only used for nested array elements,
4055 this should be close enough. */
4056 if (mostly_zeros_p (TREE_VALUE (elt)))
4061 return 4 * zeros >= 3 * elts;
4064 return is_zeros_p (exp);
4067 /* Helper function for store_constructor.
4068 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4069 TYPE is the type of the CONSTRUCTOR, not the element type.
4070 CLEARED is as for store_constructor.
4072 This provides a recursive shortcut back to store_constructor when it isn't
4073 necessary to go through store_field. This is so that we can pass through
4074 the cleared field to let store_constructor know that we may not have to
4075 clear a substructure if the outer structure has already been cleared. */
4078 store_constructor_field (target, bitsize, bitpos,
4079 mode, exp, type, cleared)
4081 int bitsize, bitpos;
4082 enum machine_mode mode;
4086 if (TREE_CODE (exp) == CONSTRUCTOR
4087 && bitpos % BITS_PER_UNIT == 0
4088 /* If we have a non-zero bitpos for a register target, then we just
4089 let store_field do the bitfield handling. This is unlikely to
4090 generate unnecessary clear instructions anyways. */
4091 && (bitpos == 0 || GET_CODE (target) == MEM))
4094 target = change_address (target, VOIDmode,
4095 plus_constant (XEXP (target, 0),
4096 bitpos / BITS_PER_UNIT));
4097 store_constructor (exp, target, cleared);
4100 store_field (target, bitsize, bitpos, mode, exp,
4101 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
4102 int_size_in_bytes (type), 0);
4105 /* Store the value of constructor EXP into the rtx TARGET.
4106 TARGET is either a REG or a MEM.
4107 CLEARED is true if TARGET is known to have been zero'd. */
4110 store_constructor (exp, target, cleared)
4115 tree type = TREE_TYPE (exp);
4116 rtx exp_size = expr_size (exp);
4118 /* We know our target cannot conflict, since safe_from_p has been called. */
4120 /* Don't try copying piece by piece into a hard register
4121 since that is vulnerable to being clobbered by EXP.
4122 Instead, construct in a pseudo register and then copy it all. */
4123 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4125 rtx temp = gen_reg_rtx (GET_MODE (target));
4126 store_constructor (exp, temp, 0);
4127 emit_move_insn (target, temp);
4132 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4133 || TREE_CODE (type) == QUAL_UNION_TYPE)
4137 /* Inform later passes that the whole union value is dead. */
4138 if (TREE_CODE (type) == UNION_TYPE
4139 || TREE_CODE (type) == QUAL_UNION_TYPE)
4140 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4142 /* If we are building a static constructor into a register,
4143 set the initial value as zero so we can fold the value into
4144 a constant. But if more than one register is involved,
4145 this probably loses. */
4146 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4147 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4150 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4155 /* If the constructor has fewer fields than the structure
4156 or if we are initializing the structure to mostly zeros,
4157 clear the whole structure first. */
4158 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4159 != list_length (TYPE_FIELDS (type)))
4160 || mostly_zeros_p (exp))
4163 clear_storage (target, expr_size (exp),
4164 TYPE_ALIGN (type) / BITS_PER_UNIT);
4169 /* Inform later passes that the old value is dead. */
4170 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4172 /* Store each element of the constructor into
4173 the corresponding field of TARGET. */
4175 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4177 register tree field = TREE_PURPOSE (elt);
4178 tree value = TREE_VALUE (elt);
4179 register enum machine_mode mode;
4183 tree pos, constant = 0, offset = 0;
4184 rtx to_rtx = target;
4186 /* Just ignore missing fields.
4187 We cleared the whole structure, above,
4188 if any fields are missing. */
4192 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4195 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4196 unsignedp = TREE_UNSIGNED (field);
4197 mode = DECL_MODE (field);
4198 if (DECL_BIT_FIELD (field))
4201 pos = DECL_FIELD_BITPOS (field);
4202 if (TREE_CODE (pos) == INTEGER_CST)
4204 else if (TREE_CODE (pos) == PLUS_EXPR
4205 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4206 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4211 bitpos = TREE_INT_CST_LOW (constant);
4217 if (contains_placeholder_p (offset))
4218 offset = build (WITH_RECORD_EXPR, sizetype,
4219 offset, make_tree (TREE_TYPE (exp), target));
4221 offset = size_binop (FLOOR_DIV_EXPR, offset,
4222 size_int (BITS_PER_UNIT));
4224 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4225 if (GET_CODE (to_rtx) != MEM)
4228 if (GET_MODE (offset_rtx) != ptr_mode)
4230 #ifdef POINTERS_EXTEND_UNSIGNED
4231 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4233 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4238 = change_address (to_rtx, VOIDmode,
4239 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4240 force_reg (ptr_mode, offset_rtx)));
4242 if (TREE_READONLY (field))
4244 if (GET_CODE (to_rtx) == MEM)
4245 to_rtx = copy_rtx (to_rtx);
4247 RTX_UNCHANGING_P (to_rtx) = 1;
4250 #ifdef WORD_REGISTER_OPERATIONS
4251 /* If this initializes a field that is smaller than a word, at the
4252 start of a word, try to widen it to a full word.
4253 This special case allows us to output C++ member function
4254 initializations in a form that the optimizers can understand. */
4256 && GET_CODE (target) == REG
4257 && bitsize < BITS_PER_WORD
4258 && bitpos % BITS_PER_WORD == 0
4259 && GET_MODE_CLASS (mode) == MODE_INT
4260 && TREE_CODE (value) == INTEGER_CST
4261 && GET_CODE (exp_size) == CONST_INT
4262 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4264 tree type = TREE_TYPE (value);
4265 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4267 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4268 value = convert (type, value);
4270 if (BYTES_BIG_ENDIAN)
4272 = fold (build (LSHIFT_EXPR, type, value,
4273 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4274 bitsize = BITS_PER_WORD;
4278 store_constructor_field (to_rtx, bitsize, bitpos,
4279 mode, value, type, cleared);
4282 else if (TREE_CODE (type) == ARRAY_TYPE)
4287 tree domain = TYPE_DOMAIN (type);
4288 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4289 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4290 tree elttype = TREE_TYPE (type);
4292 /* If the constructor has fewer elements than the array,
4293 clear the whole array first. Similarly if this is
4294 static constructor of a non-BLKmode object. */
4295 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4299 HOST_WIDE_INT count = 0, zero_count = 0;
4301 /* This loop is a more accurate version of the loop in
4302 mostly_zeros_p (it handles RANGE_EXPR in an index).
4303 It is also needed to check for missing elements. */
4304 for (elt = CONSTRUCTOR_ELTS (exp);
4306 elt = TREE_CHAIN (elt))
4308 tree index = TREE_PURPOSE (elt);
4309 HOST_WIDE_INT this_node_count;
4310 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4312 tree lo_index = TREE_OPERAND (index, 0);
4313 tree hi_index = TREE_OPERAND (index, 1);
4314 if (TREE_CODE (lo_index) != INTEGER_CST
4315 || TREE_CODE (hi_index) != INTEGER_CST)
4320 this_node_count = TREE_INT_CST_LOW (hi_index)
4321 - TREE_INT_CST_LOW (lo_index) + 1;
4324 this_node_count = 1;
4325 count += this_node_count;
4326 if (mostly_zeros_p (TREE_VALUE (elt)))
4327 zero_count += this_node_count;
4329 /* Clear the entire array first if there are any missing elements,
4330 or if the incidence of zero elements is >= 75%. */
4331 if (count < maxelt - minelt + 1
4332 || 4 * zero_count >= 3 * count)
4338 clear_storage (target, expr_size (exp),
4339 TYPE_ALIGN (type) / BITS_PER_UNIT);
4343 /* Inform later passes that the old value is dead. */
4344 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4346 /* Store each element of the constructor into
4347 the corresponding element of TARGET, determined
4348 by counting the elements. */
4349 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4351 elt = TREE_CHAIN (elt), i++)
4353 register enum machine_mode mode;
4357 tree value = TREE_VALUE (elt);
4358 tree index = TREE_PURPOSE (elt);
4359 rtx xtarget = target;
4361 if (cleared && is_zeros_p (value))
4364 mode = TYPE_MODE (elttype);
4365 bitsize = GET_MODE_BITSIZE (mode);
4366 unsignedp = TREE_UNSIGNED (elttype);
4368 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4370 tree lo_index = TREE_OPERAND (index, 0);
4371 tree hi_index = TREE_OPERAND (index, 1);
4372 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4373 struct nesting *loop;
4374 HOST_WIDE_INT lo, hi, count;
4377 /* If the range is constant and "small", unroll the loop. */
4378 if (TREE_CODE (lo_index) == INTEGER_CST
4379 && TREE_CODE (hi_index) == INTEGER_CST
4380 && (lo = TREE_INT_CST_LOW (lo_index),
4381 hi = TREE_INT_CST_LOW (hi_index),
4382 count = hi - lo + 1,
4383 (GET_CODE (target) != MEM
4385 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4386 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4389 lo -= minelt; hi -= minelt;
4390 for (; lo <= hi; lo++)
4392 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4393 store_constructor_field (target, bitsize, bitpos,
4394 mode, value, type, cleared);
4399 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4400 loop_top = gen_label_rtx ();
4401 loop_end = gen_label_rtx ();
4403 unsignedp = TREE_UNSIGNED (domain);
4405 index = build_decl (VAR_DECL, NULL_TREE, domain);
4407 DECL_RTL (index) = index_r
4408 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4411 if (TREE_CODE (value) == SAVE_EXPR
4412 && SAVE_EXPR_RTL (value) == 0)
4414 /* Make sure value gets expanded once before the
4416 expand_expr (value, const0_rtx, VOIDmode, 0);
4419 store_expr (lo_index, index_r, 0);
4420 loop = expand_start_loop (0);
4422 /* Assign value to element index. */
4423 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4424 size_int (BITS_PER_UNIT));
4425 position = size_binop (MULT_EXPR,
4426 size_binop (MINUS_EXPR, index,
4427 TYPE_MIN_VALUE (domain)),
4429 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4430 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4431 xtarget = change_address (target, mode, addr);
4432 if (TREE_CODE (value) == CONSTRUCTOR)
4433 store_constructor (value, xtarget, cleared);
4435 store_expr (value, xtarget, 0);
4437 expand_exit_loop_if_false (loop,
4438 build (LT_EXPR, integer_type_node,
4441 expand_increment (build (PREINCREMENT_EXPR,
4443 index, integer_one_node), 0, 0);
4445 emit_label (loop_end);
4447 /* Needed by stupid register allocation. to extend the
4448 lifetime of pseudo-regs used by target past the end
4450 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4453 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4454 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4460 index = size_int (i);
4463 index = size_binop (MINUS_EXPR, index,
4464 TYPE_MIN_VALUE (domain));
4465 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4466 size_int (BITS_PER_UNIT));
4467 position = size_binop (MULT_EXPR, index, position);
4468 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4469 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4470 xtarget = change_address (target, mode, addr);
4471 store_expr (value, xtarget, 0);
4476 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4477 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4479 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4480 store_constructor_field (target, bitsize, bitpos,
4481 mode, value, type, cleared);
4485 /* set constructor assignments */
4486 else if (TREE_CODE (type) == SET_TYPE)
4488 tree elt = CONSTRUCTOR_ELTS (exp);
4489 int nbytes = int_size_in_bytes (type), nbits;
4490 tree domain = TYPE_DOMAIN (type);
4491 tree domain_min, domain_max, bitlength;
4493 /* The default implementation strategy is to extract the constant
4494 parts of the constructor, use that to initialize the target,
4495 and then "or" in whatever non-constant ranges we need in addition.
4497 If a large set is all zero or all ones, it is
4498 probably better to set it using memset (if available) or bzero.
4499 Also, if a large set has just a single range, it may also be
4500 better to first clear all the first clear the set (using
4501 bzero/memset), and set the bits we want. */
4503 /* Check for all zeros. */
4504 if (elt == NULL_TREE)
4507 clear_storage (target, expr_size (exp),
4508 TYPE_ALIGN (type) / BITS_PER_UNIT);
4512 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4513 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4514 bitlength = size_binop (PLUS_EXPR,
4515 size_binop (MINUS_EXPR, domain_max, domain_min),
4518 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4520 nbits = TREE_INT_CST_LOW (bitlength);
4522 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4523 are "complicated" (more than one range), initialize (the
4524 constant parts) by copying from a constant. */
4525 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4526 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4528 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4529 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4530 char *bit_buffer = (char *) alloca (nbits);
4531 HOST_WIDE_INT word = 0;
4534 int offset = 0; /* In bytes from beginning of set. */
4535 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4538 if (bit_buffer[ibit])
4540 if (BYTES_BIG_ENDIAN)
4541 word |= (1 << (set_word_size - 1 - bit_pos));
4543 word |= 1 << bit_pos;
4546 if (bit_pos >= set_word_size || ibit == nbits)
4548 if (word != 0 || ! cleared)
4550 rtx datum = GEN_INT (word);
4552 /* The assumption here is that it is safe to use
4553 XEXP if the set is multi-word, but not if
4554 it's single-word. */
4555 if (GET_CODE (target) == MEM)
4557 to_rtx = plus_constant (XEXP (target, 0), offset);
4558 to_rtx = change_address (target, mode, to_rtx);
4560 else if (offset == 0)
4564 emit_move_insn (to_rtx, datum);
4570 offset += set_word_size / BITS_PER_UNIT;
4576 /* Don't bother clearing storage if the set is all ones. */
4577 if (TREE_CHAIN (elt) != NULL_TREE
4578 || (TREE_PURPOSE (elt) == NULL_TREE
4580 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4581 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4582 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4583 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4585 clear_storage (target, expr_size (exp),
4586 TYPE_ALIGN (type) / BITS_PER_UNIT);
4589 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4591 /* start of range of element or NULL */
4592 tree startbit = TREE_PURPOSE (elt);
4593 /* end of range of element, or element value */
4594 tree endbit = TREE_VALUE (elt);
4595 #ifdef TARGET_MEM_FUNCTIONS
4596 HOST_WIDE_INT startb, endb;
4598 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4600 bitlength_rtx = expand_expr (bitlength,
4601 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4603 /* handle non-range tuple element like [ expr ] */
4604 if (startbit == NULL_TREE)
4606 startbit = save_expr (endbit);
4609 startbit = convert (sizetype, startbit);
4610 endbit = convert (sizetype, endbit);
4611 if (! integer_zerop (domain_min))
4613 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4614 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4616 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4617 EXPAND_CONST_ADDRESS);
4618 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4619 EXPAND_CONST_ADDRESS);
4623 targetx = assign_stack_temp (GET_MODE (target),
4624 GET_MODE_SIZE (GET_MODE (target)),
4626 emit_move_insn (targetx, target);
4628 else if (GET_CODE (target) == MEM)
4633 #ifdef TARGET_MEM_FUNCTIONS
4634 /* Optimization: If startbit and endbit are
4635 constants divisible by BITS_PER_UNIT,
4636 call memset instead. */
4637 if (TREE_CODE (startbit) == INTEGER_CST
4638 && TREE_CODE (endbit) == INTEGER_CST
4639 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4640 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4642 emit_library_call (memset_libfunc, 0,
4644 plus_constant (XEXP (targetx, 0),
4645 startb / BITS_PER_UNIT),
4647 constm1_rtx, TYPE_MODE (integer_type_node),
4648 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4649 TYPE_MODE (sizetype));
4654 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4655 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4656 bitlength_rtx, TYPE_MODE (sizetype),
4657 startbit_rtx, TYPE_MODE (sizetype),
4658 endbit_rtx, TYPE_MODE (sizetype));
4661 emit_move_insn (target, targetx);
4669 /* Store the value of EXP (an expression tree)
4670 into a subfield of TARGET which has mode MODE and occupies
4671 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4672 If MODE is VOIDmode, it means that we are storing into a bit-field.
4674 If VALUE_MODE is VOIDmode, return nothing in particular.
4675 UNSIGNEDP is not used in this case.
4677 Otherwise, return an rtx for the value stored. This rtx
4678 has mode VALUE_MODE if that is convenient to do.
4679 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4681 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4682 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4684 ALIAS_SET is the alias set for the destination. This value will
4685 (in general) be different from that for TARGET, since TARGET is a
4686 reference to the containing structure. */
4689 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4690 unsignedp, align, total_size, alias_set)
4692 int bitsize, bitpos;
4693 enum machine_mode mode;
4695 enum machine_mode value_mode;
4701 HOST_WIDE_INT width_mask = 0;
4703 if (TREE_CODE (exp) == ERROR_MARK)
4706 if (bitsize < HOST_BITS_PER_WIDE_INT)
4707 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4709 /* If we are storing into an unaligned field of an aligned union that is
4710 in a register, we may have the mode of TARGET being an integer mode but
4711 MODE == BLKmode. In that case, get an aligned object whose size and
4712 alignment are the same as TARGET and store TARGET into it (we can avoid
4713 the store if the field being stored is the entire width of TARGET). Then
4714 call ourselves recursively to store the field into a BLKmode version of
4715 that object. Finally, load from the object into TARGET. This is not
4716 very efficient in general, but should only be slightly more expensive
4717 than the otherwise-required unaligned accesses. Perhaps this can be
4718 cleaned up later. */
4721 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4723 rtx object = assign_stack_temp (GET_MODE (target),
4724 GET_MODE_SIZE (GET_MODE (target)), 0);
4725 rtx blk_object = copy_rtx (object);
4727 MEM_SET_IN_STRUCT_P (object, 1);
4728 MEM_SET_IN_STRUCT_P (blk_object, 1);
4729 PUT_MODE (blk_object, BLKmode);
4731 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4732 emit_move_insn (object, target);
4734 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4735 align, total_size, alias_set);
4737 /* Even though we aren't returning target, we need to
4738 give it the updated value. */
4739 emit_move_insn (target, object);
4744 /* If the structure is in a register or if the component
4745 is a bit field, we cannot use addressing to access it.
4746 Use bit-field techniques or SUBREG to store in it. */
4748 if (mode == VOIDmode
4749 || (mode != BLKmode && ! direct_store[(int) mode]
4750 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4751 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4752 || GET_CODE (target) == REG
4753 || GET_CODE (target) == SUBREG
4754 /* If the field isn't aligned enough to store as an ordinary memref,
4755 store it as a bit field. */
4756 || (SLOW_UNALIGNED_ACCESS
4757 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4758 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4760 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4762 /* If BITSIZE is narrower than the size of the type of EXP
4763 we will be narrowing TEMP. Normally, what's wanted are the
4764 low-order bits. However, if EXP's type is a record and this is
4765 big-endian machine, we want the upper BITSIZE bits. */
4766 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4767 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4768 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4769 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4770 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4774 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4776 if (mode != VOIDmode && mode != BLKmode
4777 && mode != TYPE_MODE (TREE_TYPE (exp)))
4778 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4780 /* If the modes of TARGET and TEMP are both BLKmode, both
4781 must be in memory and BITPOS must be aligned on a byte
4782 boundary. If so, we simply do a block copy. */
4783 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4785 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4786 || bitpos % BITS_PER_UNIT != 0)
4789 target = change_address (target, VOIDmode,
4790 plus_constant (XEXP (target, 0),
4791 bitpos / BITS_PER_UNIT));
4793 emit_block_move (target, temp,
4794 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4798 return value_mode == VOIDmode ? const0_rtx : target;
4801 /* Store the value in the bitfield. */
4802 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4803 if (value_mode != VOIDmode)
4805 /* The caller wants an rtx for the value. */
4806 /* If possible, avoid refetching from the bitfield itself. */
4808 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4811 enum machine_mode tmode;
4814 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4815 tmode = GET_MODE (temp);
4816 if (tmode == VOIDmode)
4818 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4819 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4820 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4822 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4823 NULL_RTX, value_mode, 0, align,
4830 rtx addr = XEXP (target, 0);
4833 /* If a value is wanted, it must be the lhs;
4834 so make the address stable for multiple use. */
4836 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4837 && ! CONSTANT_ADDRESS_P (addr)
4838 /* A frame-pointer reference is already stable. */
4839 && ! (GET_CODE (addr) == PLUS
4840 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4841 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4842 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4843 addr = copy_to_reg (addr);
4845 /* Now build a reference to just the desired component. */
4847 to_rtx = copy_rtx (change_address (target, mode,
4848 plus_constant (addr,
4850 / BITS_PER_UNIT))));
4851 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4852 MEM_ALIAS_SET (to_rtx) = alias_set;
4854 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4858 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4859 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4860 ARRAY_REFs and find the ultimate containing object, which we return.
4862 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4863 bit position, and *PUNSIGNEDP to the signedness of the field.
4864 If the position of the field is variable, we store a tree
4865 giving the variable offset (in units) in *POFFSET.
4866 This offset is in addition to the bit position.
4867 If the position is not variable, we store 0 in *POFFSET.
4868 We set *PALIGNMENT to the alignment in bytes of the address that will be
4869 computed. This is the alignment of the thing we return if *POFFSET
4870 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4872 If any of the extraction expressions is volatile,
4873 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4875 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4876 is a mode that can be used to access the field. In that case, *PBITSIZE
4879 If the field describes a variable-sized object, *PMODE is set to
4880 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4881 this case, but the address of the object can be found. */
4884 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4885 punsignedp, pvolatilep, palignment)
4890 enum machine_mode *pmode;
4895 tree orig_exp = exp;
4897 enum machine_mode mode = VOIDmode;
4898 tree offset = integer_zero_node;
4899 unsigned int alignment = BIGGEST_ALIGNMENT;
4901 if (TREE_CODE (exp) == COMPONENT_REF)
4903 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4904 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4905 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4906 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4908 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4910 size_tree = TREE_OPERAND (exp, 1);
4911 *punsignedp = TREE_UNSIGNED (exp);
4915 mode = TYPE_MODE (TREE_TYPE (exp));
4916 if (mode == BLKmode)
4917 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4919 *pbitsize = GET_MODE_BITSIZE (mode);
4920 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4925 if (TREE_CODE (size_tree) != INTEGER_CST)
4926 mode = BLKmode, *pbitsize = -1;
4928 *pbitsize = TREE_INT_CST_LOW (size_tree);
4931 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4932 and find the ultimate containing object. */
4938 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4940 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4941 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4942 : TREE_OPERAND (exp, 2));
4943 tree constant = integer_zero_node, var = pos;
4945 /* If this field hasn't been filled in yet, don't go
4946 past it. This should only happen when folding expressions
4947 made during type construction. */
4951 /* Assume here that the offset is a multiple of a unit.
4952 If not, there should be an explicitly added constant. */
4953 if (TREE_CODE (pos) == PLUS_EXPR
4954 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4955 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4956 else if (TREE_CODE (pos) == INTEGER_CST)
4957 constant = pos, var = integer_zero_node;
4959 *pbitpos += TREE_INT_CST_LOW (constant);
4960 offset = size_binop (PLUS_EXPR, offset,
4961 size_binop (EXACT_DIV_EXPR, var,
4962 size_int (BITS_PER_UNIT)));
4965 else if (TREE_CODE (exp) == ARRAY_REF)
4967 /* This code is based on the code in case ARRAY_REF in expand_expr
4968 below. We assume here that the size of an array element is
4969 always an integral multiple of BITS_PER_UNIT. */
4971 tree index = TREE_OPERAND (exp, 1);
4972 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4974 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4975 tree index_type = TREE_TYPE (index);
4978 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4980 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4982 index_type = TREE_TYPE (index);
4985 /* Optimize the special-case of a zero lower bound.
4987 We convert the low_bound to sizetype to avoid some problems
4988 with constant folding. (E.g. suppose the lower bound is 1,
4989 and its mode is QI. Without the conversion, (ARRAY
4990 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4991 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4993 But sizetype isn't quite right either (especially if
4994 the lowbound is negative). FIXME */
4996 if (! integer_zerop (low_bound))
4997 index = fold (build (MINUS_EXPR, index_type, index,
4998 convert (sizetype, low_bound)));
5000 if (TREE_CODE (index) == INTEGER_CST)
5002 index = convert (sbitsizetype, index);
5003 index_type = TREE_TYPE (index);
5006 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5007 convert (sbitsizetype,
5008 TYPE_SIZE (TREE_TYPE (exp)))));
5010 if (TREE_CODE (xindex) == INTEGER_CST
5011 && TREE_INT_CST_HIGH (xindex) == 0)
5012 *pbitpos += TREE_INT_CST_LOW (xindex);
5015 /* Either the bit offset calculated above is not constant, or
5016 it overflowed. In either case, redo the multiplication
5017 against the size in units. This is especially important
5018 in the non-constant case to avoid a division at runtime. */
5019 xindex = fold (build (MULT_EXPR, ssizetype, index,
5021 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5023 if (contains_placeholder_p (xindex))
5024 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
5026 offset = size_binop (PLUS_EXPR, offset, xindex);
5029 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5030 && ! ((TREE_CODE (exp) == NOP_EXPR
5031 || TREE_CODE (exp) == CONVERT_EXPR)
5032 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
5033 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5035 && (TYPE_MODE (TREE_TYPE (exp))
5036 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5039 /* If any reference in the chain is volatile, the effect is volatile. */
5040 if (TREE_THIS_VOLATILE (exp))
5043 /* If the offset is non-constant already, then we can't assume any
5044 alignment more than the alignment here. */
5045 if (! integer_zerop (offset))
5046 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5048 exp = TREE_OPERAND (exp, 0);
5051 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5052 alignment = MIN (alignment, DECL_ALIGN (exp));
5053 else if (TREE_TYPE (exp) != 0)
5054 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5056 if (integer_zerop (offset))
5059 if (offset != 0 && contains_placeholder_p (offset))
5060 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5064 *palignment = alignment / BITS_PER_UNIT;
5068 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5069 static enum memory_use_mode
5070 get_memory_usage_from_modifier (modifier)
5071 enum expand_modifier modifier;
5077 return MEMORY_USE_RO;
5079 case EXPAND_MEMORY_USE_WO:
5080 return MEMORY_USE_WO;
5082 case EXPAND_MEMORY_USE_RW:
5083 return MEMORY_USE_RW;
5085 case EXPAND_MEMORY_USE_DONT:
5086 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5087 MEMORY_USE_DONT, because they are modifiers to a call of
5088 expand_expr in the ADDR_EXPR case of expand_expr. */
5089 case EXPAND_CONST_ADDRESS:
5090 case EXPAND_INITIALIZER:
5091 return MEMORY_USE_DONT;
5092 case EXPAND_MEMORY_USE_BAD:
5098 /* Given an rtx VALUE that may contain additions and multiplications,
5099 return an equivalent value that just refers to a register or memory.
5100 This is done by generating instructions to perform the arithmetic
5101 and returning a pseudo-register containing the value.
5103 The returned value may be a REG, SUBREG, MEM or constant. */
5106 force_operand (value, target)
5109 register optab binoptab = 0;
5110 /* Use a temporary to force order of execution of calls to
5114 /* Use subtarget as the target for operand 0 of a binary operation. */
5115 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5117 /* Check for a PIC address load. */
5119 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5120 && XEXP (value, 0) == pic_offset_table_rtx
5121 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5122 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5123 || GET_CODE (XEXP (value, 1)) == CONST))
5126 subtarget = gen_reg_rtx (GET_MODE (value));
5127 emit_move_insn (subtarget, value);
5131 if (GET_CODE (value) == PLUS)
5132 binoptab = add_optab;
5133 else if (GET_CODE (value) == MINUS)
5134 binoptab = sub_optab;
5135 else if (GET_CODE (value) == MULT)
5137 op2 = XEXP (value, 1);
5138 if (!CONSTANT_P (op2)
5139 && !(GET_CODE (op2) == REG && op2 != subtarget))
5141 tmp = force_operand (XEXP (value, 0), subtarget);
5142 return expand_mult (GET_MODE (value), tmp,
5143 force_operand (op2, NULL_RTX),
5149 op2 = XEXP (value, 1);
5150 if (!CONSTANT_P (op2)
5151 && !(GET_CODE (op2) == REG && op2 != subtarget))
5153 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5155 binoptab = add_optab;
5156 op2 = negate_rtx (GET_MODE (value), op2);
5159 /* Check for an addition with OP2 a constant integer and our first
5160 operand a PLUS of a virtual register and something else. In that
5161 case, we want to emit the sum of the virtual register and the
5162 constant first and then add the other value. This allows virtual
5163 register instantiation to simply modify the constant rather than
5164 creating another one around this addition. */
5165 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5166 && GET_CODE (XEXP (value, 0)) == PLUS
5167 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5168 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5169 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5171 rtx temp = expand_binop (GET_MODE (value), binoptab,
5172 XEXP (XEXP (value, 0), 0), op2,
5173 subtarget, 0, OPTAB_LIB_WIDEN);
5174 return expand_binop (GET_MODE (value), binoptab, temp,
5175 force_operand (XEXP (XEXP (value, 0), 1), 0),
5176 target, 0, OPTAB_LIB_WIDEN);
5179 tmp = force_operand (XEXP (value, 0), subtarget);
5180 return expand_binop (GET_MODE (value), binoptab, tmp,
5181 force_operand (op2, NULL_RTX),
5182 target, 0, OPTAB_LIB_WIDEN);
5183 /* We give UNSIGNEDP = 0 to expand_binop
5184 because the only operations we are expanding here are signed ones. */
5189 /* Subroutine of expand_expr:
5190 save the non-copied parts (LIST) of an expr (LHS), and return a list
5191 which can restore these values to their previous values,
5192 should something modify their storage. */
5195 save_noncopied_parts (lhs, list)
5202 for (tail = list; tail; tail = TREE_CHAIN (tail))
5203 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5204 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5207 tree part = TREE_VALUE (tail);
5208 tree part_type = TREE_TYPE (part);
5209 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5210 rtx target = assign_temp (part_type, 0, 1, 1);
5211 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5212 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5213 parts = tree_cons (to_be_saved,
5214 build (RTL_EXPR, part_type, NULL_TREE,
5217 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5222 /* Subroutine of expand_expr:
5223 record the non-copied parts (LIST) of an expr (LHS), and return a list
5224 which specifies the initial values of these parts. */
5227 init_noncopied_parts (lhs, list)
5234 for (tail = list; tail; tail = TREE_CHAIN (tail))
5235 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5236 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5237 else if (TREE_PURPOSE (tail))
5239 tree part = TREE_VALUE (tail);
5240 tree part_type = TREE_TYPE (part);
5241 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5242 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5247 /* Subroutine of expand_expr: return nonzero iff there is no way that
5248 EXP can reference X, which is being modified. TOP_P is nonzero if this
5249 call is going to be used to determine whether we need a temporary
5250 for EXP, as opposed to a recursive call to this function.
5252 It is always safe for this routine to return zero since it merely
5253 searches for optimization opportunities. */
5256 safe_from_p (x, exp, top_p)
5263 static int save_expr_count;
5264 static int save_expr_size = 0;
5265 static tree *save_expr_rewritten;
5266 static tree save_expr_trees[256];
5269 /* If EXP has varying size, we MUST use a target since we currently
5270 have no way of allocating temporaries of variable size
5271 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5272 So we assume here that something at a higher level has prevented a
5273 clash. This is somewhat bogus, but the best we can do. Only
5274 do this when X is BLKmode and when we are at the top level. */
5275 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5276 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5277 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5278 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5279 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5281 && GET_MODE (x) == BLKmode))
5284 if (top_p && save_expr_size == 0)
5288 save_expr_count = 0;
5289 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5290 save_expr_rewritten = &save_expr_trees[0];
5292 rtn = safe_from_p (x, exp, 1);
5294 for (i = 0; i < save_expr_count; ++i)
5296 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5298 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5306 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5307 find the underlying pseudo. */
5308 if (GET_CODE (x) == SUBREG)
5311 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5315 /* If X is a location in the outgoing argument area, it is always safe. */
5316 if (GET_CODE (x) == MEM
5317 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5318 || (GET_CODE (XEXP (x, 0)) == PLUS
5319 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5322 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5325 exp_rtl = DECL_RTL (exp);
5332 if (TREE_CODE (exp) == TREE_LIST)
5333 return ((TREE_VALUE (exp) == 0
5334 || safe_from_p (x, TREE_VALUE (exp), 0))
5335 && (TREE_CHAIN (exp) == 0
5336 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5337 else if (TREE_CODE (exp) == ERROR_MARK)
5338 return 1; /* An already-visited SAVE_EXPR? */
5343 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5347 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5348 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5352 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5353 the expression. If it is set, we conflict iff we are that rtx or
5354 both are in memory. Otherwise, we check all operands of the
5355 expression recursively. */
5357 switch (TREE_CODE (exp))
5360 return (staticp (TREE_OPERAND (exp, 0))
5361 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5362 || TREE_STATIC (exp));
5365 if (GET_CODE (x) == MEM)
5370 exp_rtl = CALL_EXPR_RTL (exp);
5373 /* Assume that the call will clobber all hard registers and
5375 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5376 || GET_CODE (x) == MEM)
5383 /* If a sequence exists, we would have to scan every instruction
5384 in the sequence to see if it was safe. This is probably not
5386 if (RTL_EXPR_SEQUENCE (exp))
5389 exp_rtl = RTL_EXPR_RTL (exp);
5392 case WITH_CLEANUP_EXPR:
5393 exp_rtl = RTL_EXPR_RTL (exp);
5396 case CLEANUP_POINT_EXPR:
5397 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5400 exp_rtl = SAVE_EXPR_RTL (exp);
5404 /* This SAVE_EXPR might appear many times in the top-level
5405 safe_from_p() expression, and if it has a complex
5406 subexpression, examining it multiple times could result
5407 in a combinatorial explosion. E.g. on an Alpha
5408 running at least 200MHz, a Fortran test case compiled with
5409 optimization took about 28 minutes to compile -- even though
5410 it was only a few lines long, and the complicated line causing
5411 so much time to be spent in the earlier version of safe_from_p()
5412 had only 293 or so unique nodes.
5414 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5415 where it is so we can turn it back in the top-level safe_from_p()
5418 /* For now, don't bother re-sizing the array. */
5419 if (save_expr_count >= save_expr_size)
5421 save_expr_rewritten[save_expr_count++] = exp;
5423 nops = tree_code_length[(int) SAVE_EXPR];
5424 for (i = 0; i < nops; i++)
5426 tree operand = TREE_OPERAND (exp, i);
5427 if (operand == NULL_TREE)
5429 TREE_SET_CODE (exp, ERROR_MARK);
5430 if (!safe_from_p (x, operand, 0))
5432 TREE_SET_CODE (exp, SAVE_EXPR);
5434 TREE_SET_CODE (exp, ERROR_MARK);
5438 /* The only operand we look at is operand 1. The rest aren't
5439 part of the expression. */
5440 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5442 case METHOD_CALL_EXPR:
5443 /* This takes a rtx argument, but shouldn't appear here. */
5450 /* If we have an rtx, we do not need to scan our operands. */
5454 nops = tree_code_length[(int) TREE_CODE (exp)];
5455 for (i = 0; i < nops; i++)
5456 if (TREE_OPERAND (exp, i) != 0
5457 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5461 /* If we have an rtl, find any enclosed object. Then see if we conflict
5465 if (GET_CODE (exp_rtl) == SUBREG)
5467 exp_rtl = SUBREG_REG (exp_rtl);
5468 if (GET_CODE (exp_rtl) == REG
5469 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5473 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5474 are memory and EXP is not readonly. */
5475 return ! (rtx_equal_p (x, exp_rtl)
5476 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5477 && ! TREE_READONLY (exp)));
5480 /* If we reach here, it is safe. */
5484 /* Subroutine of expand_expr: return nonzero iff EXP is an
5485 expression whose type is statically determinable. */
5491 if (TREE_CODE (exp) == PARM_DECL
5492 || TREE_CODE (exp) == VAR_DECL
5493 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5494 || TREE_CODE (exp) == COMPONENT_REF
5495 || TREE_CODE (exp) == ARRAY_REF)
5500 /* Subroutine of expand_expr: return rtx if EXP is a
5501 variable or parameter; else return 0. */
5508 switch (TREE_CODE (exp))
5512 return DECL_RTL (exp);
5518 #ifdef MAX_INTEGER_COMPUTATION_MODE
5520 check_max_integer_computation_mode (exp)
5523 enum tree_code code = TREE_CODE (exp);
5524 enum machine_mode mode;
5526 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5527 if (code == NOP_EXPR
5528 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5531 /* First check the type of the overall operation. We need only look at
5532 unary, binary and relational operations. */
5533 if (TREE_CODE_CLASS (code) == '1'
5534 || TREE_CODE_CLASS (code) == '2'
5535 || TREE_CODE_CLASS (code) == '<')
5537 mode = TYPE_MODE (TREE_TYPE (exp));
5538 if (GET_MODE_CLASS (mode) == MODE_INT
5539 && mode > MAX_INTEGER_COMPUTATION_MODE)
5540 fatal ("unsupported wide integer operation");
5543 /* Check operand of a unary op. */
5544 if (TREE_CODE_CLASS (code) == '1')
5546 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5547 if (GET_MODE_CLASS (mode) == MODE_INT
5548 && mode > MAX_INTEGER_COMPUTATION_MODE)
5549 fatal ("unsupported wide integer operation");
5552 /* Check operands of a binary/comparison op. */
5553 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5555 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5556 if (GET_MODE_CLASS (mode) == MODE_INT
5557 && mode > MAX_INTEGER_COMPUTATION_MODE)
5558 fatal ("unsupported wide integer operation");
5560 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5561 if (GET_MODE_CLASS (mode) == MODE_INT
5562 && mode > MAX_INTEGER_COMPUTATION_MODE)
5563 fatal ("unsupported wide integer operation");
5569 /* expand_expr: generate code for computing expression EXP.
5570 An rtx for the computed value is returned. The value is never null.
5571 In the case of a void EXP, const0_rtx is returned.
5573 The value may be stored in TARGET if TARGET is nonzero.
5574 TARGET is just a suggestion; callers must assume that
5575 the rtx returned may not be the same as TARGET.
5577 If TARGET is CONST0_RTX, it means that the value will be ignored.
5579 If TMODE is not VOIDmode, it suggests generating the
5580 result in mode TMODE. But this is done only when convenient.
5581 Otherwise, TMODE is ignored and the value generated in its natural mode.
5582 TMODE is just a suggestion; callers must assume that
5583 the rtx returned may not have mode TMODE.
5585 Note that TARGET may have neither TMODE nor MODE. In that case, it
5586 probably will not be used.
5588 If MODIFIER is EXPAND_SUM then when EXP is an addition
5589 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5590 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5591 products as above, or REG or MEM, or constant.
5592 Ordinarily in such cases we would output mul or add instructions
5593 and then return a pseudo reg containing the sum.
5595 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5596 it also marks a label as absolutely required (it can't be dead).
5597 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5598 This is used for outputting expressions used in initializers.
5600 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5601 with a constant address even if that address is not normally legitimate.
5602 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5605 expand_expr (exp, target, tmode, modifier)
5608 enum machine_mode tmode;
5609 enum expand_modifier modifier;
5611 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5612 This is static so it will be accessible to our recursive callees. */
5613 static tree placeholder_list = 0;
5614 register rtx op0, op1, temp;
5615 tree type = TREE_TYPE (exp);
5616 int unsignedp = TREE_UNSIGNED (type);
5617 register enum machine_mode mode;
5618 register enum tree_code code = TREE_CODE (exp);
5620 rtx subtarget, original_target;
5623 /* Used by check-memory-usage to make modifier read only. */
5624 enum expand_modifier ro_modifier;
5626 /* Handle ERROR_MARK before anybody tries to access its type. */
5627 if (TREE_CODE (exp) == ERROR_MARK)
5629 op0 = CONST0_RTX (tmode);
5635 mode = TYPE_MODE (type);
5636 /* Use subtarget as the target for operand 0 of a binary operation. */
5637 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5638 original_target = target;
5639 ignore = (target == const0_rtx
5640 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5641 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5642 || code == COND_EXPR)
5643 && TREE_CODE (type) == VOID_TYPE));
5645 /* Make a read-only version of the modifier. */
5646 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5647 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5648 ro_modifier = modifier;
5650 ro_modifier = EXPAND_NORMAL;
5652 /* Don't use hard regs as subtargets, because the combiner
5653 can only handle pseudo regs. */
5654 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5656 /* Avoid subtargets inside loops,
5657 since they hide some invariant expressions. */
5658 if (preserve_subexpressions_p ())
5661 /* If we are going to ignore this result, we need only do something
5662 if there is a side-effect somewhere in the expression. If there
5663 is, short-circuit the most common cases here. Note that we must
5664 not call expand_expr with anything but const0_rtx in case this
5665 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5669 if (! TREE_SIDE_EFFECTS (exp))
5672 /* Ensure we reference a volatile object even if value is ignored. */
5673 if (TREE_THIS_VOLATILE (exp)
5674 && TREE_CODE (exp) != FUNCTION_DECL
5675 && mode != VOIDmode && mode != BLKmode)
5677 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5678 if (GET_CODE (temp) == MEM)
5679 temp = copy_to_reg (temp);
5683 if (TREE_CODE_CLASS (code) == '1')
5684 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5685 VOIDmode, ro_modifier);
5686 else if (TREE_CODE_CLASS (code) == '2'
5687 || TREE_CODE_CLASS (code) == '<')
5689 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5690 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5693 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5694 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5695 /* If the second operand has no side effects, just evaluate
5697 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5698 VOIDmode, ro_modifier);
5703 #ifdef MAX_INTEGER_COMPUTATION_MODE
5705 && TREE_CODE (exp) != INTEGER_CST
5706 && TREE_CODE (exp) != PARM_DECL
5707 && TREE_CODE (exp) != ARRAY_REF
5708 && TREE_CODE (exp) != COMPONENT_REF
5709 && TREE_CODE (exp) != BIT_FIELD_REF
5710 && TREE_CODE (exp) != INDIRECT_REF
5711 && TREE_CODE (exp) != CALL_EXPR
5712 && TREE_CODE (exp) != VAR_DECL)
5714 enum machine_mode mode = GET_MODE (target);
5716 if (GET_MODE_CLASS (mode) == MODE_INT
5717 && mode > MAX_INTEGER_COMPUTATION_MODE)
5718 fatal ("unsupported wide integer operation");
5721 if (TREE_CODE (exp) != INTEGER_CST
5722 && TREE_CODE (exp) != PARM_DECL
5723 && TREE_CODE (exp) != ARRAY_REF
5724 && TREE_CODE (exp) != COMPONENT_REF
5725 && TREE_CODE (exp) != BIT_FIELD_REF
5726 && TREE_CODE (exp) != INDIRECT_REF
5727 && TREE_CODE (exp) != VAR_DECL
5728 && TREE_CODE (exp) != CALL_EXPR
5729 && GET_MODE_CLASS (tmode) == MODE_INT
5730 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5731 fatal ("unsupported wide integer operation");
5733 check_max_integer_computation_mode (exp);
5736 /* If will do cse, generate all results into pseudo registers
5737 since 1) that allows cse to find more things
5738 and 2) otherwise cse could produce an insn the machine
5741 if (! cse_not_expected && mode != BLKmode && target
5742 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5749 tree function = decl_function_context (exp);
5750 /* Handle using a label in a containing function. */
5751 if (function != current_function_decl
5752 && function != inline_function_decl && function != 0)
5754 struct function *p = find_function_data (function);
5755 /* Allocate in the memory associated with the function
5756 that the label is in. */
5757 push_obstacks (p->function_obstack,
5758 p->function_maybepermanent_obstack);
5760 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5767 if (modifier == EXPAND_INITIALIZER)
5768 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5772 temp = gen_rtx_MEM (FUNCTION_MODE,
5773 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5774 if (function != current_function_decl
5775 && function != inline_function_decl && function != 0)
5776 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5781 if (DECL_RTL (exp) == 0)
5783 error_with_decl (exp, "prior parameter's size depends on `%s'");
5784 return CONST0_RTX (mode);
5787 /* ... fall through ... */
5790 /* If a static var's type was incomplete when the decl was written,
5791 but the type is complete now, lay out the decl now. */
5792 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5793 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5795 push_obstacks_nochange ();
5796 end_temporary_allocation ();
5797 layout_decl (exp, 0);
5798 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5802 /* Although static-storage variables start off initialized, according to
5803 ANSI C, a memcpy could overwrite them with uninitialized values. So
5804 we check them too. This also lets us check for read-only variables
5805 accessed via a non-const declaration, in case it won't be detected
5806 any other way (e.g., in an embedded system or OS kernel without
5809 Aggregates are not checked here; they're handled elsewhere. */
5810 if (current_function_check_memory_usage && code == VAR_DECL
5811 && GET_CODE (DECL_RTL (exp)) == MEM
5812 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5814 enum memory_use_mode memory_usage;
5815 memory_usage = get_memory_usage_from_modifier (modifier);
5817 if (memory_usage != MEMORY_USE_DONT)
5818 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5819 XEXP (DECL_RTL (exp), 0), Pmode,
5820 GEN_INT (int_size_in_bytes (type)),
5821 TYPE_MODE (sizetype),
5822 GEN_INT (memory_usage),
5823 TYPE_MODE (integer_type_node));
5826 /* ... fall through ... */
5830 if (DECL_RTL (exp) == 0)
5833 /* Ensure variable marked as used even if it doesn't go through
5834 a parser. If it hasn't be used yet, write out an external
5836 if (! TREE_USED (exp))
5838 assemble_external (exp);
5839 TREE_USED (exp) = 1;
5842 /* Show we haven't gotten RTL for this yet. */
5845 /* Handle variables inherited from containing functions. */
5846 context = decl_function_context (exp);
5848 /* We treat inline_function_decl as an alias for the current function
5849 because that is the inline function whose vars, types, etc.
5850 are being merged into the current function.
5851 See expand_inline_function. */
5853 if (context != 0 && context != current_function_decl
5854 && context != inline_function_decl
5855 /* If var is static, we don't need a static chain to access it. */
5856 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5857 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5861 /* Mark as non-local and addressable. */
5862 DECL_NONLOCAL (exp) = 1;
5863 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5865 mark_addressable (exp);
5866 if (GET_CODE (DECL_RTL (exp)) != MEM)
5868 addr = XEXP (DECL_RTL (exp), 0);
5869 if (GET_CODE (addr) == MEM)
5870 addr = gen_rtx_MEM (Pmode,
5871 fix_lexical_addr (XEXP (addr, 0), exp));
5873 addr = fix_lexical_addr (addr, exp);
5874 temp = change_address (DECL_RTL (exp), mode, addr);
5877 /* This is the case of an array whose size is to be determined
5878 from its initializer, while the initializer is still being parsed.
5881 else if (GET_CODE (DECL_RTL (exp)) == MEM
5882 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5883 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5884 XEXP (DECL_RTL (exp), 0));
5886 /* If DECL_RTL is memory, we are in the normal case and either
5887 the address is not valid or it is not a register and -fforce-addr
5888 is specified, get the address into a register. */
5890 else if (GET_CODE (DECL_RTL (exp)) == MEM
5891 && modifier != EXPAND_CONST_ADDRESS
5892 && modifier != EXPAND_SUM
5893 && modifier != EXPAND_INITIALIZER
5894 && (! memory_address_p (DECL_MODE (exp),
5895 XEXP (DECL_RTL (exp), 0))
5897 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5898 temp = change_address (DECL_RTL (exp), VOIDmode,
5899 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5901 /* If we got something, return it. But first, set the alignment
5902 the address is a register. */
5905 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5906 mark_reg_pointer (XEXP (temp, 0),
5907 DECL_ALIGN (exp) / BITS_PER_UNIT);
5912 /* If the mode of DECL_RTL does not match that of the decl, it
5913 must be a promoted value. We return a SUBREG of the wanted mode,
5914 but mark it so that we know that it was already extended. */
5916 if (GET_CODE (DECL_RTL (exp)) == REG
5917 && GET_MODE (DECL_RTL (exp)) != mode)
5919 /* Get the signedness used for this variable. Ensure we get the
5920 same mode we got when the variable was declared. */
5921 if (GET_MODE (DECL_RTL (exp))
5922 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5925 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5926 SUBREG_PROMOTED_VAR_P (temp) = 1;
5927 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5931 return DECL_RTL (exp);
5934 return immed_double_const (TREE_INT_CST_LOW (exp),
5935 TREE_INT_CST_HIGH (exp),
5939 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5940 EXPAND_MEMORY_USE_BAD);
5943 /* If optimized, generate immediate CONST_DOUBLE
5944 which will be turned into memory by reload if necessary.
5946 We used to force a register so that loop.c could see it. But
5947 this does not allow gen_* patterns to perform optimizations with
5948 the constants. It also produces two insns in cases like "x = 1.0;".
5949 On most machines, floating-point constants are not permitted in
5950 many insns, so we'd end up copying it to a register in any case.
5952 Now, we do the copying in expand_binop, if appropriate. */
5953 return immed_real_const (exp);
5957 if (! TREE_CST_RTL (exp))
5958 output_constant_def (exp);
5960 /* TREE_CST_RTL probably contains a constant address.
5961 On RISC machines where a constant address isn't valid,
5962 make some insns to get that address into a register. */
5963 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5964 && modifier != EXPAND_CONST_ADDRESS
5965 && modifier != EXPAND_INITIALIZER
5966 && modifier != EXPAND_SUM
5967 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5969 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5970 return change_address (TREE_CST_RTL (exp), VOIDmode,
5971 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5972 return TREE_CST_RTL (exp);
5974 case EXPR_WITH_FILE_LOCATION:
5977 char *saved_input_filename = input_filename;
5978 int saved_lineno = lineno;
5979 input_filename = EXPR_WFL_FILENAME (exp);
5980 lineno = EXPR_WFL_LINENO (exp);
5981 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5982 emit_line_note (input_filename, lineno);
5983 /* Possibly avoid switching back and force here */
5984 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5985 input_filename = saved_input_filename;
5986 lineno = saved_lineno;
5991 context = decl_function_context (exp);
5993 /* If this SAVE_EXPR was at global context, assume we are an
5994 initialization function and move it into our context. */
5996 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5998 /* We treat inline_function_decl as an alias for the current function
5999 because that is the inline function whose vars, types, etc.
6000 are being merged into the current function.
6001 See expand_inline_function. */
6002 if (context == current_function_decl || context == inline_function_decl)
6005 /* If this is non-local, handle it. */
6008 /* The following call just exists to abort if the context is
6009 not of a containing function. */
6010 find_function_data (context);
6012 temp = SAVE_EXPR_RTL (exp);
6013 if (temp && GET_CODE (temp) == REG)
6015 put_var_into_stack (exp);
6016 temp = SAVE_EXPR_RTL (exp);
6018 if (temp == 0 || GET_CODE (temp) != MEM)
6020 return change_address (temp, mode,
6021 fix_lexical_addr (XEXP (temp, 0), exp));
6023 if (SAVE_EXPR_RTL (exp) == 0)
6025 if (mode == VOIDmode)
6028 temp = assign_temp (type, 3, 0, 0);
6030 SAVE_EXPR_RTL (exp) = temp;
6031 if (!optimize && GET_CODE (temp) == REG)
6032 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6035 /* If the mode of TEMP does not match that of the expression, it
6036 must be a promoted value. We pass store_expr a SUBREG of the
6037 wanted mode but mark it so that we know that it was already
6038 extended. Note that `unsignedp' was modified above in
6041 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6043 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6044 SUBREG_PROMOTED_VAR_P (temp) = 1;
6045 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6048 if (temp == const0_rtx)
6049 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6050 EXPAND_MEMORY_USE_BAD);
6052 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6054 TREE_USED (exp) = 1;
6057 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6058 must be a promoted value. We return a SUBREG of the wanted mode,
6059 but mark it so that we know that it was already extended. */
6061 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6062 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6064 /* Compute the signedness and make the proper SUBREG. */
6065 promote_mode (type, mode, &unsignedp, 0);
6066 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6067 SUBREG_PROMOTED_VAR_P (temp) = 1;
6068 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6072 return SAVE_EXPR_RTL (exp);
6077 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6078 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6082 case PLACEHOLDER_EXPR:
6084 tree placeholder_expr;
6086 /* If there is an object on the head of the placeholder list,
6087 see if some object in it of type TYPE or a pointer to it. For
6088 further information, see tree.def. */
6089 for (placeholder_expr = placeholder_list;
6090 placeholder_expr != 0;
6091 placeholder_expr = TREE_CHAIN (placeholder_expr))
6093 tree need_type = TYPE_MAIN_VARIANT (type);
6095 tree old_list = placeholder_list;
6098 /* Find the outermost reference that is of the type we want.
6099 If none, see if any object has a type that is a pointer to
6100 the type we want. */
6101 for (elt = TREE_PURPOSE (placeholder_expr);
6102 elt != 0 && object == 0;
6104 = ((TREE_CODE (elt) == COMPOUND_EXPR
6105 || TREE_CODE (elt) == COND_EXPR)
6106 ? TREE_OPERAND (elt, 1)
6107 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6108 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6109 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6110 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6111 ? TREE_OPERAND (elt, 0) : 0))
6112 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6115 for (elt = TREE_PURPOSE (placeholder_expr);
6116 elt != 0 && object == 0;
6118 = ((TREE_CODE (elt) == COMPOUND_EXPR
6119 || TREE_CODE (elt) == COND_EXPR)
6120 ? TREE_OPERAND (elt, 1)
6121 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6122 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6123 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6124 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6125 ? TREE_OPERAND (elt, 0) : 0))
6126 if (POINTER_TYPE_P (TREE_TYPE (elt))
6127 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6129 object = build1 (INDIRECT_REF, need_type, elt);
6133 /* Expand this object skipping the list entries before
6134 it was found in case it is also a PLACEHOLDER_EXPR.
6135 In that case, we want to translate it using subsequent
6137 placeholder_list = TREE_CHAIN (placeholder_expr);
6138 temp = expand_expr (object, original_target, tmode,
6140 placeholder_list = old_list;
6146 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6149 case WITH_RECORD_EXPR:
6150 /* Put the object on the placeholder list, expand our first operand,
6151 and pop the list. */
6152 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6154 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6155 tmode, ro_modifier);
6156 placeholder_list = TREE_CHAIN (placeholder_list);
6160 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6161 expand_goto (TREE_OPERAND (exp, 0));
6163 expand_computed_goto (TREE_OPERAND (exp, 0));
6167 expand_exit_loop_if_false (NULL_PTR,
6168 invert_truthvalue (TREE_OPERAND (exp, 0)));
6171 case LABELED_BLOCK_EXPR:
6172 if (LABELED_BLOCK_BODY (exp))
6173 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6174 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6177 case EXIT_BLOCK_EXPR:
6178 if (EXIT_BLOCK_RETURN (exp))
6179 sorry ("returned value in block_exit_expr");
6180 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6185 expand_start_loop (1);
6186 expand_expr_stmt (TREE_OPERAND (exp, 0));
6194 tree vars = TREE_OPERAND (exp, 0);
6195 int vars_need_expansion = 0;
6197 /* Need to open a binding contour here because
6198 if there are any cleanups they must be contained here. */
6199 expand_start_bindings (0);
6201 /* Mark the corresponding BLOCK for output in its proper place. */
6202 if (TREE_OPERAND (exp, 2) != 0
6203 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6204 insert_block (TREE_OPERAND (exp, 2));
6206 /* If VARS have not yet been expanded, expand them now. */
6209 if (DECL_RTL (vars) == 0)
6211 vars_need_expansion = 1;
6214 expand_decl_init (vars);
6215 vars = TREE_CHAIN (vars);
6218 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6220 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6226 if (RTL_EXPR_SEQUENCE (exp))
6228 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6230 emit_insns (RTL_EXPR_SEQUENCE (exp));
6231 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6233 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6234 free_temps_for_rtl_expr (exp);
6235 return RTL_EXPR_RTL (exp);
6238 /* If we don't need the result, just ensure we evaluate any
6243 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6244 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6245 EXPAND_MEMORY_USE_BAD);
6249 /* All elts simple constants => refer to a constant in memory. But
6250 if this is a non-BLKmode mode, let it store a field at a time
6251 since that should make a CONST_INT or CONST_DOUBLE when we
6252 fold. Likewise, if we have a target we can use, it is best to
6253 store directly into the target unless the type is large enough
6254 that memcpy will be used. If we are making an initializer and
6255 all operands are constant, put it in memory as well. */
6256 else if ((TREE_STATIC (exp)
6257 && ((mode == BLKmode
6258 && ! (target != 0 && safe_from_p (target, exp, 1)))
6259 || TREE_ADDRESSABLE (exp)
6260 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6261 && (!MOVE_BY_PIECES_P
6262 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6263 TYPE_ALIGN (type) / BITS_PER_UNIT))
6264 && ! mostly_zeros_p (exp))))
6265 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6267 rtx constructor = output_constant_def (exp);
6268 if (modifier != EXPAND_CONST_ADDRESS
6269 && modifier != EXPAND_INITIALIZER
6270 && modifier != EXPAND_SUM
6271 && (! memory_address_p (GET_MODE (constructor),
6272 XEXP (constructor, 0))
6274 && GET_CODE (XEXP (constructor, 0)) != REG)))
6275 constructor = change_address (constructor, VOIDmode,
6276 XEXP (constructor, 0));
6282 /* Handle calls that pass values in multiple non-contiguous
6283 locations. The Irix 6 ABI has examples of this. */
6284 if (target == 0 || ! safe_from_p (target, exp, 1)
6285 || GET_CODE (target) == PARALLEL)
6287 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6288 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6290 target = assign_temp (type, 0, 1, 1);
6293 if (TREE_READONLY (exp))
6295 if (GET_CODE (target) == MEM)
6296 target = copy_rtx (target);
6298 RTX_UNCHANGING_P (target) = 1;
6301 store_constructor (exp, target, 0);
6307 tree exp1 = TREE_OPERAND (exp, 0);
6310 tree string = string_constant (exp1, &index);
6313 /* Try to optimize reads from const strings. */
6315 && TREE_CODE (string) == STRING_CST
6316 && TREE_CODE (index) == INTEGER_CST
6317 && !TREE_INT_CST_HIGH (index)
6318 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6319 && GET_MODE_CLASS (mode) == MODE_INT
6320 && GET_MODE_SIZE (mode) == 1
6321 && modifier != EXPAND_MEMORY_USE_WO)
6322 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6324 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6325 op0 = memory_address (mode, op0);
6327 if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6329 enum memory_use_mode memory_usage;
6330 memory_usage = get_memory_usage_from_modifier (modifier);
6332 if (memory_usage != MEMORY_USE_DONT)
6334 in_check_memory_usage = 1;
6335 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6337 GEN_INT (int_size_in_bytes (type)),
6338 TYPE_MODE (sizetype),
6339 GEN_INT (memory_usage),
6340 TYPE_MODE (integer_type_node));
6341 in_check_memory_usage = 0;
6345 temp = gen_rtx_MEM (mode, op0);
6347 if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
6348 || (TREE_CODE (exp1) == ADDR_EXPR
6349 && (exp2 = TREE_OPERAND (exp1, 0))
6350 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6351 MEM_SET_IN_STRUCT_P (temp, 1);
6353 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6354 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6356 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6357 here, because, in C and C++, the fact that a location is accessed
6358 through a pointer to const does not mean that the value there can
6359 never change. Languages where it can never change should
6360 also set TREE_STATIC. */
6361 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6366 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6370 tree array = TREE_OPERAND (exp, 0);
6371 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6372 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6373 tree index = TREE_OPERAND (exp, 1);
6374 tree index_type = TREE_TYPE (index);
6377 /* Optimize the special-case of a zero lower bound.
6379 We convert the low_bound to sizetype to avoid some problems
6380 with constant folding. (E.g. suppose the lower bound is 1,
6381 and its mode is QI. Without the conversion, (ARRAY
6382 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6383 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6385 But sizetype isn't quite right either (especially if
6386 the lowbound is negative). FIXME */
6388 if (! integer_zerop (low_bound))
6389 index = fold (build (MINUS_EXPR, index_type, index,
6390 convert (sizetype, low_bound)));
6392 /* Fold an expression like: "foo"[2].
6393 This is not done in fold so it won't happen inside &.
6394 Don't fold if this is for wide characters since it's too
6395 difficult to do correctly and this is a very rare case. */
6397 if (TREE_CODE (array) == STRING_CST
6398 && TREE_CODE (index) == INTEGER_CST
6399 && !TREE_INT_CST_HIGH (index)
6400 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6401 && GET_MODE_CLASS (mode) == MODE_INT
6402 && GET_MODE_SIZE (mode) == 1)
6403 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6405 /* If this is a constant index into a constant array,
6406 just get the value from the array. Handle both the cases when
6407 we have an explicit constructor and when our operand is a variable
6408 that was declared const. */
6410 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6412 if (TREE_CODE (index) == INTEGER_CST
6413 && TREE_INT_CST_HIGH (index) == 0)
6415 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6417 i = TREE_INT_CST_LOW (index);
6419 elem = TREE_CHAIN (elem);
6421 return expand_expr (fold (TREE_VALUE (elem)), target,
6422 tmode, ro_modifier);
6426 else if (optimize >= 1
6427 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6428 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6429 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6431 if (TREE_CODE (index) == INTEGER_CST)
6433 tree init = DECL_INITIAL (array);
6435 i = TREE_INT_CST_LOW (index);
6436 if (TREE_CODE (init) == CONSTRUCTOR)
6438 tree elem = CONSTRUCTOR_ELTS (init);
6441 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6442 elem = TREE_CHAIN (elem);
6444 return expand_expr (fold (TREE_VALUE (elem)), target,
6445 tmode, ro_modifier);
6447 else if (TREE_CODE (init) == STRING_CST
6448 && TREE_INT_CST_HIGH (index) == 0
6449 && (TREE_INT_CST_LOW (index)
6450 < TREE_STRING_LENGTH (init)))
6452 (TREE_STRING_POINTER
6453 (init)[TREE_INT_CST_LOW (index)]));
6458 /* ... fall through ... */
6462 /* If the operand is a CONSTRUCTOR, we can just extract the
6463 appropriate field if it is present. Don't do this if we have
6464 already written the data since we want to refer to that copy
6465 and varasm.c assumes that's what we'll do. */
6466 if (code != ARRAY_REF
6467 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6468 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6472 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6473 elt = TREE_CHAIN (elt))
6474 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6475 /* We can normally use the value of the field in the
6476 CONSTRUCTOR. However, if this is a bitfield in
6477 an integral mode that we can fit in a HOST_WIDE_INT,
6478 we must mask only the number of bits in the bitfield,
6479 since this is done implicitly by the constructor. If
6480 the bitfield does not meet either of those conditions,
6481 we can't do this optimization. */
6482 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6483 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6485 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6486 <= HOST_BITS_PER_WIDE_INT))))
6488 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6489 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6491 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6493 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6495 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6496 op0 = expand_and (op0, op1, target);
6500 enum machine_mode imode
6501 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6503 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6506 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6508 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6518 enum machine_mode mode1;
6524 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6525 &mode1, &unsignedp, &volatilep,
6528 /* If we got back the original object, something is wrong. Perhaps
6529 we are evaluating an expression too early. In any event, don't
6530 infinitely recurse. */
6534 /* If TEM's type is a union of variable size, pass TARGET to the inner
6535 computation, since it will need a temporary and TARGET is known
6536 to have to do. This occurs in unchecked conversion in Ada. */
6538 op0 = expand_expr (tem,
6539 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6540 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6542 ? target : NULL_RTX),
6544 modifier == EXPAND_INITIALIZER
6545 ? modifier : EXPAND_NORMAL);
6547 /* If this is a constant, put it into a register if it is a
6548 legitimate constant and memory if it isn't. */
6549 if (CONSTANT_P (op0))
6551 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6552 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6553 op0 = force_reg (mode, op0);
6555 op0 = validize_mem (force_const_mem (mode, op0));
6560 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6562 if (GET_CODE (op0) != MEM)
6565 if (GET_MODE (offset_rtx) != ptr_mode)
6567 #ifdef POINTERS_EXTEND_UNSIGNED
6568 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6570 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6574 /* A constant address in TO_RTX can have VOIDmode, we must not try
6575 to call force_reg for that case. Avoid that case. */
6576 if (GET_CODE (op0) == MEM
6577 && GET_MODE (op0) == BLKmode
6578 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6580 && (bitpos % bitsize) == 0
6581 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6582 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6584 rtx temp = change_address (op0, mode1,
6585 plus_constant (XEXP (op0, 0),
6588 if (GET_CODE (XEXP (temp, 0)) == REG)
6591 op0 = change_address (op0, mode1,
6592 force_reg (GET_MODE (XEXP (temp, 0)),
6598 op0 = change_address (op0, VOIDmode,
6599 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6600 force_reg (ptr_mode, offset_rtx)));
6603 /* Don't forget about volatility even if this is a bitfield. */
6604 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6606 op0 = copy_rtx (op0);
6607 MEM_VOLATILE_P (op0) = 1;
6610 /* Check the access. */
6611 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6613 enum memory_use_mode memory_usage;
6614 memory_usage = get_memory_usage_from_modifier (modifier);
6616 if (memory_usage != MEMORY_USE_DONT)
6621 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6622 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6624 /* Check the access right of the pointer. */
6625 if (size > BITS_PER_UNIT)
6626 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6628 GEN_INT (size / BITS_PER_UNIT),
6629 TYPE_MODE (sizetype),
6630 GEN_INT (memory_usage),
6631 TYPE_MODE (integer_type_node));
6635 /* In cases where an aligned union has an unaligned object
6636 as a field, we might be extracting a BLKmode value from
6637 an integer-mode (e.g., SImode) object. Handle this case
6638 by doing the extract into an object as wide as the field
6639 (which we know to be the width of a basic mode), then
6640 storing into memory, and changing the mode to BLKmode.
6641 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6642 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6643 if (mode1 == VOIDmode
6644 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6645 || (modifier != EXPAND_CONST_ADDRESS
6646 && modifier != EXPAND_INITIALIZER
6647 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6648 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6649 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6650 /* If the field isn't aligned enough to fetch as a memref,
6651 fetch it as a bit field. */
6652 || (SLOW_UNALIGNED_ACCESS
6653 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6654 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6656 enum machine_mode ext_mode = mode;
6658 if (ext_mode == BLKmode)
6659 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6661 if (ext_mode == BLKmode)
6663 /* In this case, BITPOS must start at a byte boundary and
6664 TARGET, if specified, must be a MEM. */
6665 if (GET_CODE (op0) != MEM
6666 || (target != 0 && GET_CODE (target) != MEM)
6667 || bitpos % BITS_PER_UNIT != 0)
6670 op0 = change_address (op0, VOIDmode,
6671 plus_constant (XEXP (op0, 0),
6672 bitpos / BITS_PER_UNIT));
6674 target = assign_temp (type, 0, 1, 1);
6676 emit_block_move (target, op0,
6677 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6684 op0 = validize_mem (op0);
6686 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6687 mark_reg_pointer (XEXP (op0, 0), alignment);
6689 op0 = extract_bit_field (op0, bitsize, bitpos,
6690 unsignedp, target, ext_mode, ext_mode,
6692 int_size_in_bytes (TREE_TYPE (tem)));
6694 /* If the result is a record type and BITSIZE is narrower than
6695 the mode of OP0, an integral mode, and this is a big endian
6696 machine, we must put the field into the high-order bits. */
6697 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6698 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6699 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6700 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6701 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6705 if (mode == BLKmode)
6707 rtx new = assign_stack_temp (ext_mode,
6708 bitsize / BITS_PER_UNIT, 0);
6710 emit_move_insn (new, op0);
6711 op0 = copy_rtx (new);
6712 PUT_MODE (op0, BLKmode);
6713 MEM_SET_IN_STRUCT_P (op0, 1);
6719 /* If the result is BLKmode, use that to access the object
6721 if (mode == BLKmode)
6724 /* Get a reference to just this component. */
6725 if (modifier == EXPAND_CONST_ADDRESS
6726 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6727 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6728 (bitpos / BITS_PER_UNIT)));
6730 op0 = change_address (op0, mode1,
6731 plus_constant (XEXP (op0, 0),
6732 (bitpos / BITS_PER_UNIT)));
6734 if (GET_CODE (op0) == MEM)
6735 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6737 if (GET_CODE (XEXP (op0, 0)) == REG)
6738 mark_reg_pointer (XEXP (op0, 0), alignment);
6740 MEM_SET_IN_STRUCT_P (op0, 1);
6741 MEM_VOLATILE_P (op0) |= volatilep;
6742 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6743 || modifier == EXPAND_CONST_ADDRESS
6744 || modifier == EXPAND_INITIALIZER)
6746 else if (target == 0)
6747 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6749 convert_move (target, op0, unsignedp);
6753 /* Intended for a reference to a buffer of a file-object in Pascal.
6754 But it's not certain that a special tree code will really be
6755 necessary for these. INDIRECT_REF might work for them. */
6761 /* Pascal set IN expression.
6764 rlo = set_low - (set_low%bits_per_word);
6765 the_word = set [ (index - rlo)/bits_per_word ];
6766 bit_index = index % bits_per_word;
6767 bitmask = 1 << bit_index;
6768 return !!(the_word & bitmask); */
6770 tree set = TREE_OPERAND (exp, 0);
6771 tree index = TREE_OPERAND (exp, 1);
6772 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6773 tree set_type = TREE_TYPE (set);
6774 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6775 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6776 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6777 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6778 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6779 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6780 rtx setaddr = XEXP (setval, 0);
6781 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6783 rtx diff, quo, rem, addr, bit, result;
6785 preexpand_calls (exp);
6787 /* If domain is empty, answer is no. Likewise if index is constant
6788 and out of bounds. */
6789 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6790 && TREE_CODE (set_low_bound) == INTEGER_CST
6791 && tree_int_cst_lt (set_high_bound, set_low_bound))
6792 || (TREE_CODE (index) == INTEGER_CST
6793 && TREE_CODE (set_low_bound) == INTEGER_CST
6794 && tree_int_cst_lt (index, set_low_bound))
6795 || (TREE_CODE (set_high_bound) == INTEGER_CST
6796 && TREE_CODE (index) == INTEGER_CST
6797 && tree_int_cst_lt (set_high_bound, index))))
6801 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6803 /* If we get here, we have to generate the code for both cases
6804 (in range and out of range). */
6806 op0 = gen_label_rtx ();
6807 op1 = gen_label_rtx ();
6809 if (! (GET_CODE (index_val) == CONST_INT
6810 && GET_CODE (lo_r) == CONST_INT))
6812 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6813 GET_MODE (index_val), iunsignedp, 0, op1);
6816 if (! (GET_CODE (index_val) == CONST_INT
6817 && GET_CODE (hi_r) == CONST_INT))
6819 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6820 GET_MODE (index_val), iunsignedp, 0, op1);
6823 /* Calculate the element number of bit zero in the first word
6825 if (GET_CODE (lo_r) == CONST_INT)
6826 rlow = GEN_INT (INTVAL (lo_r)
6827 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6829 rlow = expand_binop (index_mode, and_optab, lo_r,
6830 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6831 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6833 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6834 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6836 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6837 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6838 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6839 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6841 addr = memory_address (byte_mode,
6842 expand_binop (index_mode, add_optab, diff,
6843 setaddr, NULL_RTX, iunsignedp,
6846 /* Extract the bit we want to examine */
6847 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6848 gen_rtx_MEM (byte_mode, addr),
6849 make_tree (TREE_TYPE (index), rem),
6851 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6852 GET_MODE (target) == byte_mode ? target : 0,
6853 1, OPTAB_LIB_WIDEN);
6855 if (result != target)
6856 convert_move (target, result, 1);
6858 /* Output the code to handle the out-of-range case. */
6861 emit_move_insn (target, const0_rtx);
6866 case WITH_CLEANUP_EXPR:
6867 if (RTL_EXPR_RTL (exp) == 0)
6870 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6871 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6873 /* That's it for this cleanup. */
6874 TREE_OPERAND (exp, 2) = 0;
6876 return RTL_EXPR_RTL (exp);
6878 case CLEANUP_POINT_EXPR:
6880 /* Start a new binding layer that will keep track of all cleanup
6881 actions to be performed. */
6882 expand_start_bindings (0);
6884 target_temp_slot_level = temp_slot_level;
6886 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6887 /* If we're going to use this value, load it up now. */
6889 op0 = force_not_mem (op0);
6890 preserve_temp_slots (op0);
6891 expand_end_bindings (NULL_TREE, 0, 0);
6896 /* Check for a built-in function. */
6897 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6898 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6900 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6901 return expand_builtin (exp, target, subtarget, tmode, ignore);
6903 /* If this call was expanded already by preexpand_calls,
6904 just return the result we got. */
6905 if (CALL_EXPR_RTL (exp) != 0)
6906 return CALL_EXPR_RTL (exp);
6908 return expand_call (exp, target, ignore);
6910 case NON_LVALUE_EXPR:
6913 case REFERENCE_EXPR:
6914 if (TREE_CODE (type) == UNION_TYPE)
6916 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6919 if (mode != BLKmode)
6920 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6922 target = assign_temp (type, 0, 1, 1);
6925 if (GET_CODE (target) == MEM)
6926 /* Store data into beginning of memory target. */
6927 store_expr (TREE_OPERAND (exp, 0),
6928 change_address (target, TYPE_MODE (valtype), 0), 0);
6930 else if (GET_CODE (target) == REG)
6931 /* Store this field into a union of the proper type. */
6932 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6933 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6935 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6940 /* Return the entire union. */
6944 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6946 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6949 /* If the signedness of the conversion differs and OP0 is
6950 a promoted SUBREG, clear that indication since we now
6951 have to do the proper extension. */
6952 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6953 && GET_CODE (op0) == SUBREG)
6954 SUBREG_PROMOTED_VAR_P (op0) = 0;
6959 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6960 if (GET_MODE (op0) == mode)
6963 /* If OP0 is a constant, just convert it into the proper mode. */
6964 if (CONSTANT_P (op0))
6966 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6967 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6969 if (modifier == EXPAND_INITIALIZER)
6970 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6974 convert_to_mode (mode, op0,
6975 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6977 convert_move (target, op0,
6978 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6982 /* We come here from MINUS_EXPR when the second operand is a
6985 this_optab = add_optab;
6987 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6988 something else, make sure we add the register to the constant and
6989 then to the other thing. This case can occur during strength
6990 reduction and doing it this way will produce better code if the
6991 frame pointer or argument pointer is eliminated.
6993 fold-const.c will ensure that the constant is always in the inner
6994 PLUS_EXPR, so the only case we need to do anything about is if
6995 sp, ap, or fp is our second argument, in which case we must swap
6996 the innermost first argument and our second argument. */
6998 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6999 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7000 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7001 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7002 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7003 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7005 tree t = TREE_OPERAND (exp, 1);
7007 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7008 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7011 /* If the result is to be ptr_mode and we are adding an integer to
7012 something, we might be forming a constant. So try to use
7013 plus_constant. If it produces a sum and we can't accept it,
7014 use force_operand. This allows P = &ARR[const] to generate
7015 efficient code on machines where a SYMBOL_REF is not a valid
7018 If this is an EXPAND_SUM call, always return the sum. */
7019 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7020 || mode == ptr_mode)
7022 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7023 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7024 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7026 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7028 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
7029 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7030 op1 = force_operand (op1, target);
7034 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7035 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7036 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7038 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7040 if (! CONSTANT_P (op0))
7042 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7043 VOIDmode, modifier);
7044 /* Don't go to both_summands if modifier
7045 says it's not right to return a PLUS. */
7046 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7050 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
7051 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7052 op0 = force_operand (op0, target);
7057 /* No sense saving up arithmetic to be done
7058 if it's all in the wrong mode to form part of an address.
7059 And force_operand won't know whether to sign-extend or
7061 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7062 || mode != ptr_mode)
7065 preexpand_calls (exp);
7066 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7069 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7070 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7073 /* Make sure any term that's a sum with a constant comes last. */
7074 if (GET_CODE (op0) == PLUS
7075 && CONSTANT_P (XEXP (op0, 1)))
7081 /* If adding to a sum including a constant,
7082 associate it to put the constant outside. */
7083 if (GET_CODE (op1) == PLUS
7084 && CONSTANT_P (XEXP (op1, 1)))
7086 rtx constant_term = const0_rtx;
7088 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7091 /* Ensure that MULT comes first if there is one. */
7092 else if (GET_CODE (op0) == MULT)
7093 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7095 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7097 /* Let's also eliminate constants from op0 if possible. */
7098 op0 = eliminate_constant_term (op0, &constant_term);
7100 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7101 their sum should be a constant. Form it into OP1, since the
7102 result we want will then be OP0 + OP1. */
7104 temp = simplify_binary_operation (PLUS, mode, constant_term,
7109 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7112 /* Put a constant term last and put a multiplication first. */
7113 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7114 temp = op1, op1 = op0, op0 = temp;
7116 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7117 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7120 /* For initializers, we are allowed to return a MINUS of two
7121 symbolic constants. Here we handle all cases when both operands
7123 /* Handle difference of two symbolic constants,
7124 for the sake of an initializer. */
7125 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7126 && really_constant_p (TREE_OPERAND (exp, 0))
7127 && really_constant_p (TREE_OPERAND (exp, 1)))
7129 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7130 VOIDmode, ro_modifier);
7131 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7132 VOIDmode, ro_modifier);
7134 /* If the last operand is a CONST_INT, use plus_constant of
7135 the negated constant. Else make the MINUS. */
7136 if (GET_CODE (op1) == CONST_INT)
7137 return plus_constant (op0, - INTVAL (op1));
7139 return gen_rtx_MINUS (mode, op0, op1);
7141 /* Convert A - const to A + (-const). */
7142 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7144 tree negated = fold (build1 (NEGATE_EXPR, type,
7145 TREE_OPERAND (exp, 1)));
7147 /* Deal with the case where we can't negate the constant
7149 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7151 tree newtype = signed_type (type);
7152 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7153 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7154 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7156 if (! TREE_OVERFLOW (newneg))
7157 return expand_expr (convert (type,
7158 build (PLUS_EXPR, newtype,
7160 target, tmode, ro_modifier);
7164 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7168 this_optab = sub_optab;
7172 preexpand_calls (exp);
7173 /* If first operand is constant, swap them.
7174 Thus the following special case checks need only
7175 check the second operand. */
7176 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7178 register tree t1 = TREE_OPERAND (exp, 0);
7179 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7180 TREE_OPERAND (exp, 1) = t1;
7183 /* Attempt to return something suitable for generating an
7184 indexed address, for machines that support that. */
7186 if (modifier == EXPAND_SUM && mode == ptr_mode
7187 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7188 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7190 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7193 /* Apply distributive law if OP0 is x+c. */
7194 if (GET_CODE (op0) == PLUS
7195 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7196 return gen_rtx_PLUS (mode,
7197 gen_rtx_MULT (mode, XEXP (op0, 0),
7198 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7199 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7200 * INTVAL (XEXP (op0, 1))));
7202 if (GET_CODE (op0) != REG)
7203 op0 = force_operand (op0, NULL_RTX);
7204 if (GET_CODE (op0) != REG)
7205 op0 = copy_to_mode_reg (mode, op0);
7207 return gen_rtx_MULT (mode, op0,
7208 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7211 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7214 /* Check for multiplying things that have been extended
7215 from a narrower type. If this machine supports multiplying
7216 in that narrower type with a result in the desired type,
7217 do it that way, and avoid the explicit type-conversion. */
7218 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7219 && TREE_CODE (type) == INTEGER_TYPE
7220 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7221 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7222 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7223 && int_fits_type_p (TREE_OPERAND (exp, 1),
7224 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7225 /* Don't use a widening multiply if a shift will do. */
7226 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7227 > HOST_BITS_PER_WIDE_INT)
7228 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7230 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7231 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7233 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7234 /* If both operands are extended, they must either both
7235 be zero-extended or both be sign-extended. */
7236 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7238 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7240 enum machine_mode innermode
7241 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7242 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7243 ? smul_widen_optab : umul_widen_optab);
7244 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7245 ? umul_widen_optab : smul_widen_optab);
7246 if (mode == GET_MODE_WIDER_MODE (innermode))
7248 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7250 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7251 NULL_RTX, VOIDmode, 0);
7252 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7253 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7256 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7257 NULL_RTX, VOIDmode, 0);
7260 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7261 && innermode == word_mode)
7264 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7265 NULL_RTX, VOIDmode, 0);
7266 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7267 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7270 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7271 NULL_RTX, VOIDmode, 0);
7272 temp = expand_binop (mode, other_optab, op0, op1, target,
7273 unsignedp, OPTAB_LIB_WIDEN);
7274 htem = expand_mult_highpart_adjust (innermode,
7275 gen_highpart (innermode, temp),
7277 gen_highpart (innermode, temp),
7279 emit_move_insn (gen_highpart (innermode, temp), htem);
7284 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7285 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7286 return expand_mult (mode, op0, op1, target, unsignedp);
7288 case TRUNC_DIV_EXPR:
7289 case FLOOR_DIV_EXPR:
7291 case ROUND_DIV_EXPR:
7292 case EXACT_DIV_EXPR:
7293 preexpand_calls (exp);
7294 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7296 /* Possible optimization: compute the dividend with EXPAND_SUM
7297 then if the divisor is constant can optimize the case
7298 where some terms of the dividend have coeffs divisible by it. */
7299 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7300 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7301 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7304 this_optab = flodiv_optab;
7307 case TRUNC_MOD_EXPR:
7308 case FLOOR_MOD_EXPR:
7310 case ROUND_MOD_EXPR:
7311 preexpand_calls (exp);
7312 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7314 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7315 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7316 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7318 case FIX_ROUND_EXPR:
7319 case FIX_FLOOR_EXPR:
7321 abort (); /* Not used for C. */
7323 case FIX_TRUNC_EXPR:
7324 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7326 target = gen_reg_rtx (mode);
7327 expand_fix (target, op0, unsignedp);
7331 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7333 target = gen_reg_rtx (mode);
7334 /* expand_float can't figure out what to do if FROM has VOIDmode.
7335 So give it the correct mode. With -O, cse will optimize this. */
7336 if (GET_MODE (op0) == VOIDmode)
7337 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7339 expand_float (target, op0,
7340 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7344 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7345 temp = expand_unop (mode, neg_optab, op0, target, 0);
7351 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7353 /* Handle complex values specially. */
7354 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7355 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7356 return expand_complex_abs (mode, op0, target, unsignedp);
7358 /* Unsigned abs is simply the operand. Testing here means we don't
7359 risk generating incorrect code below. */
7360 if (TREE_UNSIGNED (type))
7363 return expand_abs (mode, op0, target,
7364 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7368 target = original_target;
7369 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7370 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7371 || GET_MODE (target) != mode
7372 || (GET_CODE (target) == REG
7373 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7374 target = gen_reg_rtx (mode);
7375 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7376 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7378 /* First try to do it with a special MIN or MAX instruction.
7379 If that does not win, use a conditional jump to select the proper
7381 this_optab = (TREE_UNSIGNED (type)
7382 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7383 : (code == MIN_EXPR ? smin_optab : smax_optab));
7385 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7390 /* At this point, a MEM target is no longer useful; we will get better
7393 if (GET_CODE (target) == MEM)
7394 target = gen_reg_rtx (mode);
7397 emit_move_insn (target, op0);
7399 op0 = gen_label_rtx ();
7401 /* If this mode is an integer too wide to compare properly,
7402 compare word by word. Rely on cse to optimize constant cases. */
7403 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7405 if (code == MAX_EXPR)
7406 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7407 target, op1, NULL_RTX, op0);
7409 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7410 op1, target, NULL_RTX, op0);
7411 emit_move_insn (target, op1);
7415 if (code == MAX_EXPR)
7416 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7417 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7418 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7420 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7421 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7422 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7423 if (temp == const0_rtx)
7424 emit_move_insn (target, op1);
7425 else if (temp != const_true_rtx)
7427 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7428 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7431 emit_move_insn (target, op1);
7438 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7439 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7445 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7446 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7451 /* ??? Can optimize bitwise operations with one arg constant.
7452 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7453 and (a bitwise1 b) bitwise2 b (etc)
7454 but that is probably not worth while. */
7456 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7457 boolean values when we want in all cases to compute both of them. In
7458 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7459 as actual zero-or-1 values and then bitwise anding. In cases where
7460 there cannot be any side effects, better code would be made by
7461 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7462 how to recognize those cases. */
7464 case TRUTH_AND_EXPR:
7466 this_optab = and_optab;
7471 this_optab = ior_optab;
7474 case TRUTH_XOR_EXPR:
7476 this_optab = xor_optab;
7483 preexpand_calls (exp);
7484 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7486 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7487 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7490 /* Could determine the answer when only additive constants differ. Also,
7491 the addition of one can be handled by changing the condition. */
7498 preexpand_calls (exp);
7499 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7503 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7504 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7506 && GET_CODE (original_target) == REG
7507 && (GET_MODE (original_target)
7508 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7510 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7513 if (temp != original_target)
7514 temp = copy_to_reg (temp);
7516 op1 = gen_label_rtx ();
7517 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7518 GET_MODE (temp), unsignedp, 0, op1);
7519 emit_move_insn (temp, const1_rtx);
7524 /* If no set-flag instruction, must generate a conditional
7525 store into a temporary variable. Drop through
7526 and handle this like && and ||. */
7528 case TRUTH_ANDIF_EXPR:
7529 case TRUTH_ORIF_EXPR:
7531 && (target == 0 || ! safe_from_p (target, exp, 1)
7532 /* Make sure we don't have a hard reg (such as function's return
7533 value) live across basic blocks, if not optimizing. */
7534 || (!optimize && GET_CODE (target) == REG
7535 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7536 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7539 emit_clr_insn (target);
7541 op1 = gen_label_rtx ();
7542 jumpifnot (exp, op1);
7545 emit_0_to_1_insn (target);
7548 return ignore ? const0_rtx : target;
7550 case TRUTH_NOT_EXPR:
7551 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7552 /* The parser is careful to generate TRUTH_NOT_EXPR
7553 only with operands that are always zero or one. */
7554 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7555 target, 1, OPTAB_LIB_WIDEN);
7561 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7563 return expand_expr (TREE_OPERAND (exp, 1),
7564 (ignore ? const0_rtx : target),
7568 /* If we would have a "singleton" (see below) were it not for a
7569 conversion in each arm, bring that conversion back out. */
7570 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7571 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7572 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7573 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7575 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7576 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7578 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7579 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7580 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7581 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7582 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7583 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7584 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7585 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7586 return expand_expr (build1 (NOP_EXPR, type,
7587 build (COND_EXPR, TREE_TYPE (true),
7588 TREE_OPERAND (exp, 0),
7590 target, tmode, modifier);
7594 /* Note that COND_EXPRs whose type is a structure or union
7595 are required to be constructed to contain assignments of
7596 a temporary variable, so that we can evaluate them here
7597 for side effect only. If type is void, we must do likewise. */
7599 /* If an arm of the branch requires a cleanup,
7600 only that cleanup is performed. */
7603 tree binary_op = 0, unary_op = 0;
7605 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7606 convert it to our mode, if necessary. */
7607 if (integer_onep (TREE_OPERAND (exp, 1))
7608 && integer_zerop (TREE_OPERAND (exp, 2))
7609 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7613 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7618 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7619 if (GET_MODE (op0) == mode)
7623 target = gen_reg_rtx (mode);
7624 convert_move (target, op0, unsignedp);
7628 /* Check for X ? A + B : A. If we have this, we can copy A to the
7629 output and conditionally add B. Similarly for unary operations.
7630 Don't do this if X has side-effects because those side effects
7631 might affect A or B and the "?" operation is a sequence point in
7632 ANSI. (operand_equal_p tests for side effects.) */
7634 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7635 && operand_equal_p (TREE_OPERAND (exp, 2),
7636 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7637 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7638 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7639 && operand_equal_p (TREE_OPERAND (exp, 1),
7640 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7641 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7642 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7643 && operand_equal_p (TREE_OPERAND (exp, 2),
7644 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7645 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7646 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7647 && operand_equal_p (TREE_OPERAND (exp, 1),
7648 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7649 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7651 /* If we are not to produce a result, we have no target. Otherwise,
7652 if a target was specified use it; it will not be used as an
7653 intermediate target unless it is safe. If no target, use a
7658 else if (original_target
7659 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7660 || (singleton && GET_CODE (original_target) == REG
7661 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7662 && original_target == var_rtx (singleton)))
7663 && GET_MODE (original_target) == mode
7664 #ifdef HAVE_conditional_move
7665 && (! can_conditionally_move_p (mode)
7666 || GET_CODE (original_target) == REG
7667 || TREE_ADDRESSABLE (type))
7669 && ! (GET_CODE (original_target) == MEM
7670 && MEM_VOLATILE_P (original_target)))
7671 temp = original_target;
7672 else if (TREE_ADDRESSABLE (type))
7675 temp = assign_temp (type, 0, 0, 1);
7677 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7678 do the test of X as a store-flag operation, do this as
7679 A + ((X != 0) << log C). Similarly for other simple binary
7680 operators. Only do for C == 1 if BRANCH_COST is low. */
7681 if (temp && singleton && binary_op
7682 && (TREE_CODE (binary_op) == PLUS_EXPR
7683 || TREE_CODE (binary_op) == MINUS_EXPR
7684 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7685 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7686 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7687 : integer_onep (TREE_OPERAND (binary_op, 1)))
7688 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7691 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7692 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7693 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7696 /* If we had X ? A : A + 1, do this as A + (X == 0).
7698 We have to invert the truth value here and then put it
7699 back later if do_store_flag fails. We cannot simply copy
7700 TREE_OPERAND (exp, 0) to another variable and modify that
7701 because invert_truthvalue can modify the tree pointed to
7703 if (singleton == TREE_OPERAND (exp, 1))
7704 TREE_OPERAND (exp, 0)
7705 = invert_truthvalue (TREE_OPERAND (exp, 0));
7707 result = do_store_flag (TREE_OPERAND (exp, 0),
7708 (safe_from_p (temp, singleton, 1)
7710 mode, BRANCH_COST <= 1);
7712 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7713 result = expand_shift (LSHIFT_EXPR, mode, result,
7714 build_int_2 (tree_log2
7718 (safe_from_p (temp, singleton, 1)
7719 ? temp : NULL_RTX), 0);
7723 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7724 return expand_binop (mode, boptab, op1, result, temp,
7725 unsignedp, OPTAB_LIB_WIDEN);
7727 else if (singleton == TREE_OPERAND (exp, 1))
7728 TREE_OPERAND (exp, 0)
7729 = invert_truthvalue (TREE_OPERAND (exp, 0));
7732 do_pending_stack_adjust ();
7734 op0 = gen_label_rtx ();
7736 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7740 /* If the target conflicts with the other operand of the
7741 binary op, we can't use it. Also, we can't use the target
7742 if it is a hard register, because evaluating the condition
7743 might clobber it. */
7745 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7746 || (GET_CODE (temp) == REG
7747 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7748 temp = gen_reg_rtx (mode);
7749 store_expr (singleton, temp, 0);
7752 expand_expr (singleton,
7753 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7754 if (singleton == TREE_OPERAND (exp, 1))
7755 jumpif (TREE_OPERAND (exp, 0), op0);
7757 jumpifnot (TREE_OPERAND (exp, 0), op0);
7759 start_cleanup_deferral ();
7760 if (binary_op && temp == 0)
7761 /* Just touch the other operand. */
7762 expand_expr (TREE_OPERAND (binary_op, 1),
7763 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7765 store_expr (build (TREE_CODE (binary_op), type,
7766 make_tree (type, temp),
7767 TREE_OPERAND (binary_op, 1)),
7770 store_expr (build1 (TREE_CODE (unary_op), type,
7771 make_tree (type, temp)),
7775 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7776 comparison operator. If we have one of these cases, set the
7777 output to A, branch on A (cse will merge these two references),
7778 then set the output to FOO. */
7780 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7781 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7782 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7783 TREE_OPERAND (exp, 1), 0)
7784 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7785 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7786 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7788 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7789 temp = gen_reg_rtx (mode);
7790 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7791 jumpif (TREE_OPERAND (exp, 0), op0);
7793 start_cleanup_deferral ();
7794 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7798 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7799 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7800 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7801 TREE_OPERAND (exp, 2), 0)
7802 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7803 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7804 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7806 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7807 temp = gen_reg_rtx (mode);
7808 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7809 jumpifnot (TREE_OPERAND (exp, 0), op0);
7811 start_cleanup_deferral ();
7812 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7817 op1 = gen_label_rtx ();
7818 jumpifnot (TREE_OPERAND (exp, 0), op0);
7820 start_cleanup_deferral ();
7822 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7824 expand_expr (TREE_OPERAND (exp, 1),
7825 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7826 end_cleanup_deferral ();
7828 emit_jump_insn (gen_jump (op1));
7831 start_cleanup_deferral ();
7833 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7835 expand_expr (TREE_OPERAND (exp, 2),
7836 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7839 end_cleanup_deferral ();
7850 /* Something needs to be initialized, but we didn't know
7851 where that thing was when building the tree. For example,
7852 it could be the return value of a function, or a parameter
7853 to a function which lays down in the stack, or a temporary
7854 variable which must be passed by reference.
7856 We guarantee that the expression will either be constructed
7857 or copied into our original target. */
7859 tree slot = TREE_OPERAND (exp, 0);
7860 tree cleanups = NULL_TREE;
7863 if (TREE_CODE (slot) != VAR_DECL)
7867 target = original_target;
7871 if (DECL_RTL (slot) != 0)
7873 target = DECL_RTL (slot);
7874 /* If we have already expanded the slot, so don't do
7876 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7881 target = assign_temp (type, 2, 0, 1);
7882 /* All temp slots at this level must not conflict. */
7883 preserve_temp_slots (target);
7884 DECL_RTL (slot) = target;
7885 if (TREE_ADDRESSABLE (slot))
7887 TREE_ADDRESSABLE (slot) = 0;
7888 mark_addressable (slot);
7891 /* Since SLOT is not known to the called function
7892 to belong to its stack frame, we must build an explicit
7893 cleanup. This case occurs when we must build up a reference
7894 to pass the reference as an argument. In this case,
7895 it is very likely that such a reference need not be
7898 if (TREE_OPERAND (exp, 2) == 0)
7899 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7900 cleanups = TREE_OPERAND (exp, 2);
7905 /* This case does occur, when expanding a parameter which
7906 needs to be constructed on the stack. The target
7907 is the actual stack address that we want to initialize.
7908 The function we call will perform the cleanup in this case. */
7910 /* If we have already assigned it space, use that space,
7911 not target that we were passed in, as our target
7912 parameter is only a hint. */
7913 if (DECL_RTL (slot) != 0)
7915 target = DECL_RTL (slot);
7916 /* If we have already expanded the slot, so don't do
7918 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7923 DECL_RTL (slot) = target;
7924 /* If we must have an addressable slot, then make sure that
7925 the RTL that we just stored in slot is OK. */
7926 if (TREE_ADDRESSABLE (slot))
7928 TREE_ADDRESSABLE (slot) = 0;
7929 mark_addressable (slot);
7934 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7935 /* Mark it as expanded. */
7936 TREE_OPERAND (exp, 1) = NULL_TREE;
7938 TREE_USED (slot) = 1;
7939 store_expr (exp1, target, 0);
7941 expand_decl_cleanup (NULL_TREE, cleanups);
7948 tree lhs = TREE_OPERAND (exp, 0);
7949 tree rhs = TREE_OPERAND (exp, 1);
7950 tree noncopied_parts = 0;
7951 tree lhs_type = TREE_TYPE (lhs);
7953 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7954 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7955 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7956 TYPE_NONCOPIED_PARTS (lhs_type));
7957 while (noncopied_parts != 0)
7959 expand_assignment (TREE_VALUE (noncopied_parts),
7960 TREE_PURPOSE (noncopied_parts), 0, 0);
7961 noncopied_parts = TREE_CHAIN (noncopied_parts);
7968 /* If lhs is complex, expand calls in rhs before computing it.
7969 That's so we don't compute a pointer and save it over a call.
7970 If lhs is simple, compute it first so we can give it as a
7971 target if the rhs is just a call. This avoids an extra temp and copy
7972 and that prevents a partial-subsumption which makes bad code.
7973 Actually we could treat component_ref's of vars like vars. */
7975 tree lhs = TREE_OPERAND (exp, 0);
7976 tree rhs = TREE_OPERAND (exp, 1);
7977 tree noncopied_parts = 0;
7978 tree lhs_type = TREE_TYPE (lhs);
7982 if (TREE_CODE (lhs) != VAR_DECL
7983 && TREE_CODE (lhs) != RESULT_DECL
7984 && TREE_CODE (lhs) != PARM_DECL
7985 && ! (TREE_CODE (lhs) == INDIRECT_REF
7986 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7987 preexpand_calls (exp);
7989 /* Check for |= or &= of a bitfield of size one into another bitfield
7990 of size 1. In this case, (unless we need the result of the
7991 assignment) we can do this more efficiently with a
7992 test followed by an assignment, if necessary.
7994 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7995 things change so we do, this code should be enhanced to
7998 && TREE_CODE (lhs) == COMPONENT_REF
7999 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8000 || TREE_CODE (rhs) == BIT_AND_EXPR)
8001 && TREE_OPERAND (rhs, 0) == lhs
8002 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8003 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
8004 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
8006 rtx label = gen_label_rtx ();
8008 do_jump (TREE_OPERAND (rhs, 1),
8009 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8010 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8011 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8012 (TREE_CODE (rhs) == BIT_IOR_EXPR
8014 : integer_zero_node)),
8016 do_pending_stack_adjust ();
8021 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8022 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8023 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8024 TYPE_NONCOPIED_PARTS (lhs_type));
8026 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8027 while (noncopied_parts != 0)
8029 expand_assignment (TREE_PURPOSE (noncopied_parts),
8030 TREE_VALUE (noncopied_parts), 0, 0);
8031 noncopied_parts = TREE_CHAIN (noncopied_parts);
8037 if (!TREE_OPERAND (exp, 0))
8038 expand_null_return ();
8040 expand_return (TREE_OPERAND (exp, 0));
8043 case PREINCREMENT_EXPR:
8044 case PREDECREMENT_EXPR:
8045 return expand_increment (exp, 0, ignore);
8047 case POSTINCREMENT_EXPR:
8048 case POSTDECREMENT_EXPR:
8049 /* Faster to treat as pre-increment if result is not used. */
8050 return expand_increment (exp, ! ignore, ignore);
8053 /* If nonzero, TEMP will be set to the address of something that might
8054 be a MEM corresponding to a stack slot. */
8057 /* Are we taking the address of a nested function? */
8058 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8059 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8060 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8061 && ! TREE_STATIC (exp))
8063 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8064 op0 = force_operand (op0, target);
8066 /* If we are taking the address of something erroneous, just
8068 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8072 /* We make sure to pass const0_rtx down if we came in with
8073 ignore set, to avoid doing the cleanups twice for something. */
8074 op0 = expand_expr (TREE_OPERAND (exp, 0),
8075 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8076 (modifier == EXPAND_INITIALIZER
8077 ? modifier : EXPAND_CONST_ADDRESS));
8079 /* If we are going to ignore the result, OP0 will have been set
8080 to const0_rtx, so just return it. Don't get confused and
8081 think we are taking the address of the constant. */
8085 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8086 clever and returns a REG when given a MEM. */
8087 op0 = protect_from_queue (op0, 1);
8089 /* We would like the object in memory. If it is a constant,
8090 we can have it be statically allocated into memory. For
8091 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
8092 memory and store the value into it. */
8094 if (CONSTANT_P (op0))
8095 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8097 else if (GET_CODE (op0) == MEM)
8099 mark_temp_addr_taken (op0);
8100 temp = XEXP (op0, 0);
8103 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8104 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8106 /* If this object is in a register, it must be not
8108 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8109 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8111 mark_temp_addr_taken (memloc);
8112 emit_move_insn (memloc, op0);
8116 if (GET_CODE (op0) != MEM)
8119 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8121 temp = XEXP (op0, 0);
8122 #ifdef POINTERS_EXTEND_UNSIGNED
8123 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8124 && mode == ptr_mode)
8125 temp = convert_memory_address (ptr_mode, temp);
8130 op0 = force_operand (XEXP (op0, 0), target);
8133 if (flag_force_addr && GET_CODE (op0) != REG)
8134 op0 = force_reg (Pmode, op0);
8136 if (GET_CODE (op0) == REG
8137 && ! REG_USERVAR_P (op0))
8138 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8140 /* If we might have had a temp slot, add an equivalent address
8143 update_temp_slot_address (temp, op0);
8145 #ifdef POINTERS_EXTEND_UNSIGNED
8146 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8147 && mode == ptr_mode)
8148 op0 = convert_memory_address (ptr_mode, op0);
8153 case ENTRY_VALUE_EXPR:
8156 /* COMPLEX type for Extended Pascal & Fortran */
8159 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8162 /* Get the rtx code of the operands. */
8163 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8164 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8167 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8171 /* Move the real (op0) and imaginary (op1) parts to their location. */
8172 emit_move_insn (gen_realpart (mode, target), op0);
8173 emit_move_insn (gen_imagpart (mode, target), op1);
8175 insns = get_insns ();
8178 /* Complex construction should appear as a single unit. */
8179 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8180 each with a separate pseudo as destination.
8181 It's not correct for flow to treat them as a unit. */
8182 if (GET_CODE (target) != CONCAT)
8183 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8191 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8192 return gen_realpart (mode, op0);
8195 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8196 return gen_imagpart (mode, op0);
8200 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8204 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8207 target = gen_reg_rtx (mode);
8211 /* Store the realpart and the negated imagpart to target. */
8212 emit_move_insn (gen_realpart (partmode, target),
8213 gen_realpart (partmode, op0));
8215 imag_t = gen_imagpart (partmode, target);
8216 temp = expand_unop (partmode, neg_optab,
8217 gen_imagpart (partmode, op0), imag_t, 0);
8219 emit_move_insn (imag_t, temp);
8221 insns = get_insns ();
8224 /* Conjugate should appear as a single unit
8225 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8226 each with a separate pseudo as destination.
8227 It's not correct for flow to treat them as a unit. */
8228 if (GET_CODE (target) != CONCAT)
8229 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8236 case TRY_CATCH_EXPR:
8238 tree handler = TREE_OPERAND (exp, 1);
8240 expand_eh_region_start ();
8242 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8244 expand_eh_region_end (handler);
8249 case TRY_FINALLY_EXPR:
8251 tree try_block = TREE_OPERAND (exp, 0);
8252 tree finally_block = TREE_OPERAND (exp, 1);
8253 rtx finally_label = gen_label_rtx ();
8254 rtx done_label = gen_label_rtx ();
8255 rtx return_link = gen_reg_rtx (Pmode);
8256 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8257 (tree) finally_label, (tree) return_link);
8258 TREE_SIDE_EFFECTS (cleanup) = 1;
8260 /* Start a new binding layer that will keep track of all cleanup
8261 actions to be performed. */
8262 expand_start_bindings (0);
8264 target_temp_slot_level = temp_slot_level;
8266 expand_decl_cleanup (NULL_TREE, cleanup);
8267 op0 = expand_expr (try_block, target, tmode, modifier);
8269 preserve_temp_slots (op0);
8270 expand_end_bindings (NULL_TREE, 0, 0);
8271 emit_jump (done_label);
8272 emit_label (finally_label);
8273 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8274 emit_indirect_jump (return_link);
8275 emit_label (done_label);
8279 case GOTO_SUBROUTINE_EXPR:
8281 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8282 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8283 rtx return_address = gen_label_rtx ();
8284 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8286 emit_label (return_address);
8292 rtx dcc = get_dynamic_cleanup_chain ();
8293 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8299 rtx dhc = get_dynamic_handler_chain ();
8300 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8305 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8308 /* Here to do an ordinary binary operator, generating an instruction
8309 from the optab already placed in `this_optab'. */
8311 preexpand_calls (exp);
8312 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8314 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8315 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8317 temp = expand_binop (mode, this_optab, op0, op1, target,
8318 unsignedp, OPTAB_LIB_WIDEN);
8326 /* Return the alignment in bits of EXP, a pointer valued expression.
8327 But don't return more than MAX_ALIGN no matter what.
8328 The alignment returned is, by default, the alignment of the thing that
8329 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8331 Otherwise, look at the expression to see if we can do better, i.e., if the
8332 expression is actually pointing at an object whose alignment is tighter. */
8335 get_pointer_alignment (exp, max_align)
8339 unsigned align, inner;
8341 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8344 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8345 align = MIN (align, max_align);
8349 switch (TREE_CODE (exp))
8353 case NON_LVALUE_EXPR:
8354 exp = TREE_OPERAND (exp, 0);
8355 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8357 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8358 align = MIN (inner, max_align);
8362 /* If sum of pointer + int, restrict our maximum alignment to that
8363 imposed by the integer. If not, we can't do any better than
8365 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
8368 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8373 exp = TREE_OPERAND (exp, 0);
8377 /* See what we are pointing at and look at its alignment. */
8378 exp = TREE_OPERAND (exp, 0);
8379 if (TREE_CODE (exp) == FUNCTION_DECL)
8380 align = FUNCTION_BOUNDARY;
8381 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8382 align = DECL_ALIGN (exp);
8383 #ifdef CONSTANT_ALIGNMENT
8384 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8385 align = CONSTANT_ALIGNMENT (exp, align);
8387 return MIN (align, max_align);
8395 /* Return the tree node and offset if a given argument corresponds to
8396 a string constant. */
8399 string_constant (arg, ptr_offset)
8405 if (TREE_CODE (arg) == ADDR_EXPR
8406 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8408 *ptr_offset = integer_zero_node;
8409 return TREE_OPERAND (arg, 0);
8411 else if (TREE_CODE (arg) == PLUS_EXPR)
8413 tree arg0 = TREE_OPERAND (arg, 0);
8414 tree arg1 = TREE_OPERAND (arg, 1);
8419 if (TREE_CODE (arg0) == ADDR_EXPR
8420 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8423 return TREE_OPERAND (arg0, 0);
8425 else if (TREE_CODE (arg1) == ADDR_EXPR
8426 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8429 return TREE_OPERAND (arg1, 0);
8436 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8437 way, because it could contain a zero byte in the middle.
8438 TREE_STRING_LENGTH is the size of the character array, not the string.
8440 Unfortunately, string_constant can't access the values of const char
8441 arrays with initializers, so neither can we do so here. */
8451 src = string_constant (src, &offset_node);
8454 max = TREE_STRING_LENGTH (src);
8455 ptr = TREE_STRING_POINTER (src);
8456 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8458 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8459 compute the offset to the following null if we don't know where to
8460 start searching for it. */
8462 for (i = 0; i < max; i++)
8465 /* We don't know the starting offset, but we do know that the string
8466 has no internal zero bytes. We can assume that the offset falls
8467 within the bounds of the string; otherwise, the programmer deserves
8468 what he gets. Subtract the offset from the length of the string,
8470 /* This would perhaps not be valid if we were dealing with named
8471 arrays in addition to literal string constants. */
8472 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8475 /* We have a known offset into the string. Start searching there for
8476 a null character. */
8477 if (offset_node == 0)
8481 /* Did we get a long long offset? If so, punt. */
8482 if (TREE_INT_CST_HIGH (offset_node) != 0)
8484 offset = TREE_INT_CST_LOW (offset_node);
8486 /* If the offset is known to be out of bounds, warn, and call strlen at
8488 if (offset < 0 || offset > max)
8490 warning ("offset outside bounds of constant string");
8493 /* Use strlen to search for the first zero byte. Since any strings
8494 constructed with build_string will have nulls appended, we win even
8495 if we get handed something like (char[4])"abcd".
8497 Since OFFSET is our starting index into the string, no further
8498 calculation is needed. */
8499 return size_int (strlen (ptr + offset));
8503 expand_builtin_return_addr (fndecl_code, count, tem)
8504 enum built_in_function fndecl_code;
8510 /* Some machines need special handling before we can access
8511 arbitrary frames. For example, on the sparc, we must first flush
8512 all register windows to the stack. */
8513 #ifdef SETUP_FRAME_ADDRESSES
8515 SETUP_FRAME_ADDRESSES ();
8518 /* On the sparc, the return address is not in the frame, it is in a
8519 register. There is no way to access it off of the current frame
8520 pointer, but it can be accessed off the previous frame pointer by
8521 reading the value from the register window save area. */
8522 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8523 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8527 /* Scan back COUNT frames to the specified frame. */
8528 for (i = 0; i < count; i++)
8530 /* Assume the dynamic chain pointer is in the word that the
8531 frame address points to, unless otherwise specified. */
8532 #ifdef DYNAMIC_CHAIN_ADDRESS
8533 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8535 tem = memory_address (Pmode, tem);
8536 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8539 /* For __builtin_frame_address, return what we've got. */
8540 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8543 /* For __builtin_return_address, Get the return address from that
8545 #ifdef RETURN_ADDR_RTX
8546 tem = RETURN_ADDR_RTX (count, tem);
8548 tem = memory_address (Pmode,
8549 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8550 tem = gen_rtx_MEM (Pmode, tem);
8555 /* Construct the leading half of a __builtin_setjmp call. Control will
8556 return to RECEIVER_LABEL. This is used directly by sjlj exception
8560 expand_builtin_setjmp_setup (buf_addr, receiver_label)
8564 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8567 #ifdef POINTERS_EXTEND_UNSIGNED
8568 buf_addr = convert_memory_address (Pmode, buf_addr);
8571 buf_addr = force_reg (Pmode, buf_addr);
8575 /* We store the frame pointer and the address of receiver_label in
8576 the buffer and use the rest of it for the stack save area, which
8577 is machine-dependent. */
8579 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8580 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8583 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8584 BUILTIN_SETJMP_FRAME_VALUE);
8585 emit_move_insn (validize_mem
8586 (gen_rtx_MEM (Pmode,
8587 plus_constant (buf_addr,
8588 GET_MODE_SIZE (Pmode)))),
8589 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
8591 stack_save = gen_rtx_MEM (sa_mode,
8592 plus_constant (buf_addr,
8593 2 * GET_MODE_SIZE (Pmode)));
8594 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8596 /* If there is further processing to do, do it. */
8597 #ifdef HAVE_builtin_setjmp_setup
8598 if (HAVE_builtin_setjmp_setup)
8599 emit_insn (gen_builtin_setjmp_setup (buf_addr));
8602 /* Tell optimize_save_area_alloca that extra work is going to
8603 need to go on during alloca. */
8604 current_function_calls_setjmp = 1;
8606 /* Set this so all the registers get saved in our frame; we need to be
8607 able to copy the saved values for any registers from frames we unwind. */
8608 current_function_has_nonlocal_label = 1;
8611 /* Construct the trailing part of a __builtin_setjmp call.
8612 This is used directly by sjlj exception handling code. */
8615 expand_builtin_setjmp_receiver (receiver_label)
8616 rtx receiver_label ATTRIBUTE_UNUSED;
8618 /* Clobber the FP when we get here, so we have to make sure it's
8619 marked as used by this function. */
8620 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8622 /* Mark the static chain as clobbered here so life information
8623 doesn't get messed up for it. */
8624 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8626 /* Now put in the code to restore the frame pointer, and argument
8627 pointer, if needed. The code below is from expand_end_bindings
8628 in stmt.c; see detailed documentation there. */
8629 #ifdef HAVE_nonlocal_goto
8630 if (! HAVE_nonlocal_goto)
8632 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8634 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8635 if (fixed_regs[ARG_POINTER_REGNUM])
8637 #ifdef ELIMINABLE_REGS
8639 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8641 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8642 if (elim_regs[i].from == ARG_POINTER_REGNUM
8643 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8646 if (i == sizeof elim_regs / sizeof elim_regs [0])
8649 /* Now restore our arg pointer from the address at which it
8650 was saved in our stack frame.
8651 If there hasn't be space allocated for it yet, make
8653 if (arg_pointer_save_area == 0)
8654 arg_pointer_save_area
8655 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8656 emit_move_insn (virtual_incoming_args_rtx,
8657 copy_to_reg (arg_pointer_save_area));
8662 #ifdef HAVE_builtin_setjmp_receiver
8663 if (HAVE_builtin_setjmp_receiver)
8664 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
8667 #ifdef HAVE_nonlocal_goto_receiver
8668 if (HAVE_nonlocal_goto_receiver)
8669 emit_insn (gen_nonlocal_goto_receiver ());
8676 /* @@@ This is a kludge. Not all machine descriptions define a blockage
8677 insn, but we must not allow the code we just generated to be reordered
8678 by scheduling. Specifically, the update of the frame pointer must
8679 happen immediately, not later. So emit an ASM_INPUT to act as blockage
8681 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
8685 /* __builtin_setjmp is passed a pointer to an array of five words (not
8686 all will be used on all machines). It operates similarly to the C
8687 library function of the same name, but is more efficient. Much of
8688 the code below (and for longjmp) is copied from the handling of
8691 NOTE: This is intended for use by GNAT and the exception handling
8692 scheme in the compiler and will only work in the method used by
8696 expand_builtin_setjmp (arglist, target)
8700 rtx buf_addr, next_lab, cont_lab;
8703 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8706 if (target == 0 || GET_CODE (target) != REG
8707 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8708 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
8710 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8712 next_lab = gen_label_rtx ();
8713 cont_lab = gen_label_rtx ();
8715 expand_builtin_setjmp_setup (buf_addr, next_lab);
8717 /* Set TARGET to zero and branch to the continue label. */
8718 emit_move_insn (target, const0_rtx);
8719 emit_jump_insn (gen_jump (cont_lab));
8721 emit_label (next_lab);
8723 expand_builtin_setjmp_receiver (next_lab);
8725 /* Set TARGET to one. */
8726 emit_move_insn (target, const1_rtx);
8727 emit_label (cont_lab);
8729 /* Tell flow about the strange goings on. Putting `next_lab' on
8730 `nonlocal_goto_handler_labels' to indicates that function
8731 calls may traverse the arc back to this label. */
8733 current_function_has_nonlocal_label = 1;
8734 nonlocal_goto_handler_labels
8735 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
8741 expand_builtin_longjmp (buf_addr, value)
8742 rtx buf_addr, value;
8745 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8747 #ifdef POINTERS_EXTEND_UNSIGNED
8748 buf_addr = convert_memory_address (Pmode, buf_addr);
8750 buf_addr = force_reg (Pmode, buf_addr);
8752 /* We used to store value in static_chain_rtx, but that fails if pointers
8753 are smaller than integers. We instead require that the user must pass
8754 a second argument of 1, because that is what builtin_setjmp will
8755 return. This also makes EH slightly more efficient, since we are no
8756 longer copying around a value that we don't care about. */
8757 if (value != const1_rtx)
8760 #ifdef HAVE_builtin_longjmp
8761 if (HAVE_builtin_longjmp)
8762 emit_insn (gen_builtin_longjmp (buf_addr));
8766 fp = gen_rtx_MEM (Pmode, buf_addr);
8767 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8768 GET_MODE_SIZE (Pmode)));
8770 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8771 2 * GET_MODE_SIZE (Pmode)));
8773 /* Pick up FP, label, and SP from the block and jump. This code is
8774 from expand_goto in stmt.c; see there for detailed comments. */
8775 #if HAVE_nonlocal_goto
8776 if (HAVE_nonlocal_goto)
8777 /* We have to pass a value to the nonlocal_goto pattern that will
8778 get copied into the static_chain pointer, but it does not matter
8779 what that value is, because builtin_setjmp does not use it. */
8780 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8784 lab = copy_to_reg (lab);
8786 emit_move_insn (hard_frame_pointer_rtx, fp);
8787 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8789 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8790 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8791 emit_indirect_jump (lab);
8797 get_memory_rtx (exp)
8803 mem = gen_rtx_MEM (BLKmode,
8804 memory_address (BLKmode,
8805 expand_expr (exp, NULL_RTX,
8806 ptr_mode, EXPAND_SUM)));
8808 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8810 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8811 if the value is the address of a structure or if the expression is
8812 cast to a pointer to structure type. */
8815 while (TREE_CODE (exp) == NOP_EXPR)
8817 tree cast_type = TREE_TYPE (exp);
8818 if (TREE_CODE (cast_type) == POINTER_TYPE
8819 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8824 exp = TREE_OPERAND (exp, 0);
8827 if (is_aggregate == 0)
8831 if (TREE_CODE (exp) == ADDR_EXPR)
8832 /* If this is the address of an object, check whether the
8833 object is an array. */
8834 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8836 type = TREE_TYPE (TREE_TYPE (exp));
8837 is_aggregate = AGGREGATE_TYPE_P (type);
8840 MEM_SET_IN_STRUCT_P (mem, is_aggregate);
8845 /* Expand an expression EXP that calls a built-in function,
8846 with result going to TARGET if that's convenient
8847 (and in mode MODE if that's convenient).
8848 SUBTARGET may be used as the target for computing one of EXP's operands.
8849 IGNORE is nonzero if the value is to be ignored. */
8851 #define CALLED_AS_BUILT_IN(NODE) \
8852 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8855 expand_builtin (exp, target, subtarget, mode, ignore)
8859 enum machine_mode mode;
8862 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8863 tree arglist = TREE_OPERAND (exp, 1);
8866 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8867 optab builtin_optab;
8869 switch (DECL_FUNCTION_CODE (fndecl))
8874 /* build_function_call changes these into ABS_EXPR. */
8879 /* Treat these like sqrt, but only if the user asks for them. */
8880 if (! flag_fast_math)
8882 case BUILT_IN_FSQRT:
8883 /* If not optimizing, call the library function. */
8888 /* Arg could be wrong type if user redeclared this fcn wrong. */
8889 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8892 /* Stabilize and compute the argument. */
8893 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8894 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8896 exp = copy_node (exp);
8897 arglist = copy_node (arglist);
8898 TREE_OPERAND (exp, 1) = arglist;
8899 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8901 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8903 /* Make a suitable register to place result in. */
8904 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8909 switch (DECL_FUNCTION_CODE (fndecl))
8912 builtin_optab = sin_optab; break;
8914 builtin_optab = cos_optab; break;
8915 case BUILT_IN_FSQRT:
8916 builtin_optab = sqrt_optab; break;
8921 /* Compute into TARGET.
8922 Set TARGET to wherever the result comes back. */
8923 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8924 builtin_optab, op0, target, 0);
8926 /* If we were unable to expand via the builtin, stop the
8927 sequence (without outputting the insns) and break, causing
8928 a call to the library function. */
8935 /* Check the results by default. But if flag_fast_math is turned on,
8936 then assume sqrt will always be called with valid arguments. */
8938 if (flag_errno_math && ! flag_fast_math)
8940 /* Don't define the builtin FP instructions
8941 if your machine is not IEEE. */
8942 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8945 lab1 = gen_label_rtx ();
8947 /* Test the result; if it is NaN, set errno=EDOM because
8948 the argument was not in the domain. */
8949 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
8954 #ifdef GEN_ERRNO_RTX
8955 rtx errno_rtx = GEN_ERRNO_RTX;
8958 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8961 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8964 /* We can't set errno=EDOM directly; let the library call do it.
8965 Pop the arguments right away in case the call gets deleted. */
8967 expand_call (exp, target, 0);
8974 /* Output the entire sequence. */
8975 insns = get_insns ();
8984 /* __builtin_apply_args returns block of memory allocated on
8985 the stack into which is stored the arg pointer, structure
8986 value address, static chain, and all the registers that might
8987 possibly be used in performing a function call. The code is
8988 moved to the start of the function so the incoming values are
8990 case BUILT_IN_APPLY_ARGS:
8991 /* Don't do __builtin_apply_args more than once in a function.
8992 Save the result of the first call and reuse it. */
8993 if (apply_args_value != 0)
8994 return apply_args_value;
8996 /* When this function is called, it means that registers must be
8997 saved on entry to this function. So we migrate the
8998 call to the first insn of this function. */
9003 temp = expand_builtin_apply_args ();
9007 apply_args_value = temp;
9009 /* Put the sequence after the NOTE that starts the function.
9010 If this is inside a SEQUENCE, make the outer-level insn
9011 chain current, so the code is placed at the start of the
9013 push_topmost_sequence ();
9014 emit_insns_before (seq, NEXT_INSN (get_insns ()));
9015 pop_topmost_sequence ();
9019 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
9020 FUNCTION with a copy of the parameters described by
9021 ARGUMENTS, and ARGSIZE. It returns a block of memory
9022 allocated on the stack into which is stored all the registers
9023 that might possibly be used for returning the result of a
9024 function. ARGUMENTS is the value returned by
9025 __builtin_apply_args. ARGSIZE is the number of bytes of
9026 arguments that must be copied. ??? How should this value be
9027 computed? We'll also need a safe worst case value for varargs
9029 case BUILT_IN_APPLY:
9031 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9032 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
9033 || TREE_CHAIN (arglist) == 0
9034 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9035 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9036 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9044 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
9045 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
9047 return expand_builtin_apply (ops[0], ops[1], ops[2]);
9050 /* __builtin_return (RESULT) causes the function to return the
9051 value described by RESULT. RESULT is address of the block of
9052 memory returned by __builtin_apply. */
9053 case BUILT_IN_RETURN:
9055 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9056 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
9057 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
9058 NULL_RTX, VOIDmode, 0));
9061 case BUILT_IN_SAVEREGS:
9062 /* Don't do __builtin_saveregs more than once in a function.
9063 Save the result of the first call and reuse it. */
9064 if (saveregs_value != 0)
9065 return saveregs_value;
9067 /* When this function is called, it means that registers must be
9068 saved on entry to this function. So we migrate the
9069 call to the first insn of this function. */
9073 /* Now really call the function. `expand_call' does not call
9074 expand_builtin, so there is no danger of infinite recursion here. */
9077 #ifdef EXPAND_BUILTIN_SAVEREGS
9078 /* Do whatever the machine needs done in this case. */
9079 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
9081 /* The register where the function returns its value
9082 is likely to have something else in it, such as an argument.
9083 So preserve that register around the call. */
9085 if (value_mode != VOIDmode)
9087 rtx valreg = hard_libcall_value (value_mode);
9088 rtx saved_valreg = gen_reg_rtx (value_mode);
9090 emit_move_insn (saved_valreg, valreg);
9091 temp = expand_call (exp, target, ignore);
9092 emit_move_insn (valreg, saved_valreg);
9095 /* Generate the call, putting the value in a pseudo. */
9096 temp = expand_call (exp, target, ignore);
9102 saveregs_value = temp;
9104 /* Put the sequence after the NOTE that starts the function.
9105 If this is inside a SEQUENCE, make the outer-level insn
9106 chain current, so the code is placed at the start of the
9108 push_topmost_sequence ();
9109 emit_insns_before (seq, NEXT_INSN (get_insns ()));
9110 pop_topmost_sequence ();
9114 /* __builtin_args_info (N) returns word N of the arg space info
9115 for the current function. The number and meanings of words
9116 is controlled by the definition of CUMULATIVE_ARGS. */
9117 case BUILT_IN_ARGS_INFO:
9119 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
9120 int *word_ptr = (int *) ¤t_function_args_info;
9122 /* These are used by the code below that is if 0'ed away */
9124 tree type, elts, result;
9127 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
9128 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
9129 __FILE__, __LINE__);
9133 tree arg = TREE_VALUE (arglist);
9134 if (TREE_CODE (arg) != INTEGER_CST)
9135 error ("argument of `__builtin_args_info' must be constant");
9138 int wordnum = TREE_INT_CST_LOW (arg);
9140 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
9141 error ("argument of `__builtin_args_info' out of range");
9143 return GEN_INT (word_ptr[wordnum]);
9147 error ("missing argument in `__builtin_args_info'");
9152 for (i = 0; i < nwords; i++)
9153 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
9155 type = build_array_type (integer_type_node,
9156 build_index_type (build_int_2 (nwords, 0)));
9157 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
9158 TREE_CONSTANT (result) = 1;
9159 TREE_STATIC (result) = 1;
9160 result = build (INDIRECT_REF, build_pointer_type (type), result);
9161 TREE_CONSTANT (result) = 1;
9162 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
9166 /* Return the address of the first anonymous stack arg. */
9167 case BUILT_IN_NEXT_ARG:
9169 tree fntype = TREE_TYPE (current_function_decl);
9171 if ((TYPE_ARG_TYPES (fntype) == 0
9172 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
9174 && ! current_function_varargs)
9176 error ("`va_start' used in function with fixed args");
9182 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9183 tree arg = TREE_VALUE (arglist);
9185 /* Strip off all nops for the sake of the comparison. This
9186 is not quite the same as STRIP_NOPS. It does more.
9187 We must also strip off INDIRECT_EXPR for C++ reference
9189 while (TREE_CODE (arg) == NOP_EXPR
9190 || TREE_CODE (arg) == CONVERT_EXPR
9191 || TREE_CODE (arg) == NON_LVALUE_EXPR
9192 || TREE_CODE (arg) == INDIRECT_REF)
9193 arg = TREE_OPERAND (arg, 0);
9194 if (arg != last_parm)
9195 warning ("second parameter of `va_start' not last named argument");
9197 else if (! current_function_varargs)
9198 /* Evidently an out of date version of <stdarg.h>; can't validate
9199 va_start's second argument, but can still work as intended. */
9200 warning ("`__builtin_next_arg' called without an argument");
9203 return expand_binop (Pmode, add_optab,
9204 current_function_internal_arg_pointer,
9205 current_function_arg_offset_rtx,
9206 NULL_RTX, 0, OPTAB_LIB_WIDEN);
9208 case BUILT_IN_CLASSIFY_TYPE:
9211 tree type = TREE_TYPE (TREE_VALUE (arglist));
9212 enum tree_code code = TREE_CODE (type);
9213 if (code == VOID_TYPE)
9214 return GEN_INT (void_type_class);
9215 if (code == INTEGER_TYPE)
9216 return GEN_INT (integer_type_class);
9217 if (code == CHAR_TYPE)
9218 return GEN_INT (char_type_class);
9219 if (code == ENUMERAL_TYPE)
9220 return GEN_INT (enumeral_type_class);
9221 if (code == BOOLEAN_TYPE)
9222 return GEN_INT (boolean_type_class);
9223 if (code == POINTER_TYPE)
9224 return GEN_INT (pointer_type_class);
9225 if (code == REFERENCE_TYPE)
9226 return GEN_INT (reference_type_class);
9227 if (code == OFFSET_TYPE)
9228 return GEN_INT (offset_type_class);
9229 if (code == REAL_TYPE)
9230 return GEN_INT (real_type_class);
9231 if (code == COMPLEX_TYPE)
9232 return GEN_INT (complex_type_class);
9233 if (code == FUNCTION_TYPE)
9234 return GEN_INT (function_type_class);
9235 if (code == METHOD_TYPE)
9236 return GEN_INT (method_type_class);
9237 if (code == RECORD_TYPE)
9238 return GEN_INT (record_type_class);
9239 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
9240 return GEN_INT (union_type_class);
9241 if (code == ARRAY_TYPE)
9243 if (TYPE_STRING_FLAG (type))
9244 return GEN_INT (string_type_class);
9246 return GEN_INT (array_type_class);
9248 if (code == SET_TYPE)
9249 return GEN_INT (set_type_class);
9250 if (code == FILE_TYPE)
9251 return GEN_INT (file_type_class);
9252 if (code == LANG_TYPE)
9253 return GEN_INT (lang_type_class);
9255 return GEN_INT (no_type_class);
9257 case BUILT_IN_CONSTANT_P:
9262 tree arg = TREE_VALUE (arglist);
9265 /* We return 1 for a numeric type that's known to be a constant
9266 value at compile-time or for an aggregate type that's a
9267 literal constant. */
9270 /* If we know this is a constant, emit the constant of one. */
9271 if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
9272 || (TREE_CODE (arg) == CONSTRUCTOR
9273 && TREE_CONSTANT (arg))
9274 || (TREE_CODE (arg) == ADDR_EXPR
9275 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
9278 /* If we aren't going to be running CSE or this expression
9279 has side effects, show we don't know it to be a constant.
9280 Likewise if it's a pointer or aggregate type since in those
9281 case we only want literals, since those are only optimized
9282 when generating RTL, not later. */
9283 if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
9284 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9285 || POINTER_TYPE_P (TREE_TYPE (arg)))
9288 /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
9289 chance to see if it can deduce whether ARG is constant. */
9291 tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
9292 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
9296 case BUILT_IN_FRAME_ADDRESS:
9297 /* The argument must be a nonnegative integer constant.
9298 It counts the number of frames to scan up the stack.
9299 The value is the address of that frame. */
9300 case BUILT_IN_RETURN_ADDRESS:
9301 /* The argument must be a nonnegative integer constant.
9302 It counts the number of frames to scan up the stack.
9303 The value is the return address saved in that frame. */
9305 /* Warning about missing arg was already issued. */
9307 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9308 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9310 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9311 error ("invalid arg to `__builtin_frame_address'");
9313 error ("invalid arg to `__builtin_return_address'");
9318 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9319 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9320 hard_frame_pointer_rtx);
9322 /* Some ports cannot access arbitrary stack frames. */
9325 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9326 warning ("unsupported arg to `__builtin_frame_address'");
9328 warning ("unsupported arg to `__builtin_return_address'");
9332 /* For __builtin_frame_address, return what we've got. */
9333 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9336 if (GET_CODE (tem) != REG
9337 && ! CONSTANT_P (tem))
9338 tem = copy_to_mode_reg (Pmode, tem);
9342 /* Returns the address of the area where the structure is returned.
9344 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9346 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9347 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9350 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9352 case BUILT_IN_ALLOCA:
9354 /* Arg could be non-integer if user redeclared this fcn wrong. */
9355 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9358 /* Compute the argument. */
9359 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
9361 /* Allocate the desired space. */
9362 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
9365 /* If not optimizing, call the library function. */
9366 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9370 /* Arg could be non-integer if user redeclared this fcn wrong. */
9371 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9374 /* Compute the argument. */
9375 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9376 /* Compute ffs, into TARGET if possible.
9377 Set TARGET to wherever the result comes back. */
9378 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9379 ffs_optab, op0, target, 1);
9384 case BUILT_IN_STRLEN:
9385 /* If not optimizing, call the library function. */
9386 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9390 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9391 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9395 tree src = TREE_VALUE (arglist);
9396 tree len = c_strlen (src);
9399 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9401 rtx result, src_rtx, char_rtx;
9402 enum machine_mode insn_mode = value_mode, char_mode;
9403 enum insn_code icode;
9405 /* If the length is known, just return it. */
9407 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
9409 /* If SRC is not a pointer type, don't do this operation inline. */
9413 /* Call a function if we can't compute strlen in the right mode. */
9415 while (insn_mode != VOIDmode)
9417 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9418 if (icode != CODE_FOR_nothing)
9421 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9423 if (insn_mode == VOIDmode)
9426 /* Make a place to write the result of the instruction. */
9429 && GET_CODE (result) == REG
9430 && GET_MODE (result) == insn_mode
9431 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9432 result = gen_reg_rtx (insn_mode);
9434 /* Make sure the operands are acceptable to the predicates. */
9436 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9437 result = gen_reg_rtx (insn_mode);
9438 src_rtx = memory_address (BLKmode,
9439 expand_expr (src, NULL_RTX, ptr_mode,
9442 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9443 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9445 /* Check the string is readable and has an end. */
9446 if (current_function_check_memory_usage)
9447 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9449 GEN_INT (MEMORY_USE_RO),
9450 TYPE_MODE (integer_type_node));
9452 char_rtx = const0_rtx;
9453 char_mode = insn_operand_mode[(int)icode][2];
9454 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9455 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9457 emit_insn (GEN_FCN (icode) (result,
9458 gen_rtx_MEM (BLKmode, src_rtx),
9459 char_rtx, GEN_INT (align)));
9461 /* Return the value in the proper mode for this function. */
9462 if (GET_MODE (result) == value_mode)
9464 else if (target != 0)
9466 convert_move (target, result, 0);
9470 return convert_to_mode (value_mode, result, 0);
9473 case BUILT_IN_STRCPY:
9474 /* If not optimizing, call the library function. */
9475 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9479 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9480 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9481 || TREE_CHAIN (arglist) == 0
9482 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9486 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9491 len = size_binop (PLUS_EXPR, len, integer_one_node);
9493 chainon (arglist, build_tree_list (NULL_TREE, len));
9497 case BUILT_IN_MEMCPY:
9498 /* If not optimizing, call the library function. */
9499 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9503 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9504 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9505 || TREE_CHAIN (arglist) == 0
9506 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9508 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9509 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9510 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9515 tree dest = TREE_VALUE (arglist);
9516 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9517 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9520 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9522 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9523 rtx dest_mem, src_mem, dest_addr, len_rtx;
9525 /* If either SRC or DEST is not a pointer type, don't do
9526 this operation in-line. */
9527 if (src_align == 0 || dest_align == 0)
9529 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9530 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9534 dest_mem = get_memory_rtx (dest);
9535 src_mem = get_memory_rtx (src);
9536 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9538 /* Just copy the rights of SRC to the rights of DEST. */
9539 if (current_function_check_memory_usage)
9540 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9541 XEXP (dest_mem, 0), Pmode,
9542 XEXP (src_mem, 0), Pmode,
9543 len_rtx, TYPE_MODE (sizetype));
9545 /* Copy word part most expediently. */
9547 = emit_block_move (dest_mem, src_mem, len_rtx,
9548 MIN (src_align, dest_align));
9551 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9556 case BUILT_IN_MEMSET:
9557 /* If not optimizing, call the library function. */
9558 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9562 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9563 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9564 || TREE_CHAIN (arglist) == 0
9565 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9567 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9569 != (TREE_CODE (TREE_TYPE
9571 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9575 tree dest = TREE_VALUE (arglist);
9576 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9577 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9580 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9581 rtx dest_mem, dest_addr, len_rtx;
9583 /* If DEST is not a pointer type, don't do this
9584 operation in-line. */
9585 if (dest_align == 0)
9588 /* If the arguments have side-effects, then we can only evaluate
9589 them at most once. The following code evaluates them twice if
9590 they are not constants because we break out to expand_call
9591 in that case. They can't be constants if they have side-effects
9592 so we can check for that first. Alternatively, we could call
9593 save_expr to make multiple evaluation safe. */
9594 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9597 /* If VAL is not 0, don't do this operation in-line. */
9598 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9601 /* If LEN does not expand to a constant, don't do this
9602 operation in-line. */
9603 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9604 if (GET_CODE (len_rtx) != CONST_INT)
9607 dest_mem = get_memory_rtx (dest);
9609 /* Just check DST is writable and mark it as readable. */
9610 if (current_function_check_memory_usage)
9611 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9612 XEXP (dest_mem, 0), Pmode,
9613 len_rtx, TYPE_MODE (sizetype),
9614 GEN_INT (MEMORY_USE_WO),
9615 TYPE_MODE (integer_type_node));
9618 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9621 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9626 /* These comparison functions need an instruction that returns an actual
9627 index. An ordinary compare that just sets the condition codes
9629 #ifdef HAVE_cmpstrsi
9630 case BUILT_IN_STRCMP:
9631 /* If not optimizing, call the library function. */
9632 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9635 /* If we need to check memory accesses, call the library function. */
9636 if (current_function_check_memory_usage)
9640 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9641 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9642 || TREE_CHAIN (arglist) == 0
9643 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9645 else if (!HAVE_cmpstrsi)
9648 tree arg1 = TREE_VALUE (arglist);
9649 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9652 len = c_strlen (arg1);
9654 len = size_binop (PLUS_EXPR, integer_one_node, len);
9655 len2 = c_strlen (arg2);
9657 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9659 /* If we don't have a constant length for the first, use the length
9660 of the second, if we know it. We don't require a constant for
9661 this case; some cost analysis could be done if both are available
9662 but neither is constant. For now, assume they're equally cheap.
9664 If both strings have constant lengths, use the smaller. This
9665 could arise if optimization results in strcpy being called with
9666 two fixed strings, or if the code was machine-generated. We should
9667 add some code to the `memcmp' handler below to deal with such
9668 situations, someday. */
9669 if (!len || TREE_CODE (len) != INTEGER_CST)
9676 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9678 if (tree_int_cst_lt (len2, len))
9682 chainon (arglist, build_tree_list (NULL_TREE, len));
9686 case BUILT_IN_MEMCMP:
9687 /* If not optimizing, call the library function. */
9688 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9691 /* If we need to check memory accesses, call the library function. */
9692 if (current_function_check_memory_usage)
9696 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9697 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9698 || TREE_CHAIN (arglist) == 0
9699 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9700 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9701 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9703 else if (!HAVE_cmpstrsi)
9706 tree arg1 = TREE_VALUE (arglist);
9707 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9708 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9712 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9714 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9715 enum machine_mode insn_mode
9716 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9718 /* If we don't have POINTER_TYPE, call the function. */
9719 if (arg1_align == 0 || arg2_align == 0)
9721 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9722 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9726 /* Make a place to write the result of the instruction. */
9729 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9730 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9731 result = gen_reg_rtx (insn_mode);
9733 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9734 get_memory_rtx (arg2),
9735 expand_expr (len, NULL_RTX, VOIDmode, 0),
9736 GEN_INT (MIN (arg1_align, arg2_align))));
9738 /* Return the value in the proper mode for this function. */
9739 mode = TYPE_MODE (TREE_TYPE (exp));
9740 if (GET_MODE (result) == mode)
9742 else if (target != 0)
9744 convert_move (target, result, 0);
9748 return convert_to_mode (mode, result, 0);
9751 case BUILT_IN_STRCMP:
9752 case BUILT_IN_MEMCMP:
9756 case BUILT_IN_SETJMP:
9757 target = expand_builtin_setjmp (arglist, target);
9762 /* __builtin_longjmp is passed a pointer to an array of five words.
9763 It's similar to the C library longjmp function but works with
9764 __builtin_setjmp above. */
9765 case BUILT_IN_LONGJMP:
9766 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9767 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9771 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9773 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9774 NULL_RTX, VOIDmode, 0);
9776 if (value != const1_rtx)
9778 error ("__builtin_longjmp second argument must be 1");
9782 expand_builtin_longjmp (buf_addr, value);
9789 emit_insn (gen_trap ());
9792 error ("__builtin_trap not supported by this target");
9796 /* Various hooks for the DWARF 2 __throw routine. */
9797 case BUILT_IN_UNWIND_INIT:
9798 expand_builtin_unwind_init ();
9800 case BUILT_IN_DWARF_CFA:
9801 return virtual_cfa_rtx;
9802 #ifdef DWARF2_UNWIND_INFO
9803 case BUILT_IN_DWARF_FP_REGNUM:
9804 return expand_builtin_dwarf_fp_regnum ();
9805 case BUILT_IN_DWARF_REG_SIZE:
9806 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9808 case BUILT_IN_FROB_RETURN_ADDR:
9809 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9810 case BUILT_IN_EXTRACT_RETURN_ADDR:
9811 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9812 case BUILT_IN_EH_RETURN:
9813 expand_builtin_eh_return (TREE_VALUE (arglist),
9814 TREE_VALUE (TREE_CHAIN (arglist)),
9815 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9818 default: /* just do library call, if unknown builtin */
9819 error ("built-in function `%s' not currently supported",
9820 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9823 /* The switch statement above can drop through to cause the function
9824 to be called normally. */
9826 return expand_call (exp, target, ignore);
9829 /* Built-in functions to perform an untyped call and return. */
9831 /* For each register that may be used for calling a function, this
9832 gives a mode used to copy the register's value. VOIDmode indicates
9833 the register is not used for calling a function. If the machine
9834 has register windows, this gives only the outbound registers.
9835 INCOMING_REGNO gives the corresponding inbound register. */
9836 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9838 /* For each register that may be used for returning values, this gives
9839 a mode used to copy the register's value. VOIDmode indicates the
9840 register is not used for returning values. If the machine has
9841 register windows, this gives only the outbound registers.
9842 INCOMING_REGNO gives the corresponding inbound register. */
9843 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9845 /* For each register that may be used for calling a function, this
9846 gives the offset of that register into the block returned by
9847 __builtin_apply_args. 0 indicates that the register is not
9848 used for calling a function. */
9849 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9851 /* Return the offset of register REGNO into the block returned by
9852 __builtin_apply_args. This is not declared static, since it is
9853 needed in objc-act.c. */
9856 apply_args_register_offset (regno)
9861 /* Arguments are always put in outgoing registers (in the argument
9862 block) if such make sense. */
9863 #ifdef OUTGOING_REGNO
9864 regno = OUTGOING_REGNO(regno);
9866 return apply_args_reg_offset[regno];
9869 /* Return the size required for the block returned by __builtin_apply_args,
9870 and initialize apply_args_mode. */
9875 static int size = -1;
9877 enum machine_mode mode;
9879 /* The values computed by this function never change. */
9882 /* The first value is the incoming arg-pointer. */
9883 size = GET_MODE_SIZE (Pmode);
9885 /* The second value is the structure value address unless this is
9886 passed as an "invisible" first argument. */
9887 if (struct_value_rtx)
9888 size += GET_MODE_SIZE (Pmode);
9890 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9891 if (FUNCTION_ARG_REGNO_P (regno))
9893 /* Search for the proper mode for copying this register's
9894 value. I'm not sure this is right, but it works so far. */
9895 enum machine_mode best_mode = VOIDmode;
9897 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9899 mode = GET_MODE_WIDER_MODE (mode))
9900 if (HARD_REGNO_MODE_OK (regno, mode)
9901 && HARD_REGNO_NREGS (regno, mode) == 1)
9904 if (best_mode == VOIDmode)
9905 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9907 mode = GET_MODE_WIDER_MODE (mode))
9908 if (HARD_REGNO_MODE_OK (regno, mode)
9909 && (mov_optab->handlers[(int) mode].insn_code
9910 != CODE_FOR_nothing))
9914 if (mode == VOIDmode)
9917 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9918 if (size % align != 0)
9919 size = CEIL (size, align) * align;
9920 apply_args_reg_offset[regno] = size;
9921 size += GET_MODE_SIZE (mode);
9922 apply_args_mode[regno] = mode;
9926 apply_args_mode[regno] = VOIDmode;
9927 apply_args_reg_offset[regno] = 0;
9933 /* Return the size required for the block returned by __builtin_apply,
9934 and initialize apply_result_mode. */
9937 apply_result_size ()
9939 static int size = -1;
9941 enum machine_mode mode;
9943 /* The values computed by this function never change. */
9948 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9949 if (FUNCTION_VALUE_REGNO_P (regno))
9951 /* Search for the proper mode for copying this register's
9952 value. I'm not sure this is right, but it works so far. */
9953 enum machine_mode best_mode = VOIDmode;
9955 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9957 mode = GET_MODE_WIDER_MODE (mode))
9958 if (HARD_REGNO_MODE_OK (regno, mode))
9961 if (best_mode == VOIDmode)
9962 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9964 mode = GET_MODE_WIDER_MODE (mode))
9965 if (HARD_REGNO_MODE_OK (regno, mode)
9966 && (mov_optab->handlers[(int) mode].insn_code
9967 != CODE_FOR_nothing))
9971 if (mode == VOIDmode)
9974 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9975 if (size % align != 0)
9976 size = CEIL (size, align) * align;
9977 size += GET_MODE_SIZE (mode);
9978 apply_result_mode[regno] = mode;
9981 apply_result_mode[regno] = VOIDmode;
9983 /* Allow targets that use untyped_call and untyped_return to override
9984 the size so that machine-specific information can be stored here. */
9985 #ifdef APPLY_RESULT_SIZE
9986 size = APPLY_RESULT_SIZE;
9992 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9993 /* Create a vector describing the result block RESULT. If SAVEP is true,
9994 the result block is used to save the values; otherwise it is used to
9995 restore the values. */
9998 result_vector (savep, result)
10002 int regno, size, align, nelts;
10003 enum machine_mode mode;
10005 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
10008 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10009 if ((mode = apply_result_mode[regno]) != VOIDmode)
10011 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10012 if (size % align != 0)
10013 size = CEIL (size, align) * align;
10014 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
10015 mem = change_address (result, mode,
10016 plus_constant (XEXP (result, 0), size));
10017 savevec[nelts++] = (savep
10018 ? gen_rtx_SET (VOIDmode, mem, reg)
10019 : gen_rtx_SET (VOIDmode, reg, mem));
10020 size += GET_MODE_SIZE (mode);
10022 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
10024 #endif /* HAVE_untyped_call or HAVE_untyped_return */
10026 /* Save the state required to perform an untyped call with the same
10027 arguments as were passed to the current function. */
10030 expand_builtin_apply_args ()
10033 int size, align, regno;
10034 enum machine_mode mode;
10036 /* Create a block where the arg-pointer, structure value address,
10037 and argument registers can be saved. */
10038 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
10040 /* Walk past the arg-pointer and structure value address. */
10041 size = GET_MODE_SIZE (Pmode);
10042 if (struct_value_rtx)
10043 size += GET_MODE_SIZE (Pmode);
10045 /* Save each register used in calling a function to the block. */
10046 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10047 if ((mode = apply_args_mode[regno]) != VOIDmode)
10051 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10052 if (size % align != 0)
10053 size = CEIL (size, align) * align;
10055 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10058 /* For reg-stack.c's stack register household.
10059 Compare with a similar piece of code in function.c. */
10061 emit_insn (gen_rtx_USE (mode, tem));
10064 emit_move_insn (change_address (registers, mode,
10065 plus_constant (XEXP (registers, 0),
10068 size += GET_MODE_SIZE (mode);
10071 /* Save the arg pointer to the block. */
10072 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
10073 copy_to_reg (virtual_incoming_args_rtx));
10074 size = GET_MODE_SIZE (Pmode);
10076 /* Save the structure value address unless this is passed as an
10077 "invisible" first argument. */
10078 if (struct_value_incoming_rtx)
10080 emit_move_insn (change_address (registers, Pmode,
10081 plus_constant (XEXP (registers, 0),
10083 copy_to_reg (struct_value_incoming_rtx));
10084 size += GET_MODE_SIZE (Pmode);
10087 /* Return the address of the block. */
10088 return copy_addr_to_reg (XEXP (registers, 0));
10091 /* Perform an untyped call and save the state required to perform an
10092 untyped return of whatever value was returned by the given function. */
10095 expand_builtin_apply (function, arguments, argsize)
10096 rtx function, arguments, argsize;
10098 int size, align, regno;
10099 enum machine_mode mode;
10100 rtx incoming_args, result, reg, dest, call_insn;
10101 rtx old_stack_level = 0;
10102 rtx call_fusage = 0;
10104 /* Create a block where the return registers can be saved. */
10105 result = assign_stack_local (BLKmode, apply_result_size (), -1);
10107 /* ??? The argsize value should be adjusted here. */
10109 /* Fetch the arg pointer from the ARGUMENTS block. */
10110 incoming_args = gen_reg_rtx (Pmode);
10111 emit_move_insn (incoming_args,
10112 gen_rtx_MEM (Pmode, arguments));
10113 #ifndef STACK_GROWS_DOWNWARD
10114 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
10115 incoming_args, 0, OPTAB_LIB_WIDEN);
10118 /* Perform postincrements before actually calling the function. */
10121 /* Push a new argument block and copy the arguments. */
10122 do_pending_stack_adjust ();
10124 /* Save the stack with nonlocal if available */
10125 #ifdef HAVE_save_stack_nonlocal
10126 if (HAVE_save_stack_nonlocal)
10127 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
10130 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
10132 /* Push a block of memory onto the stack to store the memory arguments.
10133 Save the address in a register, and copy the memory arguments. ??? I
10134 haven't figured out how the calling convention macros effect this,
10135 but it's likely that the source and/or destination addresses in
10136 the block copy will need updating in machine specific ways. */
10137 dest = allocate_dynamic_stack_space (argsize, 0, 0);
10138 emit_block_move (gen_rtx_MEM (BLKmode, dest),
10139 gen_rtx_MEM (BLKmode, incoming_args),
10141 PARM_BOUNDARY / BITS_PER_UNIT);
10143 /* Refer to the argument block. */
10144 apply_args_size ();
10145 arguments = gen_rtx_MEM (BLKmode, arguments);
10147 /* Walk past the arg-pointer and structure value address. */
10148 size = GET_MODE_SIZE (Pmode);
10149 if (struct_value_rtx)
10150 size += GET_MODE_SIZE (Pmode);
10152 /* Restore each of the registers previously saved. Make USE insns
10153 for each of these registers for use in making the call. */
10154 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10155 if ((mode = apply_args_mode[regno]) != VOIDmode)
10157 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10158 if (size % align != 0)
10159 size = CEIL (size, align) * align;
10160 reg = gen_rtx_REG (mode, regno);
10161 emit_move_insn (reg,
10162 change_address (arguments, mode,
10163 plus_constant (XEXP (arguments, 0),
10166 use_reg (&call_fusage, reg);
10167 size += GET_MODE_SIZE (mode);
10170 /* Restore the structure value address unless this is passed as an
10171 "invisible" first argument. */
10172 size = GET_MODE_SIZE (Pmode);
10173 if (struct_value_rtx)
10175 rtx value = gen_reg_rtx (Pmode);
10176 emit_move_insn (value,
10177 change_address (arguments, Pmode,
10178 plus_constant (XEXP (arguments, 0),
10180 emit_move_insn (struct_value_rtx, value);
10181 if (GET_CODE (struct_value_rtx) == REG)
10182 use_reg (&call_fusage, struct_value_rtx);
10183 size += GET_MODE_SIZE (Pmode);
10186 /* All arguments and registers used for the call are set up by now! */
10187 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
10189 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
10190 and we don't want to load it into a register as an optimization,
10191 because prepare_call_address already did it if it should be done. */
10192 if (GET_CODE (function) != SYMBOL_REF)
10193 function = memory_address (FUNCTION_MODE, function);
10195 /* Generate the actual call instruction and save the return value. */
10196 #ifdef HAVE_untyped_call
10197 if (HAVE_untyped_call)
10198 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
10199 result, result_vector (1, result)));
10202 #ifdef HAVE_call_value
10203 if (HAVE_call_value)
10207 /* Locate the unique return register. It is not possible to
10208 express a call that sets more than one return register using
10209 call_value; use untyped_call for that. In fact, untyped_call
10210 only needs to save the return registers in the given block. */
10211 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10212 if ((mode = apply_result_mode[regno]) != VOIDmode)
10215 abort (); /* HAVE_untyped_call required. */
10216 valreg = gen_rtx_REG (mode, regno);
10219 emit_call_insn (gen_call_value (valreg,
10220 gen_rtx_MEM (FUNCTION_MODE, function),
10221 const0_rtx, NULL_RTX, const0_rtx));
10223 emit_move_insn (change_address (result, GET_MODE (valreg),
10231 /* Find the CALL insn we just emitted. */
10232 for (call_insn = get_last_insn ();
10233 call_insn && GET_CODE (call_insn) != CALL_INSN;
10234 call_insn = PREV_INSN (call_insn))
10240 /* Put the register usage information on the CALL. If there is already
10241 some usage information, put ours at the end. */
10242 if (CALL_INSN_FUNCTION_USAGE (call_insn))
10246 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10247 link = XEXP (link, 1))
10250 XEXP (link, 1) = call_fusage;
10253 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
10255 /* Restore the stack. */
10256 #ifdef HAVE_save_stack_nonlocal
10257 if (HAVE_save_stack_nonlocal)
10258 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10261 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10263 /* Return the address of the result block. */
10264 return copy_addr_to_reg (XEXP (result, 0));
10267 /* Perform an untyped return. */
10270 expand_builtin_return (result)
10273 int size, align, regno;
10274 enum machine_mode mode;
10276 rtx call_fusage = 0;
10278 apply_result_size ();
10279 result = gen_rtx_MEM (BLKmode, result);
10281 #ifdef HAVE_untyped_return
10282 if (HAVE_untyped_return)
10284 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10290 /* Restore the return value and note that each value is used. */
10292 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10293 if ((mode = apply_result_mode[regno]) != VOIDmode)
10295 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10296 if (size % align != 0)
10297 size = CEIL (size, align) * align;
10298 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10299 emit_move_insn (reg,
10300 change_address (result, mode,
10301 plus_constant (XEXP (result, 0),
10304 push_to_sequence (call_fusage);
10305 emit_insn (gen_rtx_USE (VOIDmode, reg));
10306 call_fusage = get_insns ();
10308 size += GET_MODE_SIZE (mode);
10311 /* Put the USE insns before the return. */
10312 emit_insns (call_fusage);
10314 /* Return whatever values was restored by jumping directly to the end
10315 of the function. */
10316 expand_null_return ();
10319 /* Expand code for a post- or pre- increment or decrement
10320 and return the RTX for the result.
10321 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
10324 expand_increment (exp, post, ignore)
10328 register rtx op0, op1;
10329 register rtx temp, value;
10330 register tree incremented = TREE_OPERAND (exp, 0);
10331 optab this_optab = add_optab;
10333 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10334 int op0_is_copy = 0;
10335 int single_insn = 0;
10336 /* 1 means we can't store into OP0 directly,
10337 because it is a subreg narrower than a word,
10338 and we don't dare clobber the rest of the word. */
10339 int bad_subreg = 0;
10341 /* Stabilize any component ref that might need to be
10342 evaluated more than once below. */
10344 || TREE_CODE (incremented) == BIT_FIELD_REF
10345 || (TREE_CODE (incremented) == COMPONENT_REF
10346 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10347 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10348 incremented = stabilize_reference (incremented);
10349 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
10350 ones into save exprs so that they don't accidentally get evaluated
10351 more than once by the code below. */
10352 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10353 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10354 incremented = save_expr (incremented);
10356 /* Compute the operands as RTX.
10357 Note whether OP0 is the actual lvalue or a copy of it:
10358 I believe it is a copy iff it is a register or subreg
10359 and insns were generated in computing it. */
10361 temp = get_last_insn ();
10362 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
10364 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10365 in place but instead must do sign- or zero-extension during assignment,
10366 so we copy it into a new register and let the code below use it as
10369 Note that we can safely modify this SUBREG since it is know not to be
10370 shared (it was made by the expand_expr call above). */
10372 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10375 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10379 else if (GET_CODE (op0) == SUBREG
10380 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10382 /* We cannot increment this SUBREG in place. If we are
10383 post-incrementing, get a copy of the old value. Otherwise,
10384 just mark that we cannot increment in place. */
10386 op0 = copy_to_reg (op0);
10391 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10392 && temp != get_last_insn ());
10393 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10394 EXPAND_MEMORY_USE_BAD);
10396 /* Decide whether incrementing or decrementing. */
10397 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10398 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10399 this_optab = sub_optab;
10401 /* Convert decrement by a constant into a negative increment. */
10402 if (this_optab == sub_optab
10403 && GET_CODE (op1) == CONST_INT)
10405 op1 = GEN_INT (- INTVAL (op1));
10406 this_optab = add_optab;
10409 /* For a preincrement, see if we can do this with a single instruction. */
10412 icode = (int) this_optab->handlers[(int) mode].insn_code;
10413 if (icode != (int) CODE_FOR_nothing
10414 /* Make sure that OP0 is valid for operands 0 and 1
10415 of the insn we want to queue. */
10416 && (*insn_operand_predicate[icode][0]) (op0, mode)
10417 && (*insn_operand_predicate[icode][1]) (op0, mode)
10418 && (*insn_operand_predicate[icode][2]) (op1, mode))
10422 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10423 then we cannot just increment OP0. We must therefore contrive to
10424 increment the original value. Then, for postincrement, we can return
10425 OP0 since it is a copy of the old value. For preincrement, expand here
10426 unless we can do it with a single insn.
10428 Likewise if storing directly into OP0 would clobber high bits
10429 we need to preserve (bad_subreg). */
10430 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10432 /* This is the easiest way to increment the value wherever it is.
10433 Problems with multiple evaluation of INCREMENTED are prevented
10434 because either (1) it is a component_ref or preincrement,
10435 in which case it was stabilized above, or (2) it is an array_ref
10436 with constant index in an array in a register, which is
10437 safe to reevaluate. */
10438 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10439 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10440 ? MINUS_EXPR : PLUS_EXPR),
10443 TREE_OPERAND (exp, 1));
10445 while (TREE_CODE (incremented) == NOP_EXPR
10446 || TREE_CODE (incremented) == CONVERT_EXPR)
10448 newexp = convert (TREE_TYPE (incremented), newexp);
10449 incremented = TREE_OPERAND (incremented, 0);
10452 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
10453 return post ? op0 : temp;
10458 /* We have a true reference to the value in OP0.
10459 If there is an insn to add or subtract in this mode, queue it.
10460 Queueing the increment insn avoids the register shuffling
10461 that often results if we must increment now and first save
10462 the old value for subsequent use. */
10464 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
10465 op0 = stabilize (op0);
10468 icode = (int) this_optab->handlers[(int) mode].insn_code;
10469 if (icode != (int) CODE_FOR_nothing
10470 /* Make sure that OP0 is valid for operands 0 and 1
10471 of the insn we want to queue. */
10472 && (*insn_operand_predicate[icode][0]) (op0, mode)
10473 && (*insn_operand_predicate[icode][1]) (op0, mode))
10475 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10476 op1 = force_reg (mode, op1);
10478 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10480 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10482 rtx addr = (general_operand (XEXP (op0, 0), mode)
10483 ? force_reg (Pmode, XEXP (op0, 0))
10484 : copy_to_reg (XEXP (op0, 0)));
10487 op0 = change_address (op0, VOIDmode, addr);
10488 temp = force_reg (GET_MODE (op0), op0);
10489 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10490 op1 = force_reg (mode, op1);
10492 /* The increment queue is LIFO, thus we have to `queue'
10493 the instructions in reverse order. */
10494 enqueue_insn (op0, gen_move_insn (op0, temp));
10495 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10500 /* Preincrement, or we can't increment with one simple insn. */
10502 /* Save a copy of the value before inc or dec, to return it later. */
10503 temp = value = copy_to_reg (op0);
10505 /* Arrange to return the incremented value. */
10506 /* Copy the rtx because expand_binop will protect from the queue,
10507 and the results of that would be invalid for us to return
10508 if our caller does emit_queue before using our result. */
10509 temp = copy_rtx (value = op0);
10511 /* Increment however we can. */
10512 op1 = expand_binop (mode, this_optab, value, op1,
10513 current_function_check_memory_usage ? NULL_RTX : op0,
10514 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10515 /* Make sure the value is stored into OP0. */
10517 emit_move_insn (op0, op1);
10522 /* Expand all function calls contained within EXP, innermost ones first.
10523 But don't look within expressions that have sequence points.
10524 For each CALL_EXPR, record the rtx for its value
10525 in the CALL_EXPR_RTL field. */
10528 preexpand_calls (exp)
10531 register int nops, i;
10532 int type = TREE_CODE_CLASS (TREE_CODE (exp));
10534 if (! do_preexpand_calls)
10537 /* Only expressions and references can contain calls. */
10539 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10542 switch (TREE_CODE (exp))
10545 /* Do nothing if already expanded. */
10546 if (CALL_EXPR_RTL (exp) != 0
10547 /* Do nothing if the call returns a variable-sized object. */
10548 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10549 /* Do nothing to built-in functions. */
10550 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10551 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10553 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10556 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10559 case COMPOUND_EXPR:
10561 case TRUTH_ANDIF_EXPR:
10562 case TRUTH_ORIF_EXPR:
10563 /* If we find one of these, then we can be sure
10564 the adjust will be done for it (since it makes jumps).
10565 Do it now, so that if this is inside an argument
10566 of a function, we don't get the stack adjustment
10567 after some other args have already been pushed. */
10568 do_pending_stack_adjust ();
10573 case WITH_CLEANUP_EXPR:
10574 case CLEANUP_POINT_EXPR:
10575 case TRY_CATCH_EXPR:
10579 if (SAVE_EXPR_RTL (exp) != 0)
10586 nops = tree_code_length[(int) TREE_CODE (exp)];
10587 for (i = 0; i < nops; i++)
10588 if (TREE_OPERAND (exp, i) != 0)
10590 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10591 if (type == 'e' || type == '<' || type == '1' || type == '2'
10593 preexpand_calls (TREE_OPERAND (exp, i));
10597 /* At the start of a function, record that we have no previously-pushed
10598 arguments waiting to be popped. */
10601 init_pending_stack_adjust ()
10603 pending_stack_adjust = 0;
10606 /* When exiting from function, if safe, clear out any pending stack adjust
10607 so the adjustment won't get done.
10609 Note, if the current function calls alloca, then it must have a
10610 frame pointer regardless of the value of flag_omit_frame_pointer. */
10613 clear_pending_stack_adjust ()
10615 #ifdef EXIT_IGNORE_STACK
10617 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10618 && EXIT_IGNORE_STACK
10619 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10620 && ! flag_inline_functions)
10621 pending_stack_adjust = 0;
10625 /* Pop any previously-pushed arguments that have not been popped yet. */
10628 do_pending_stack_adjust ()
10630 if (inhibit_defer_pop == 0)
10632 if (pending_stack_adjust != 0)
10633 adjust_stack (GEN_INT (pending_stack_adjust));
10634 pending_stack_adjust = 0;
10638 /* Expand conditional expressions. */
10640 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10641 LABEL is an rtx of code CODE_LABEL, in this function and all the
10645 jumpifnot (exp, label)
10649 do_jump (exp, label, NULL_RTX);
10652 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10655 jumpif (exp, label)
10659 do_jump (exp, NULL_RTX, label);
10662 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10663 the result is zero, or IF_TRUE_LABEL if the result is one.
10664 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10665 meaning fall through in that case.
10667 do_jump always does any pending stack adjust except when it does not
10668 actually perform a jump. An example where there is no jump
10669 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10671 This function is responsible for optimizing cases such as
10672 &&, || and comparison operators in EXP. */
10675 do_jump (exp, if_false_label, if_true_label)
10677 rtx if_false_label, if_true_label;
10679 register enum tree_code code = TREE_CODE (exp);
10680 /* Some cases need to create a label to jump to
10681 in order to properly fall through.
10682 These cases set DROP_THROUGH_LABEL nonzero. */
10683 rtx drop_through_label = 0;
10685 rtx comparison = 0;
10688 enum machine_mode mode;
10690 #ifdef MAX_INTEGER_COMPUTATION_MODE
10691 check_max_integer_computation_mode (exp);
10702 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10708 /* This is not true with #pragma weak */
10710 /* The address of something can never be zero. */
10712 emit_jump (if_true_label);
10717 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10718 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10719 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10722 /* If we are narrowing the operand, we have to do the compare in the
10724 if ((TYPE_PRECISION (TREE_TYPE (exp))
10725 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10727 case NON_LVALUE_EXPR:
10728 case REFERENCE_EXPR:
10733 /* These cannot change zero->non-zero or vice versa. */
10734 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10738 /* This is never less insns than evaluating the PLUS_EXPR followed by
10739 a test and can be longer if the test is eliminated. */
10741 /* Reduce to minus. */
10742 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10743 TREE_OPERAND (exp, 0),
10744 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10745 TREE_OPERAND (exp, 1))));
10746 /* Process as MINUS. */
10750 /* Non-zero iff operands of minus differ. */
10751 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10752 TREE_OPERAND (exp, 0),
10753 TREE_OPERAND (exp, 1)),
10758 /* If we are AND'ing with a small constant, do this comparison in the
10759 smallest type that fits. If the machine doesn't have comparisons
10760 that small, it will be converted back to the wider comparison.
10761 This helps if we are testing the sign bit of a narrower object.
10762 combine can't do this for us because it can't know whether a
10763 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10765 if (! SLOW_BYTE_ACCESS
10766 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10767 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10768 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10769 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10770 && (type = type_for_mode (mode, 1)) != 0
10771 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10772 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10773 != CODE_FOR_nothing))
10775 do_jump (convert (type, exp), if_false_label, if_true_label);
10780 case TRUTH_NOT_EXPR:
10781 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10784 case TRUTH_ANDIF_EXPR:
10785 if (if_false_label == 0)
10786 if_false_label = drop_through_label = gen_label_rtx ();
10787 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10788 start_cleanup_deferral ();
10789 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10790 end_cleanup_deferral ();
10793 case TRUTH_ORIF_EXPR:
10794 if (if_true_label == 0)
10795 if_true_label = drop_through_label = gen_label_rtx ();
10796 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10797 start_cleanup_deferral ();
10798 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10799 end_cleanup_deferral ();
10802 case COMPOUND_EXPR:
10803 push_temp_slots ();
10804 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10805 preserve_temp_slots (NULL_RTX);
10806 free_temp_slots ();
10809 do_pending_stack_adjust ();
10810 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10813 case COMPONENT_REF:
10814 case BIT_FIELD_REF:
10817 int bitsize, bitpos, unsignedp;
10818 enum machine_mode mode;
10824 /* Get description of this reference. We don't actually care
10825 about the underlying object here. */
10826 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10827 &mode, &unsignedp, &volatilep,
10830 type = type_for_size (bitsize, unsignedp);
10831 if (! SLOW_BYTE_ACCESS
10832 && type != 0 && bitsize >= 0
10833 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10834 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10835 != CODE_FOR_nothing))
10837 do_jump (convert (type, exp), if_false_label, if_true_label);
10844 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10845 if (integer_onep (TREE_OPERAND (exp, 1))
10846 && integer_zerop (TREE_OPERAND (exp, 2)))
10847 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10849 else if (integer_zerop (TREE_OPERAND (exp, 1))
10850 && integer_onep (TREE_OPERAND (exp, 2)))
10851 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10855 register rtx label1 = gen_label_rtx ();
10856 drop_through_label = gen_label_rtx ();
10858 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10860 start_cleanup_deferral ();
10861 /* Now the THEN-expression. */
10862 do_jump (TREE_OPERAND (exp, 1),
10863 if_false_label ? if_false_label : drop_through_label,
10864 if_true_label ? if_true_label : drop_through_label);
10865 /* In case the do_jump just above never jumps. */
10866 do_pending_stack_adjust ();
10867 emit_label (label1);
10869 /* Now the ELSE-expression. */
10870 do_jump (TREE_OPERAND (exp, 2),
10871 if_false_label ? if_false_label : drop_through_label,
10872 if_true_label ? if_true_label : drop_through_label);
10873 end_cleanup_deferral ();
10879 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10881 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10882 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10884 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10885 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10888 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10889 fold (build (EQ_EXPR, TREE_TYPE (exp),
10890 fold (build1 (REALPART_EXPR,
10891 TREE_TYPE (inner_type),
10893 fold (build1 (REALPART_EXPR,
10894 TREE_TYPE (inner_type),
10896 fold (build (EQ_EXPR, TREE_TYPE (exp),
10897 fold (build1 (IMAGPART_EXPR,
10898 TREE_TYPE (inner_type),
10900 fold (build1 (IMAGPART_EXPR,
10901 TREE_TYPE (inner_type),
10903 if_false_label, if_true_label);
10906 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10907 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10909 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10910 && !can_compare_p (TYPE_MODE (inner_type)))
10911 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10913 comparison = compare (exp, EQ, EQ);
10919 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10921 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10922 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10924 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10925 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10928 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10929 fold (build (NE_EXPR, TREE_TYPE (exp),
10930 fold (build1 (REALPART_EXPR,
10931 TREE_TYPE (inner_type),
10933 fold (build1 (REALPART_EXPR,
10934 TREE_TYPE (inner_type),
10936 fold (build (NE_EXPR, TREE_TYPE (exp),
10937 fold (build1 (IMAGPART_EXPR,
10938 TREE_TYPE (inner_type),
10940 fold (build1 (IMAGPART_EXPR,
10941 TREE_TYPE (inner_type),
10943 if_false_label, if_true_label);
10946 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10947 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10949 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10950 && !can_compare_p (TYPE_MODE (inner_type)))
10951 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10953 comparison = compare (exp, NE, NE);
10958 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10960 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10961 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10963 comparison = compare (exp, LT, LTU);
10967 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10969 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10970 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10972 comparison = compare (exp, LE, LEU);
10976 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10978 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10979 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10981 comparison = compare (exp, GT, GTU);
10985 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10987 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10988 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10990 comparison = compare (exp, GE, GEU);
10995 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10997 /* This is not needed any more and causes poor code since it causes
10998 comparisons and tests from non-SI objects to have different code
11000 /* Copy to register to avoid generating bad insns by cse
11001 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
11002 if (!cse_not_expected && GET_CODE (temp) == MEM)
11003 temp = copy_to_reg (temp);
11005 do_pending_stack_adjust ();
11006 if (GET_CODE (temp) == CONST_INT)
11007 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
11008 else if (GET_CODE (temp) == LABEL_REF)
11009 comparison = const_true_rtx;
11010 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
11011 && !can_compare_p (GET_MODE (temp)))
11012 /* Note swapping the labels gives us not-equal. */
11013 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
11014 else if (GET_MODE (temp) != VOIDmode)
11015 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
11016 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
11017 GET_MODE (temp), NULL_RTX, 0);
11022 /* Do any postincrements in the expression that was tested. */
11025 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
11026 straight into a conditional jump instruction as the jump condition.
11027 Otherwise, all the work has been done already. */
11029 if (comparison == const_true_rtx)
11032 emit_jump (if_true_label);
11034 else if (comparison == const0_rtx)
11036 if (if_false_label)
11037 emit_jump (if_false_label);
11039 else if (comparison)
11040 do_jump_for_compare (comparison, if_false_label, if_true_label);
11042 if (drop_through_label)
11044 /* If do_jump produces code that might be jumped around,
11045 do any stack adjusts from that code, before the place
11046 where control merges in. */
11047 do_pending_stack_adjust ();
11048 emit_label (drop_through_label);
11052 /* Given a comparison expression EXP for values too wide to be compared
11053 with one insn, test the comparison and jump to the appropriate label.
11054 The code of EXP is ignored; we always test GT if SWAP is 0,
11055 and LT if SWAP is 1. */
11058 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
11061 rtx if_false_label, if_true_label;
11063 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
11064 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
11065 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
11066 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
11067 rtx drop_through_label = 0;
11068 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
11071 if (! if_true_label || ! if_false_label)
11072 drop_through_label = gen_label_rtx ();
11073 if (! if_true_label)
11074 if_true_label = drop_through_label;
11075 if (! if_false_label)
11076 if_false_label = drop_through_label;
11078 /* Compare a word at a time, high order first. */
11079 for (i = 0; i < nwords; i++)
11082 rtx op0_word, op1_word;
11084 if (WORDS_BIG_ENDIAN)
11086 op0_word = operand_subword_force (op0, i, mode);
11087 op1_word = operand_subword_force (op1, i, mode);
11091 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
11092 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
11095 /* All but high-order word must be compared as unsigned. */
11096 comp = compare_from_rtx (op0_word, op1_word,
11097 (unsignedp || i > 0) ? GTU : GT,
11098 unsignedp, word_mode, NULL_RTX, 0);
11099 if (comp == const_true_rtx)
11100 emit_jump (if_true_label);
11101 else if (comp != const0_rtx)
11102 do_jump_for_compare (comp, NULL_RTX, if_true_label);
11104 /* Consider lower words only if these are equal. */
11105 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
11107 if (comp == const_true_rtx)
11108 emit_jump (if_false_label);
11109 else if (comp != const0_rtx)
11110 do_jump_for_compare (comp, NULL_RTX, if_false_label);
11113 if (if_false_label)
11114 emit_jump (if_false_label);
11115 if (drop_through_label)
11116 emit_label (drop_through_label);
11119 /* Compare OP0 with OP1, word at a time, in mode MODE.
11120 UNSIGNEDP says to do unsigned comparison.
11121 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
11124 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
11125 enum machine_mode mode;
11128 rtx if_false_label, if_true_label;
11130 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
11131 rtx drop_through_label = 0;
11134 if (! if_true_label || ! if_false_label)
11135 drop_through_label = gen_label_rtx ();
11136 if (! if_true_label)
11137 if_true_label = drop_through_label;
11138 if (! if_false_label)
11139 if_false_label = drop_through_label;
11141 /* Compare a word at a time, high order first. */
11142 for (i = 0; i < nwords; i++)
11145 rtx op0_word, op1_word;
11147 if (WORDS_BIG_ENDIAN)
11149 op0_word = operand_subword_force (op0, i, mode);
11150 op1_word = operand_subword_force (op1, i, mode);
11154 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
11155 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
11158 /* All but high-order word must be compared as unsigned. */
11159 comp = compare_from_rtx (op0_word, op1_word,
11160 (unsignedp || i > 0) ? GTU : GT,
11161 unsignedp, word_mode, NULL_RTX, 0);
11162 if (comp == const_true_rtx)
11163 emit_jump (if_true_label);
11164 else if (comp != const0_rtx)
11165 do_jump_for_compare (comp, NULL_RTX, if_true_label);
11167 /* Consider lower words only if these are equal. */
11168 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
11170 if (comp == const_true_rtx)
11171 emit_jump (if_false_label);
11172 else if (comp != const0_rtx)
11173 do_jump_for_compare (comp, NULL_RTX, if_false_label);
11176 if (if_false_label)
11177 emit_jump (if_false_label);
11178 if (drop_through_label)
11179 emit_label (drop_through_label);
11182 /* Given an EQ_EXPR expression EXP for values too wide to be compared
11183 with one insn, test the comparison and jump to the appropriate label. */
11186 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
11188 rtx if_false_label, if_true_label;
11190 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11191 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11192 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
11193 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
11195 rtx drop_through_label = 0;
11197 if (! if_false_label)
11198 drop_through_label = if_false_label = gen_label_rtx ();
11200 for (i = 0; i < nwords; i++)
11202 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
11203 operand_subword_force (op1, i, mode),
11204 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
11205 word_mode, NULL_RTX, 0);
11206 if (comp == const_true_rtx)
11207 emit_jump (if_false_label);
11208 else if (comp != const0_rtx)
11209 do_jump_for_compare (comp, if_false_label, NULL_RTX);
11213 emit_jump (if_true_label);
11214 if (drop_through_label)
11215 emit_label (drop_through_label);
11218 /* Jump according to whether OP0 is 0.
11219 We assume that OP0 has an integer mode that is too wide
11220 for the available compare insns. */
11223 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
11225 rtx if_false_label, if_true_label;
11227 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
11230 rtx drop_through_label = 0;
11232 /* The fastest way of doing this comparison on almost any machine is to
11233 "or" all the words and compare the result. If all have to be loaded
11234 from memory and this is a very wide item, it's possible this may
11235 be slower, but that's highly unlikely. */
11237 part = gen_reg_rtx (word_mode);
11238 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
11239 for (i = 1; i < nwords && part != 0; i++)
11240 part = expand_binop (word_mode, ior_optab, part,
11241 operand_subword_force (op0, i, GET_MODE (op0)),
11242 part, 1, OPTAB_WIDEN);
11246 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
11249 if (comp == const_true_rtx)
11250 emit_jump (if_false_label);
11251 else if (comp == const0_rtx)
11252 emit_jump (if_true_label);
11254 do_jump_for_compare (comp, if_false_label, if_true_label);
11259 /* If we couldn't do the "or" simply, do this with a series of compares. */
11260 if (! if_false_label)
11261 drop_through_label = if_false_label = gen_label_rtx ();
11263 for (i = 0; i < nwords; i++)
11265 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
11267 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
11268 if (comp == const_true_rtx)
11269 emit_jump (if_false_label);
11270 else if (comp != const0_rtx)
11271 do_jump_for_compare (comp, if_false_label, NULL_RTX);
11275 emit_jump (if_true_label);
11277 if (drop_through_label)
11278 emit_label (drop_through_label);
11281 /* Given a comparison expression in rtl form, output conditional branches to
11282 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
11285 do_jump_for_compare (comparison, if_false_label, if_true_label)
11286 rtx comparison, if_false_label, if_true_label;
11290 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11291 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
11296 if (if_false_label)
11297 emit_jump (if_false_label);
11299 else if (if_false_label)
11301 rtx first = get_last_insn (), insn, branch;
11304 /* Output the branch with the opposite condition. Then try to invert
11305 what is generated. If more than one insn is a branch, or if the
11306 branch is not the last insn written, abort. If we can't invert
11307 the branch, emit make a true label, redirect this jump to that,
11308 emit a jump to the false label and define the true label. */
11309 /* ??? Note that we wouldn't have to do any of this nonsense if
11310 we passed both labels into a combined compare-and-branch.
11311 Ah well, jump threading does a good job of repairing the damage. */
11313 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11314 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
11319 /* Here we get the first insn that was just emitted. It used to be the
11320 case that, on some machines, emitting the branch would discard
11321 the previous compare insn and emit a replacement. This isn't
11322 done anymore, but abort if we see that FIRST is deleted. */
11325 first = get_insns ();
11326 else if (INSN_DELETED_P (first))
11329 first = NEXT_INSN (first);
11331 /* Look for multiple branches in this sequence, as might be generated
11332 for a multi-word integer comparison. */
11336 for (insn = first; insn ; insn = NEXT_INSN (insn))
11337 if (GET_CODE (insn) == JUMP_INSN)
11343 /* If we've got one branch at the end of the sequence,
11344 we can try to reverse it. */
11346 if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
11349 insn_label = XEXP (condjump_label (branch), 0);
11350 JUMP_LABEL (branch) = insn_label;
11352 if (insn_label != if_false_label)
11355 if (invert_jump (branch, if_false_label))
11359 /* Multiple branches, or reversion failed. Convert to branches
11360 around an unconditional jump. */
11362 if_true_label = gen_label_rtx ();
11363 for (insn = first; insn; insn = NEXT_INSN (insn))
11364 if (GET_CODE (insn) == JUMP_INSN)
11367 insn_label = XEXP (condjump_label (insn), 0);
11368 JUMP_LABEL (insn) = insn_label;
11370 if (insn_label == if_false_label)
11371 redirect_jump (insn, if_true_label);
11373 emit_jump (if_false_label);
11374 emit_label (if_true_label);
11378 /* Generate code for a comparison expression EXP
11379 (including code to compute the values to be compared)
11380 and set (CC0) according to the result.
11381 SIGNED_CODE should be the rtx operation for this comparison for
11382 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
11384 We force a stack adjustment unless there are currently
11385 things pushed on the stack that aren't yet used. */
11388 compare (exp, signed_code, unsigned_code)
11390 enum rtx_code signed_code, unsigned_code;
11392 register rtx op0, op1;
11393 register tree type;
11394 register enum machine_mode mode;
11396 enum rtx_code code;
11398 /* Don't crash if the comparison was erroneous. */
11399 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11400 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
11403 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11404 type = TREE_TYPE (TREE_OPERAND (exp, 0));
11405 mode = TYPE_MODE (type);
11406 unsignedp = TREE_UNSIGNED (type);
11407 code = unsignedp ? unsigned_code : signed_code;
11409 #ifdef HAVE_canonicalize_funcptr_for_compare
11410 /* If function pointers need to be "canonicalized" before they can
11411 be reliably compared, then canonicalize them. */
11412 if (HAVE_canonicalize_funcptr_for_compare
11413 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11414 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11417 rtx new_op0 = gen_reg_rtx (mode);
11419 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
11423 if (HAVE_canonicalize_funcptr_for_compare
11424 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11425 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11428 rtx new_op1 = gen_reg_rtx (mode);
11430 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
11435 return compare_from_rtx (op0, op1, code, unsignedp, mode,
11437 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
11438 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
11441 /* Like compare but expects the values to compare as two rtx's.
11442 The decision as to signed or unsigned comparison must be made by the caller.
11444 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
11447 If ALIGN is non-zero, it is the alignment of this type; if zero, the
11448 size of MODE should be used. */
11451 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
11452 register rtx op0, op1;
11453 enum rtx_code code;
11455 enum machine_mode mode;
11461 /* If one operand is constant, make it the second one. Only do this
11462 if the other operand is not constant as well. */
11464 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
11465 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
11470 code = swap_condition (code);
11473 if (flag_force_mem)
11475 op0 = force_not_mem (op0);
11476 op1 = force_not_mem (op1);
11479 do_pending_stack_adjust ();
11481 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
11482 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
11486 /* There's no need to do this now that combine.c can eliminate lots of
11487 sign extensions. This can be less efficient in certain cases on other
11490 /* If this is a signed equality comparison, we can do it as an
11491 unsigned comparison since zero-extension is cheaper than sign
11492 extension and comparisons with zero are done as unsigned. This is
11493 the case even on machines that can do fast sign extension, since
11494 zero-extension is easier to combine with other operations than
11495 sign-extension is. If we are comparing against a constant, we must
11496 convert it to what it would look like unsigned. */
11497 if ((code == EQ || code == NE) && ! unsignedp
11498 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
11500 if (GET_CODE (op1) == CONST_INT
11501 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
11502 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
11507 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
11509 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
11512 /* Generate code to calculate EXP using a store-flag instruction
11513 and return an rtx for the result. EXP is either a comparison
11514 or a TRUTH_NOT_EXPR whose operand is a comparison.
11516 If TARGET is nonzero, store the result there if convenient.
11518 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
11521 Return zero if there is no suitable set-flag instruction
11522 available on this machine.
11524 Once expand_expr has been called on the arguments of the comparison,
11525 we are committed to doing the store flag, since it is not safe to
11526 re-evaluate the expression. We emit the store-flag insn by calling
11527 emit_store_flag, but only expand the arguments if we have a reason
11528 to believe that emit_store_flag will be successful. If we think that
11529 it will, but it isn't, we have to simulate the store-flag with a
11530 set/jump/set sequence. */
11533 do_store_flag (exp, target, mode, only_cheap)
11536 enum machine_mode mode;
11539 enum rtx_code code;
11540 tree arg0, arg1, type;
11542 enum machine_mode operand_mode;
11546 enum insn_code icode;
11547 rtx subtarget = target;
11550 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
11551 result at the end. We can't simply invert the test since it would
11552 have already been inverted if it were valid. This case occurs for
11553 some floating-point comparisons. */
11555 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11556 invert = 1, exp = TREE_OPERAND (exp, 0);
11558 arg0 = TREE_OPERAND (exp, 0);
11559 arg1 = TREE_OPERAND (exp, 1);
11560 type = TREE_TYPE (arg0);
11561 operand_mode = TYPE_MODE (type);
11562 unsignedp = TREE_UNSIGNED (type);
11564 /* We won't bother with BLKmode store-flag operations because it would mean
11565 passing a lot of information to emit_store_flag. */
11566 if (operand_mode == BLKmode)
11569 /* We won't bother with store-flag operations involving function pointers
11570 when function pointers must be canonicalized before comparisons. */
11571 #ifdef HAVE_canonicalize_funcptr_for_compare
11572 if (HAVE_canonicalize_funcptr_for_compare
11573 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11574 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11576 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11577 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11578 == FUNCTION_TYPE))))
11585 /* Get the rtx comparison code to use. We know that EXP is a comparison
11586 operation of some type. Some comparisons against 1 and -1 can be
11587 converted to comparisons with zero. Do so here so that the tests
11588 below will be aware that we have a comparison with zero. These
11589 tests will not catch constants in the first operand, but constants
11590 are rarely passed as the first operand. */
11592 switch (TREE_CODE (exp))
11601 if (integer_onep (arg1))
11602 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11604 code = unsignedp ? LTU : LT;
11607 if (! unsignedp && integer_all_onesp (arg1))
11608 arg1 = integer_zero_node, code = LT;
11610 code = unsignedp ? LEU : LE;
11613 if (! unsignedp && integer_all_onesp (arg1))
11614 arg1 = integer_zero_node, code = GE;
11616 code = unsignedp ? GTU : GT;
11619 if (integer_onep (arg1))
11620 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11622 code = unsignedp ? GEU : GE;
11628 /* Put a constant second. */
11629 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11631 tem = arg0; arg0 = arg1; arg1 = tem;
11632 code = swap_condition (code);
11635 /* If this is an equality or inequality test of a single bit, we can
11636 do this by shifting the bit being tested to the low-order bit and
11637 masking the result with the constant 1. If the condition was EQ,
11638 we xor it with 1. This does not require an scc insn and is faster
11639 than an scc insn even if we have it. */
11641 if ((code == NE || code == EQ)
11642 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11643 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11645 tree inner = TREE_OPERAND (arg0, 0);
11646 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
11649 /* If INNER is a right shift of a constant and it plus BITNUM does
11650 not overflow, adjust BITNUM and INNER. */
11652 if (TREE_CODE (inner) == RSHIFT_EXPR
11653 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11654 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11655 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11656 < TYPE_PRECISION (type)))
11658 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11659 inner = TREE_OPERAND (inner, 0);
11662 /* If we are going to be able to omit the AND below, we must do our
11663 operations as unsigned. If we must use the AND, we have a choice.
11664 Normally unsigned is faster, but for some machines signed is. */
11665 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11666 #ifdef LOAD_EXTEND_OP
11667 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11673 if (subtarget == 0 || GET_CODE (subtarget) != REG
11674 || GET_MODE (subtarget) != operand_mode
11675 || ! safe_from_p (subtarget, inner, 1))
11678 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11681 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11682 size_int (bitnum), subtarget, ops_unsignedp);
11684 if (GET_MODE (op0) != mode)
11685 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11687 if ((code == EQ && ! invert) || (code == NE && invert))
11688 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11689 ops_unsignedp, OPTAB_LIB_WIDEN);
11691 /* Put the AND last so it can combine with more things. */
11692 if (bitnum != TYPE_PRECISION (type) - 1)
11693 op0 = expand_and (op0, const1_rtx, subtarget);
11698 /* Now see if we are likely to be able to do this. Return if not. */
11699 if (! can_compare_p (operand_mode))
11701 icode = setcc_gen_code[(int) code];
11702 if (icode == CODE_FOR_nothing
11703 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11705 /* We can only do this if it is one of the special cases that
11706 can be handled without an scc insn. */
11707 if ((code == LT && integer_zerop (arg1))
11708 || (! only_cheap && code == GE && integer_zerop (arg1)))
11710 else if (BRANCH_COST >= 0
11711 && ! only_cheap && (code == NE || code == EQ)
11712 && TREE_CODE (type) != REAL_TYPE
11713 && ((abs_optab->handlers[(int) operand_mode].insn_code
11714 != CODE_FOR_nothing)
11715 || (ffs_optab->handlers[(int) operand_mode].insn_code
11716 != CODE_FOR_nothing)))
11722 preexpand_calls (exp);
11723 if (subtarget == 0 || GET_CODE (subtarget) != REG
11724 || GET_MODE (subtarget) != operand_mode
11725 || ! safe_from_p (subtarget, arg1, 1))
11728 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11729 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11732 target = gen_reg_rtx (mode);
11734 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11735 because, if the emit_store_flag does anything it will succeed and
11736 OP0 and OP1 will not be used subsequently. */
11738 result = emit_store_flag (target, code,
11739 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11740 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11741 operand_mode, unsignedp, 1);
11746 result = expand_binop (mode, xor_optab, result, const1_rtx,
11747 result, 0, OPTAB_LIB_WIDEN);
11751 /* If this failed, we have to do this with set/compare/jump/set code. */
11752 if (GET_CODE (target) != REG
11753 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11754 target = gen_reg_rtx (GET_MODE (target));
11756 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11757 result = compare_from_rtx (op0, op1, code, unsignedp,
11758 operand_mode, NULL_RTX, 0);
11759 if (GET_CODE (result) == CONST_INT)
11760 return (((result == const0_rtx && ! invert)
11761 || (result != const0_rtx && invert))
11762 ? const0_rtx : const1_rtx);
11764 label = gen_label_rtx ();
11765 if (bcc_gen_fctn[(int) code] == 0)
11768 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11769 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11770 emit_label (label);
11775 /* Generate a tablejump instruction (used for switch statements). */
11777 #ifdef HAVE_tablejump
11779 /* INDEX is the value being switched on, with the lowest value
11780 in the table already subtracted.
11781 MODE is its expected mode (needed if INDEX is constant).
11782 RANGE is the length of the jump table.
11783 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11785 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11786 index value is out of range. */
11789 do_tablejump (index, mode, range, table_label, default_label)
11790 rtx index, range, table_label, default_label;
11791 enum machine_mode mode;
11793 register rtx temp, vector;
11795 /* Do an unsigned comparison (in the proper mode) between the index
11796 expression and the value which represents the length of the range.
11797 Since we just finished subtracting the lower bound of the range
11798 from the index expression, this comparison allows us to simultaneously
11799 check that the original index expression value is both greater than
11800 or equal to the minimum value of the range and less than or equal to
11801 the maximum value of the range. */
11803 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11806 /* If index is in range, it must fit in Pmode.
11807 Convert to Pmode so we can index with it. */
11809 index = convert_to_mode (Pmode, index, 1);
11811 /* Don't let a MEM slip thru, because then INDEX that comes
11812 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11813 and break_out_memory_refs will go to work on it and mess it up. */
11814 #ifdef PIC_CASE_VECTOR_ADDRESS
11815 if (flag_pic && GET_CODE (index) != REG)
11816 index = copy_to_mode_reg (Pmode, index);
11819 /* If flag_force_addr were to affect this address
11820 it could interfere with the tricky assumptions made
11821 about addresses that contain label-refs,
11822 which may be valid only very near the tablejump itself. */
11823 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11824 GET_MODE_SIZE, because this indicates how large insns are. The other
11825 uses should all be Pmode, because they are addresses. This code
11826 could fail if addresses and insns are not the same size. */
11827 index = gen_rtx_PLUS (Pmode,
11828 gen_rtx_MULT (Pmode, index,
11829 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11830 gen_rtx_LABEL_REF (Pmode, table_label));
11831 #ifdef PIC_CASE_VECTOR_ADDRESS
11833 index = PIC_CASE_VECTOR_ADDRESS (index);
11836 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11837 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11838 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11839 RTX_UNCHANGING_P (vector) = 1;
11840 convert_move (temp, vector, 0);
11842 emit_jump_insn (gen_tablejump (temp, table_label));
11844 /* If we are generating PIC code or if the table is PC-relative, the
11845 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11846 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11850 #endif /* HAVE_tablejump */