/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "optabs-tree.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "tree-ssa-address.h"
#include "tree-chkp.h"
#include "rtx-vector-builder.h"
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
					HOST_WIDE_INT, unsigned HOST_WIDE_INT,
					unsigned HOST_WIDE_INT,
					unsigned HOST_WIDE_INT);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor (tree, rtx, int, poly_int64, bool);
static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64,
			machine_mode, tree, alias_set_type, bool, bool);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree,
							      const_tree);
static int is_aligning_offset (const_tree, const_tree);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
static void emit_single_push_insn (machine_mode, rtx, tree);
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
			  profile_probability);
static rtx const_vector_from_tree (tree);
static rtx const_scalar_mask_from_tree (scalar_int_mode, tree);
static tree tree_expr_size (const_tree);
static HOST_WIDE_INT int_expr_size (tree);
static void convert_mode_scalar (rtx, rtx, int);
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx pat;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  rtx_insn *insn = as_a<rtx_insn *> (rtx_alloc (INSN));
  pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
  for (machine_mode mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (!targetm.hard_regno_mode_ok (regno, mode))
	      continue;

	    set_mode_and_regno (reg, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));

  opt_scalar_float_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_FLOAT)
    {
      scalar_float_mode mode = mode_iter.require ();
      scalar_float_mode srcmode;
      FOR_EACH_MODE_UNTIL (srcmode, mode)
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if (insn_operand_matches (ic, 1, mem))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  machine_mode to_mode = GET_MODE (to);
  machine_mode from_mode = GET_MODE (from);

  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  scalar_int_mode to_int_mode;
  if (GET_CODE (from) == SUBREG
      && SUBREG_PROMOTED_VAR_P (from)
      && is_a <scalar_int_mode> (to_mode, &to_int_mode)
      && (GET_MODE_PRECISION (subreg_promoted_mode (from))
	  >= GET_MODE_PRECISION (to_int_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
    from = gen_lowpart (to_int_mode, from), from_mode = to_int_mode;
  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (known_eq (GET_MODE_BITSIZE (from_mode),
			    GET_MODE_BITSIZE (to_mode)));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  convert_mode_scalar (to, from, unsignedp);
}
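
/* Illustrative sketch, not part of the original source: a caller that
   widens a SImode pseudo into a DImode pseudo writes

     rtx narrow = gen_reg_rtx (SImode);
     rtx wide = gen_reg_rtx (DImode);
     convert_move (wide, narrow, 0);

   with UNSIGNEDP == 0 for sign extension and nonzero for zero
   extension; both pseudos carry their modes, so the conversion to
   perform is implicit in the operands.  */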
/* Like convert_move, but deals only with scalar modes.  */

static void
convert_mode_scalar (rtx to, rtx from, int unsignedp)
{
  /* Both modes should be scalar types.  */
  scalar_mode from_mode = as_a <scalar_mode> (GET_MODE (from));
  scalar_mode to_mode = as_a <scalar_mode> (GET_MODE (to));
  bool to_real = SCALAR_FLOAT_MODE_P (to_mode);
  bool from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  gcc_assert (to_real == from_real);

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
  if (to_real)
    {
      rtx value;
      rtx_insn *insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
  {
    convert_optab ctab = unsignedp ? zext_optab : sext_optab;

    if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
      ctab = trunc_optab;

    if (convert_optab_handler (ctab, to_mode, from_mode)
	!= CODE_FOR_nothing)
      {
	emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
			to, from, UNKNOWN);
	return;
      }
  }
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      scalar_int_mode full_mode
	= smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      scalar_int_mode full_mode
	= smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (icode, to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx_insn *insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      scalar_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}
      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 the source does not overlap the target so force it into an isolated
	 register when maybe so.  Likewise for any MEM input, since the
	 conversion sequence might require several references to it and we
	 must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
					    LT, lowfrom, const0_rtx,
					    lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && !targetm.hard_regno_mode_ok (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  scalar_mode intermediate;
	  rtx tmp;
	  int shift_amount;

	  /* Search for a mode to convert via.  */
	  opt_scalar_mode intermediate_iter;
	  FOR_EACH_MODE_FROM (intermediate_iter, from_mode)
	    {
	      scalar_mode intermediate = intermediate_iter.require ();
	      if (((can_extend_p (to_mode, intermediate, unsignedp)
		    != CODE_FOR_nothing)
		   || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode,
							 intermediate)))
		  && (can_extend_p (intermediate, from_mode, unsignedp)
		      != CODE_FOR_nothing))
		{
		  convert_move (to, convert_to_mode (intermediate, from,
						     unsignedp), unsignedp);
		  return;
		}
	    }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = (GET_MODE_PRECISION (to_mode)
			  - GET_MODE_PRECISION (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
			     from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
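
/* Illustrative sketch, not part of the original source: unlike
   convert_move, convert_to_mode returns a value instead of storing into
   an existing target, e.g.

     rtx w = convert_to_mode (word_mode, x, TYPE_UNSIGNED (type));

   The result may share storage with X (a lowpart reference) or be a
   freshly converted pseudo; callers must not assume a copy was made.  */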
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;
  scalar_int_mode int_mode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG
      && SUBREG_PROMOTED_VAR_P (x)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && (GET_MODE_PRECISION (subreg_promoted_mode (x))
	  >= GET_MODE_PRECISION (int_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
    x = gen_lowpart (int_mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  if (CONST_SCALAR_INT_P (x)
      && is_int_mode (mode, &int_mode))
    {
      /* If the caller did not tell us the old mode, then there is not
	 much to do with respect to canonicalization.  We have to
	 assume that all the bits are significant.  */
      if (GET_MODE_CLASS (oldmode) != MODE_INT)
	oldmode = MAX_MODE_INT;
      wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
				   GET_MODE_PRECISION (int_mode),
				   unsignedp ? UNSIGNED : SIGNED);
      return immed_wide_int_const (w, int_mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  */
  scalar_int_mode int_oldmode;
  if (is_int_mode (mode, &int_mode)
      && is_int_mode (oldmode, &int_oldmode)
      && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode)
      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode])
	  || CONST_POLY_INT_P (x)
	  || (REG_P (x)
	      && (!HARD_REGISTER_P (x)
		  || targetm.hard_regno_mode_ok (REGNO (x), int_mode))
	      && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x)))))
    return gen_lowpart (int_mode, x);

  /* Converting from an integer constant into a vector mode is always
     equivalent to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (known_eq (GET_MODE_BITSIZE (mode),
			    GET_MODE_BITSIZE (oldmode)));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
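
/* Illustrative sketch, not part of the original source: OLDMODE matters
   mainly for constants, which carry no mode of their own.  For example,
   (const_int -1) read as an unsigned QImode value and widened to SImode
   must become 255 rather than -1:

     rtx c = convert_modes (SImode, QImode, GEN_INT (-1), 1);

   With UNSIGNEDP == 0 the same call simply returns (const_int -1).  */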
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  scalar_int_mode tmode
    = int_mode_for_size (max_pieces * BITS_PER_UNIT, 1).require ();

  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      scalar_int_mode xmode = NARROWEST_INT_MODE;
      opt_scalar_int_mode mode_iter;
      FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
	{
	  tmode = mode_iter.require ();
	  if (GET_MODE_SIZE (tmode) > max_pieces
	      || targetm.slow_unaligned_access (tmode, align))
	    break;
	  xmode = tmode;
	}

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
/* Return the widest integer mode that is narrower than SIZE bytes.  */

static scalar_int_mode
widest_int_mode_for_size (unsigned int size)
{
  scalar_int_mode result = NARROWEST_INT_MODE;

  gcc_checking_assert (size > 1);

  opt_scalar_int_mode tmode;
  FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
    if (GET_MODE_SIZE (tmode.require ()) < size)
      result = tmode.require ();

  return result;
}
/* Determine whether an operation OP on LEN bytes with alignment ALIGN can
   and should be performed piecewise.  */

static bool
can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
		  enum by_pieces_operation op)
{
  return targetm.use_by_pieces_infrastructure_p (len, align, op,
						 optimize_insn_for_speed_p ());
}

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

bool
can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
{
  return can_do_by_pieces (len, align, MOVE_BY_PIECES);
}
/* Return number of insns required to perform operation OP by pieces
   for L bytes.  ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		  unsigned int max_size, by_pieces_operation op)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      scalar_int_mode mode = widest_int_mode_for_size (max_size);
      enum insn_code icode;

      unsigned int modesize = GET_MODE_SIZE (mode);

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	{
	  unsigned HOST_WIDE_INT n_pieces = l / modesize;
	  l %= modesize;
	  switch (op)
	    {
	    case MOVE_BY_PIECES:
	    case SET_BY_PIECES:
	    case STORE_BY_PIECES:
	      n_insns += n_pieces;
	      break;
	    case COMPARE_BY_PIECES:
	      int batch = targetm.compare_by_pieces_branch_ratio (mode);
	      int batch_ops = 4 * batch - 1;
	      unsigned HOST_WIDE_INT full = n_pieces / batch;
	      n_insns += full * batch_ops;
	      if (n_pieces % batch != 0)
		n_insns += batch_ops;
	      break;
	    }
	}
      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
/* Used when performing piecewise block operations, holds information
   about one of the memory objects involved.  The member functions
   can be used to generate code for loading from the object and
   updating the address when iterating.  */

class pieces_addr
{
  /* The object being referenced, a MEM.  Can be NULL_RTX to indicate
     that we are pushing onto the stack.  */
  rtx m_obj;
  /* The address of the object.  Can differ from that seen in the
     MEM rtx if we copied the address to a register.  */
  rtx m_addr;
  /* Nonzero if the address on the object has an autoincrement already,
     signifies whether that was an increment or decrement.  */
  signed char m_addr_inc;
  /* Nonzero if we intend to use autoinc without the address already
     having autoinc form.  We will insert add insns around each memory
     reference, expecting later passes to form autoinc addressing modes.
     The only supported options are predecrement and postincrement.  */
  signed char m_explicit_inc;
  /* True if we have either of the two possible cases of using
     autoincrement.  */
  bool m_auto;
  /* True if this is an address to be used for load operations rather
     than stores.  */
  bool m_is_load;
  /* Optionally, a function to obtain constants for any given offset into
     the objects, and data associated with it.  */
  by_pieces_constfn m_constfn;
  void *m_cfndata;

public:
  pieces_addr (rtx, bool, by_pieces_constfn, void *);
  rtx adjust (scalar_int_mode, HOST_WIDE_INT);
  void increment_address (HOST_WIDE_INT);
  void maybe_predec (HOST_WIDE_INT);
  void maybe_postinc (HOST_WIDE_INT);
  void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
  int get_addr_inc ()
  {
    return m_addr_inc;
  }
};
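
/* Illustrative sketch, not part of the original source: the op_by_pieces_d
   machinery below drives a pieces_addr roughly like

     pieces_addr to (to_mem, false, NULL, NULL);
     to.decide_autoinc (mode, reverse, len);
     rtx piece = to.adjust (QImode, offset);

   bracketing each access with maybe_predec/maybe_postinc.  adjust either
   rewrites the MEM address for the given offset or, when a constfn was
   supplied, fabricates a constant operand instead of a memory load.  */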
/* Initialize a pieces_addr structure from an object OBJ.  IS_LOAD is
   true if the operation to be performed on this object is a load
   rather than a store.  For stores, OBJ can be NULL, in which case we
   assume the operation is a stack push.  For loads, the optional
   CONSTFN and its associated CFNDATA can be used in place of the
   memory load.  */

pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
			  void *cfndata)
  : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
{
  m_addr_inc = 0;
  m_auto = false;
  if (obj)
    {
      rtx addr = XEXP (obj, 0);
      rtx_code code = GET_CODE (addr);
      m_addr = addr;
      bool dec = code == PRE_DEC || code == POST_DEC;
      bool inc = code == PRE_INC || code == POST_INC;
      m_auto = inc || dec;
      if (m_auto)
	m_addr_inc = dec ? -1 : 1;

      /* While we have always looked for these codes here, the code
	 implementing the memory operation has never handled them.
	 Support could be added later if necessary or beneficial.  */
      gcc_assert (code != PRE_INC && code != POST_DEC);
    }
  else
    {
      m_addr = NULL_RTX;
      if (!is_load)
	{
	  m_auto = true;
	  if (STACK_GROWS_DOWNWARD)
	    m_addr_inc = -1;
	  else
	    m_addr_inc = 1;
	}
      else
	gcc_assert (constfn != NULL);
    }
  m_explicit_inc = 0;
  if (constfn)
    gcc_assert (is_load);
}
/* Decide whether to use autoinc for an address involved in a memory op.
   MODE is the mode of the accesses, REVERSE is true if we've decided to
   perform the operation starting from the end, and LEN is the length of
   the operation.  Don't override an earlier decision to set m_auto.  */

void
pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
			     HOST_WIDE_INT len)
{
  if (m_auto || m_obj == NULL_RTX)
    return;

  bool use_predec = (m_is_load
		     ? USE_LOAD_PRE_DECREMENT (mode)
		     : USE_STORE_PRE_DECREMENT (mode));
  bool use_postinc = (m_is_load
		      ? USE_LOAD_POST_INCREMENT (mode)
		      : USE_STORE_POST_INCREMENT (mode));
  machine_mode addr_mode = get_address_mode (m_obj);

  if (use_predec && reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode,
				 plus_constant (addr_mode,
						m_addr, len));
      m_auto = true;
      m_explicit_inc = -1;
    }
  else if (use_postinc && !reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode, m_addr);
      m_auto = true;
      m_explicit_inc = 1;
    }
  else if (CONSTANT_P (m_addr))
    m_addr = copy_to_mode_reg (addr_mode, m_addr);
}
/* Adjust the address to refer to the data at OFFSET in MODE.  If we
   are using autoincrement for this address, we don't add the offset,
   but we still modify the MEM's properties.  */

rtx
pieces_addr::adjust (scalar_int_mode mode, HOST_WIDE_INT offset)
{
  if (m_constfn)
    return m_constfn (m_cfndata, offset, mode);
  if (m_obj == NULL_RTX)
    return NULL_RTX;
  if (m_auto)
    return adjust_automodify_address (m_obj, mode, m_addr, offset);
  else
    return adjust_address (m_obj, mode, offset);
}
/* Emit an add instruction to increment the address by SIZE.  */

void
pieces_addr::increment_address (HOST_WIDE_INT size)
{
  rtx amount = gen_int_mode (size, GET_MODE (m_addr));
  emit_insn (gen_add2_insn (m_addr, amount));
}

/* If we are supposed to decrement the address after each access, emit code
   to do so now.  Increment by SIZE (which should have the correct sign
   already).  */

void
pieces_addr::maybe_predec (HOST_WIDE_INT size)
{
  if (m_explicit_inc >= 0)
    return;
  gcc_assert (HAVE_PRE_DECREMENT);
  increment_address (size);
}

/* If we are supposed to increment the address after each access, emit code
   to do so now.  Increment by SIZE.  */

void
pieces_addr::maybe_postinc (HOST_WIDE_INT size)
{
  if (m_explicit_inc <= 0)
    return;
  gcc_assert (HAVE_POST_INCREMENT);
  increment_address (size);
}
/* This structure is used by do_op_by_pieces to describe the operation
   to be performed.  */

class op_by_pieces_d
{
 protected:
  pieces_addr m_to, m_from;
  unsigned HOST_WIDE_INT m_len;
  HOST_WIDE_INT m_offset;
  unsigned int m_align;
  unsigned int m_max_size;
  bool m_reverse;

  /* Virtual functions, overridden by derived classes for the specific
     operation.  */
  virtual void generate (rtx, rtx, machine_mode) = 0;
  virtual bool prepare_mode (machine_mode, unsigned int) = 0;
  virtual void finish_mode (machine_mode)
  {
  }

 public:
  op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
		  unsigned HOST_WIDE_INT, unsigned int);
  void run ();
};
/* The constructor for an op_by_pieces_d structure.  We require two
   objects named TO and FROM, which are identified as loads or stores
   by TO_LOAD and FROM_LOAD.  If FROM is a load, the optional FROM_CFN
   and its associated FROM_CFN_DATA can be used to replace loads with
   constant values.  LEN describes the length of the operation.  */

op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
				rtx from, bool from_load,
				by_pieces_constfn from_cfn,
				void *from_cfn_data,
				unsigned HOST_WIDE_INT len,
				unsigned int align)
  : m_to (to, to_load, NULL, NULL),
    m_from (from, from_load, from_cfn, from_cfn_data),
    m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
{
  int toi = m_to.get_addr_inc ();
  int fromi = m_from.get_addr_inc ();
  if (toi >= 0 && fromi >= 0)
    m_reverse = false;
  else if (toi <= 0 && fromi <= 0)
    m_reverse = true;
  else
    gcc_unreachable ();

  m_offset = m_reverse ? len : 0;
  align = MIN (to ? MEM_ALIGN (to) : align,
	       from ? MEM_ALIGN (from) : align);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
    {
      /* Find the mode of the largest comparison.  */
      scalar_int_mode mode = widest_int_mode_for_size (m_max_size);

      m_from.decide_autoinc (mode, m_reverse, len);
      m_to.decide_autoinc (mode, m_reverse, len);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
  m_align = align;
}
/* This function contains the main loop used for expanding a block
   operation.  First move what we can in the largest integer mode,
   then go to successively smaller modes.  For every access, call
   GENFUN with the two operands and the EXTRA_DATA.  */

void
op_by_pieces_d::run ()
{
  while (m_max_size > 1 && m_len > 0)
    {
      scalar_int_mode mode = widest_int_mode_for_size (m_max_size);

      if (prepare_mode (mode, m_align))
	{
	  unsigned int size = GET_MODE_SIZE (mode);
	  rtx to1 = NULL_RTX, from1;

	  while (m_len >= size)
	    {
	      if (m_reverse)
		m_offset -= size;

	      to1 = m_to.adjust (mode, m_offset);
	      from1 = m_from.adjust (mode, m_offset);

	      m_to.maybe_predec (-(HOST_WIDE_INT)size);
	      m_from.maybe_predec (-(HOST_WIDE_INT)size);

	      generate (to1, from1, mode);

	      m_to.maybe_postinc (size);
	      m_from.maybe_postinc (size);

	      if (!m_reverse)
		m_offset += size;

	      m_len -= size;
	    }

	  finish_mode (mode);
	}
      m_max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!m_len);
}
/* Derived class from op_by_pieces_d, providing support for block move
   operations.  */

class move_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		    unsigned int align)
    : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
  {
  }
  rtx finish_endp (int);
};

/* Return true if MODE can be used for a set of copies, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}
/* A callback used when iterating for a move_by_pieces_operation.
   OP0 is the destination and OP1 the source piece in MODE.  If OP0 is
   NULL, this means we should generate a push; otherwise the insn gen
   function prepared by prepare_mode is used to emit the move.  */

void
move_by_pieces_d::generate (rtx op0, rtx op1,
			    machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef PUSH_ROUNDING
  if (op0 == NULL_RTX)
    {
      emit_single_push_insn (mode, op1, NULL);
      return;
    }
#endif
  emit_insn (m_gen_fun (op0, op1));
}
/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.  If ENDP is 1 return
   memory at the end ala mempcpy, and if ENDP is 2 return memory the
   end minus one byte ala stpcpy.  */

rtx
move_by_pieces_d::finish_endp (int endp)
{
  gcc_assert (!m_reverse);
  if (endp == 2)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
#ifndef PUSH_ROUNDING
  if (to == NULL)
    gcc_unreachable ();
#endif

  move_by_pieces_d data (to, from, len, align);

  data.run ();

  if (endp)
    return data.finish_endp (endp);
  else
    return to;
}
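
/* Illustrative sketch, not part of the original source: the typical
   call, as made from emit_block_move_hints below, is

     if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   with ENDP == 0 because a plain block move has no mempcpy/stpcpy
   style return value.  */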
/* Derived class from op_by_pieces_d, providing support for memory store
   operations.  */

class store_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
		     unsigned HOST_WIDE_INT len, unsigned int align)
    : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
  {
  }
  rtx finish_endp (int);
};

/* Return true if MODE can be used for a set of stores, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}
/* A callback used when iterating for a store_by_pieces_operation.
   OP0 is the destination and OP1 the constant value to store in MODE;
   the insn gen function prepared by prepare_mode is used to emit the
   store.  */

void
store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
{
  emit_insn (m_gen_fun (op0, op1));
}
/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.  If ENDP is 1 return
   memory at the end ala mempcpy, and if ENDP is 2 return memory the
   end minus one byte ala stpcpy.  */

rtx
store_by_pieces_d::finish_endp (int endp)
{
  gcc_assert (!m_reverse);
  if (endp == 2)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
		     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (!targetm.use_by_pieces_infrastructure_p (len, align,
					       memsetp
						 ? SET_BY_PIECES
						 : STORE_BY_PIECES,
					       optimize_insn_for_speed_p ()))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1 && l > 0)
	{
	  scalar_int_mode mode = widest_int_mode_for_size (max_size);

	  icode = optab_handler (mov_optab, mode);
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!targetm.legitimate_constant_p (mode, cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
		 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (targetm.use_by_pieces_infrastructure_p
		(len, align,
		 memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
		 optimize_insn_for_speed_p ()));

  store_by_pieces_d data (to, constfun, constfundata, len, align);
  data.run ();

  if (endp)
    return data.finish_endp (endp);
  else
    return to;
}
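
/* Illustrative sketch, not part of the original source: a caller first
   asks can_store_by_pieces and then hands the same CONSTFUN pair to
   store_by_pieces, e.g. for a memset-style fill:

     if (can_store_by_pieces (len, constfun, &data, align, true))
       store_by_pieces (to, len, constfun, &data, align, true, 0);

   where constfun/data stand for whatever callback produces the constant
   piece for each offset (builtins.c provides such helpers).  */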
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT, scalar_int_mode)
{
  return const0_rtx;
}

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (len == 0)
    return;

  store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
  data.run ();
}
/* Context used by compare_by_pieces_genfn.  It stores the fail label
   to jump to in case of miscomparison, and for branch ratios greater than 1,
   it stores an accumulator and the current and maximum counts before
   emitting another branch.  */

class compare_by_pieces_d : public op_by_pieces_d
{
  rtx_code_label *m_fail_label;
  rtx m_accumulator;
  int m_count, m_batch;

  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);
  void finish_mode (machine_mode);
 public:
  compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
		       void *op1_cfn_data, HOST_WIDE_INT len, int align,
		       rtx_code_label *fail_label)
    : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
  {
    m_fail_label = fail_label;
  }
};
/* A callback used when iterating for a compare_by_pieces_operation.
   OP0 and OP1 are the values that have been loaded and should be
   compared in MODE.  DATA holds a pointer to the compare_by_pieces_data
   context structure.  */

void
compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
{
  if (m_batch > 1)
    {
      rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
			       true, OPTAB_LIB_WIDEN);
      if (m_count != 0)
	temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
			     true, OPTAB_LIB_WIDEN);
      m_accumulator = temp;

      if (++m_count < m_batch)
	return;

      m_count = 0;
      op0 = m_accumulator;
      op1 = const0_rtx;
      m_accumulator = NULL_RTX;
    }
  do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
			   m_fail_label, profile_probability::uninitialized ());
}
/* Return true if MODE can be used for a set of moves and comparisons,
   given an alignment ALIGN.  Prepare whatever data is necessary for
   later calls to generate.  */

bool
compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  if (icode == CODE_FOR_nothing
      || align < GET_MODE_ALIGNMENT (mode)
      || !can_compare_p (EQ, mode, ccp_jump))
    return false;
  m_batch = targetm.compare_by_pieces_branch_ratio (mode);
  if (m_batch < 0)
    return false;
  m_accumulator = NULL_RTX;
  m_count = 0;
  return true;
}

/* Called after expanding a series of comparisons in MODE.  If we have
   accumulated results for which we haven't emitted a branch yet, do
   so now.  */

void
compare_by_pieces_d::finish_mode (machine_mode mode)
{
  if (m_accumulator != NULL_RTX)
    do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
			     NULL_RTX, NULL, m_fail_label,
			     profile_probability::uninitialized ());
}
/* Generate several move instructions to compare LEN bytes from blocks
   ARG0 and ARG1.  (These are MEM rtx's with BLKmode).

   ALIGN is the maximum alignment we can assume.

   Optionally, the caller can pass a constfn and associated data in A1_CFN
   and A1_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.  */

static rtx
compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
		   rtx target, unsigned int align,
		   by_pieces_constfn a1_cfn, void *a1_cfn_data)
{
  rtx_code_label *fail_label = gen_label_rtx ();
  rtx_code_label *end_label = gen_label_rtx ();

  if (target == NULL_RTX
      || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (TYPE_MODE (integer_type_node));

  compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
			    fail_label);

  data.run ();

  emit_move_insn (target, const0_rtx);
  emit_jump (end_label);

  emit_label (fail_label);
  emit_move_insn (target, const1_rtx);
  emit_label (end_label);

  return target;
}
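
/* Illustrative sketch, not part of the original source: the expansion
   produced by compare_by_pieces has the shape

     if (piece_0 of ARG0 != piece_0 of ARG1) goto fail;
     ...
     target = 0; goto end;
   fail:
     target = 1;
   end:

   so it yields only an equality flag, not the tri-state memcmp value;
   emit_block_cmp_hints therefore uses it only when EQUALITY_ONLY.  */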
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of the block to move.
   MAX_SIZE is the maximal size of the block to move; if it cannot be
   represented in unsigned HOST_WIDE_INT, then it is the mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size,
		       unsigned HOST_WIDE_INT min_size,
		       unsigned HOST_WIDE_INT max_size,
		       unsigned HOST_WIDE_INT probable_max_size)
{
  int may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size) && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = 1;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = 0;
      break;

    case BLOCK_OP_NO_LIBCALL_RET:
      may_use_call = -1;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }

  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size,
				       min_size, max_size, probable_max_size))
    ;
  else if (may_use_call
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      if (may_use_call < 0)
	return pc_rtx;

      retval = emit_block_copy_via_libcall (x, y, size,
					    method == BLOCK_OP_TAILCALL);
    }
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
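
/* Illustrative sketch, not part of the original source: most callers go
   through the emit_block_move wrapper that follows, e.g.

     emit_block_move (dest_mem, src_mem, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   which derives the MIN_SIZE/MAX_SIZE hints from SIZE and passes neutral
   values for the remaining hints.  */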
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return emit_block_move_hints (x, y, size, method, 0, -1,
				min, max, max);
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
					      NULL_TREE, true);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
	  return false;
	targetm.calls.function_arg_advance (args_so_far, mode,
					    NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align,
			    HOST_WIDE_INT expected_size,
			    unsigned HOST_WIDE_INT min_size,
			    unsigned HOST_WIDE_INT max_size,
			    unsigned HOST_WIDE_INT probable_max_size)
{
  int save_volatile_ok = volatile_ok;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
	expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
	expected_size = min_size;
    }

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  opt_scalar_int_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  Since SIZE is within the Pmode address
	     space, we limit MODE to Pmode.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || max_size <= (GET_MODE_MASK (mode) >> 1)
	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
	{
	  struct expand_operand ops[9];
	  unsigned int nops;

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */
	  nops = insn_data[(int) code].n_generator_args;
	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

	  create_fixed_operand (&ops[0], x);
	  create_fixed_operand (&ops[1], y);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[2], size, mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops >= 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (nops >= 8)
	    {
	      create_integer_operand (&ops[6], min_size);
	      /* If we cannot represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) max_size != -1)
		create_integer_operand (&ops[7], max_size);
	      else
		create_fixed_operand (&ops[7], NULL);
	    }
	  if (nops == 9)
	    {
	      /* If we cannot represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) probable_max_size != -1)
		create_integer_operand (&ops[8], probable_max_size);
	      else
		create_fixed_operand (&ops[8], NULL);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    {
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx_code_label *cmp_label, *top_label;
  rtx iter, x_addr, y_addr, tmp;
  machine_mode x_addr_mode = get_address_mode (x);
  machine_mode y_addr_mode = get_address_mode (y);
  machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label,
			   profile_probability::guessed_always ()
			   .apply_scale (9, 10));
}
/* Expand a call to memcpy or memmove or memcmp, and return the result.
   TAILCALL is true if this is a tail call.  */

rtx
emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
			   rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, dst_tree, src_tree, size_tree;
  machine_mode size_mode;

  /* Since dst and src are passed to a libcall, mark the corresponding
     tree EXPR as addressable.  */
  tree dst_expr = MEM_EXPR (dst);
  tree src_expr = MEM_EXPR (src);
  if (dst_expr)
    mark_addressable (dst_expr);
  if (src_expr)
    mark_addressable (src_expr);

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  dst_tree = make_tree (ptr_type_node, dst_addr);

  src_addr = copy_addr_to_reg (XEXP (src, 0));
  src_addr = convert_memory_address (ptr_mode, src_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memcpy/memmove/memcmp because they can be provided by the user.  */
  tree fn = builtin_decl_implicit (fncode);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}
/* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
   ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
   otherwise return null.  */

static rtx
expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
			  rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
			  HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[5];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
			       TYPE_UNSIGNED (arg3_type));
  create_integer_operand (&ops[4], align);
  if (maybe_expand_insn (icode, 5, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Expand a block compare between X and Y with length LEN using the
   cmpmem optab, placing the result in TARGET.  LEN_TYPE is the type
   of the expression that was used to calculate the length.  ALIGN
   gives the known minimum common alignment.  */

static rtx
emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
			   unsigned align)
{
  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
  insn_code icode = direct_optab_handler (cmpmem_optab, SImode);

  if (icode == CODE_FOR_nothing)
    return NULL_RTX;

  return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
}
/* Emit code to compare a block Y to a block X.  This may be done with
   string-compare instructions, with multiple scalar instructions,
   or with a library call.

   Both X and Y must be MEM rtx's.  LEN is an rtx that says how long
   they are.  LEN_TYPE is the type of the expression that was used to
   calculate it.

   If EQUALITY_ONLY is true, it means we don't have to return the tri-state
   value of a normal memcmp call, instead we can just compare for equality.
   If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
   expanding the comparison inline.

   Optionally, the caller can pass a constfn and associated data in Y_CFN
   and Y_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.
   Return the result of the comparison, or NULL_RTX if we failed to
   perform the operation.  */

rtx
emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
		      bool equality_only, by_pieces_constfn y_cfn,
		      void *y_cfndata)
{
  rtx result = 0;

  if (CONST_INT_P (len) && INTVAL (len) == 0)
    return const0_rtx;

  gcc_assert (MEM_P (x) && MEM_P (y));
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  if (equality_only
      && CONST_INT_P (len)
      && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
    result = compare_by_pieces (x, y, INTVAL (len), target, align,
				y_cfn, y_cfndata);
  else
    result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);

  return result;
}
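
/* Illustrative sketch, not part of the original source: expanding an
   equality-only memcmp of 16 known-aligned bytes could look like

     rtx res = emit_block_cmp_hints (a_mem, b_mem, GEN_INT (16),
				     size_type_node, target, true,
				     y_cfn, y_cfn_data);

   where y_cfn/y_cfn_data are hypothetical names for the optional
   constant-reading callback described above; a NULL_RTX result tells
   the caller to fall back to a library call.  */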
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
{
  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
  if (targetm.have_load_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
						     GEN_INT (nregs)))
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
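
/* Illustrative sketch, not part of the original source: loading a
   three-word argument into consecutive hard registers starting at hard
   register 4 is

     move_block_to_reg (4, arg_mem, 3, BLKmode);

   which emits one word-mode move per register unless the target's
   load-multiple pattern matches.  */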
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
  if (targetm.have_store_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
						      GEN_INT (nregs)))
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
2070 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2071 ORIG, where ORIG is a non-consecutive group of registers represented by
2072 a PARALLEL. The clone is identical to the original except in that the
2073 original set of registers is replaced by a new set of pseudo registers.
2074 The new set has the same modes as the original set. */
2077 gen_group_rtx (rtx orig)
2082 gcc_assert (GET_CODE (orig) == PARALLEL);
2084 length = XVECLEN (orig, 0);
2085 tmps = XALLOCAVEC (rtx, length);
2087 /* Skip a NULL entry in first slot. */
2088 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2093 for (; i < length; i++)
2095 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2096 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2098 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2101 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
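/* Usage sketch (illustrative): a common pairing with emit_group_move
   below is to clone an incoming PARALLEL into pseudos and then copy
   the original registers over:

     rtx tmp = gen_group_rtx (incoming);
     emit_group_move (tmp, incoming);

   where INCOMING is assumed to be a PARALLEL of (expr_list (reg)
   (const_int offset)) entries built by the target's calling-convention
   hooks.  */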
2104 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
2105 except that values are placed in TMPS[i], and must later be moved
2106 into the corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
2109 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type,
2114 machine_mode m = GET_MODE (orig_src);
2116 gcc_assert (GET_CODE (dst) == PARALLEL);
2119 && !SCALAR_INT_MODE_P (m)
2120 && !MEM_P (orig_src)
2121 && GET_CODE (orig_src) != CONCAT)
2123 scalar_int_mode imode;
2124 if (int_mode_for_mode (GET_MODE (orig_src)).exists (&imode))
2126 src = gen_reg_rtx (imode);
2127 emit_move_insn (gen_lowpart (GET_MODE (orig_src), src), orig_src);
2131 src = assign_stack_temp (GET_MODE (orig_src), ssize);
2132 emit_move_insn (src, orig_src);
2134 emit_group_load_1 (tmps, dst, src, type, ssize);
2138 /* Check for a NULL entry, used to indicate that the parameter goes
2139 both on the stack and in registers. */
2140 if (XEXP (XVECEXP (dst, 0, 0), 0))
2145 /* Process the pieces. */
2146 for (i = start; i < XVECLEN (dst, 0); i++)
2148 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2149 poly_int64 bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2150 poly_int64 bytelen = GET_MODE_SIZE (mode);
2151 poly_int64 shift = 0;
2153 /* Handle trailing fragments that run over the size of the struct.
2154 It's the target's responsibility to make sure that the fragment
2155 cannot be strictly smaller in some cases and strictly larger
2157 gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
2158 if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2160 /* Arrange to shift the fragment to where it belongs.
2161 extract_bit_field loads to the lsb of the reg. */
2163 #ifdef BLOCK_REG_PADDING
2164 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2165 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
2170 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2171 bytelen = ssize - bytepos;
2172 gcc_assert (maybe_gt (bytelen, 0));
2175 /* If we won't be loading directly from memory, protect the real source
2176 from strange tricks we might play; but make sure that the source can
2177 be loaded directly into the destination. */
2179 if (!MEM_P (orig_src)
2180 && (!CONSTANT_P (orig_src)
2181 || (GET_MODE (orig_src) != mode
2182 && GET_MODE (orig_src) != VOIDmode)))
2184 if (GET_MODE (orig_src) == VOIDmode)
2185 src = gen_reg_rtx (mode);
2187 src = gen_reg_rtx (GET_MODE (orig_src));
2189 emit_move_insn (src, orig_src);
2192 /* Optimize the access just a bit. */
2194 && (! targetm.slow_unaligned_access (mode, MEM_ALIGN (src))
2195 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2196 && multiple_p (bytepos * BITS_PER_UNIT, GET_MODE_ALIGNMENT (mode))
2197 && known_eq (bytelen, GET_MODE_SIZE (mode)))
2199 tmps[i] = gen_reg_rtx (mode);
2200 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2202 else if (COMPLEX_MODE_P (mode)
2203 && GET_MODE (src) == mode
2204 && known_eq (bytelen, GET_MODE_SIZE (mode)))
2205 /* Let emit_move_complex do the bulk of the work. */
2207 else if (GET_CODE (src) == CONCAT)
2209 poly_int64 slen = GET_MODE_SIZE (GET_MODE (src));
2210 poly_int64 slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2214 if (can_div_trunc_p (bytepos, slen0, &elt, &subpos)
2215 && known_le (subpos + bytelen, slen0))
2217 /* The following assumes that the concatenated objects all
2218 have the same size. In this case, a simple calculation
2219 can be used to determine the object and the bit field
2221 tmps[i] = XEXP (src, elt);
2222 if (maybe_ne (subpos, 0)
2223 || maybe_ne (subpos + bytelen, slen0)
2224 || (!CONSTANT_P (tmps[i])
2225 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
2226 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2227 subpos * BITS_PER_UNIT,
2228 1, NULL_RTX, mode, mode, false,
2235 gcc_assert (known_eq (bytepos, 0));
2236 mem = assign_stack_temp (GET_MODE (src), slen);
2237 emit_move_insn (mem, src);
2238 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
2239 0, 1, NULL_RTX, mode, mode, false,
2243 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2244 SIMD register, which is currently broken. While we get GCC
2245 to emit proper RTL for these cases, let's dump to memory. */
2246 else if (VECTOR_MODE_P (GET_MODE (dst))
2249 poly_uint64 slen = GET_MODE_SIZE (GET_MODE (src));
2252 mem = assign_stack_temp (GET_MODE (src), slen);
2253 emit_move_insn (mem, src);
2254 tmps[i] = adjust_address (mem, mode, bytepos);
2256 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
2257 && XVECLEN (dst, 0) > 1)
2258 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
2259 else if (CONSTANT_P (src))
2261 if (known_eq (bytelen, ssize))
2267 /* TODO: const_wide_int can have sizes other than this... */
2268 gcc_assert (known_eq (2 * bytelen, ssize));
2269 split_double (src, &first, &second);
2276 else if (REG_P (src) && GET_MODE (src) == mode)
2279 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2280 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2281 mode, mode, false, NULL);
2283 if (maybe_ne (shift, 0))
2284 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
2289 /* Emit code to move a block SRC of type TYPE to a block DST,
2290 where DST is non-consecutive registers represented by a PARALLEL.
2291 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2295 emit_group_load (rtx dst, rtx src, tree type, poly_int64 ssize)
2300 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
2301 emit_group_load_1 (tmps, dst, src, type, ssize);
2303 /* Copy the extracted pieces into the proper (probable) hard regs. */
2304 for (i = 0; i < XVECLEN (dst, 0); i++)
2306 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
2309 emit_move_insn (d, tmps[i]);
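/* Usage sketch (illustrative): loading a BLKmode MEM SRC of type TYPE
   into the register group described by the PARALLEL DST:

     emit_group_load (dst, src, type, int_size_in_bytes (type));

   Passing -1 as the last argument means the total size is not known.  */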
2313 /* Similar, but load SRC into new pseudos in a format that looks like
2314 PARALLEL. This can later be fed to emit_group_move to get things
2315 in the right place. */
2318 emit_group_load_into_temps (rtx parallel, rtx src, tree type, poly_int64 ssize)
2323 vec = rtvec_alloc (XVECLEN (parallel, 0));
2324 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
2326 /* Convert the vector to look just like the original PARALLEL, except
2327 with the computed values. */
2328 for (i = 0; i < XVECLEN (parallel, 0); i++)
2330 rtx e = XVECEXP (parallel, 0, i);
2331 rtx d = XEXP (e, 0);
2335 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
2336 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
2338 RTVEC_ELT (vec, i) = e;
2341 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
2344 /* Emit code to move a block SRC to block DST, where SRC and DST are
2345 non-consecutive groups of registers, each represented by a PARALLEL. */
2348 emit_group_move (rtx dst, rtx src)
2352 gcc_assert (GET_CODE (src) == PARALLEL
2353 && GET_CODE (dst) == PARALLEL
2354 && XVECLEN (src, 0) == XVECLEN (dst, 0));
2356 /* Skip the first entry if NULL. */
2357 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2358 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2359 XEXP (XVECEXP (src, 0, i), 0));
2362 /* Move a group of registers represented by a PARALLEL into pseudos. */
2365 emit_group_move_into_temps (rtx src)
2367 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
2370 for (i = 0; i < XVECLEN (src, 0); i++)
2372 rtx e = XVECEXP (src, 0, i);
2373 rtx d = XEXP (e, 0);
2376 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
2377 RTVEC_ELT (vec, i) = e;
2380 return gen_rtx_PARALLEL (GET_MODE (src), vec);
2383 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2384 where SRC is non-consecutive registers represented by a PARALLEL.
2385 SSIZE represents the total size of block ORIG_DST, or -1 if not
2389 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED,
2393 int start, finish, i;
2394 machine_mode m = GET_MODE (orig_dst);
2396 gcc_assert (GET_CODE (src) == PARALLEL);
2398 if (!SCALAR_INT_MODE_P (m)
2399 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
2401 scalar_int_mode imode;
2402 if (int_mode_for_mode (GET_MODE (orig_dst)).exists (&imode))
2404 dst = gen_reg_rtx (imode);
2405 emit_group_store (dst, src, type, ssize);
2406 dst = gen_lowpart (GET_MODE (orig_dst), dst);
2410 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
2411 emit_group_store (dst, src, type, ssize);
2413 emit_move_insn (orig_dst, dst);
2417 /* Check for a NULL entry, used to indicate that the parameter goes
2418 both on the stack and in registers. */
2419 if (XEXP (XVECEXP (src, 0, 0), 0))
2423 finish = XVECLEN (src, 0);
2425 tmps = XALLOCAVEC (rtx, finish);
2427 /* Copy the (probable) hard regs into pseudos. */
2428 for (i = start; i < finish; i++)
2430 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2431 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
2433 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2434 emit_move_insn (tmps[i], reg);
2440 /* If we won't be storing directly into memory, protect the real destination
2441 from strange tricks we might play. */
2443 if (GET_CODE (dst) == PARALLEL)
2447 /* We can get a PARALLEL dst if there is a conditional expression in
2448 a return statement. In that case, the dst and src are the same,
2449 so no action is necessary. */
2450 if (rtx_equal_p (dst, src))
2453 /* It is unclear if we can ever reach here, but we may as well handle
2454 it. Allocate a temporary, and split this into a store/load to/from
2456 temp = assign_stack_temp (GET_MODE (dst), ssize);
2457 emit_group_store (temp, src, type, ssize);
2458 emit_group_load (dst, temp, type, ssize);
2461 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2463 machine_mode outer = GET_MODE (dst);
2469 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
2470 dst = gen_reg_rtx (outer);
2472 /* Make life a bit easier for combine. */
2473 /* If the first element of the vector is the low part
2474 of the destination mode, use a paradoxical subreg to
2475 initialize the destination. */
2478 inner = GET_MODE (tmps[start]);
2479 bytepos = subreg_lowpart_offset (inner, outer);
2480 if (known_eq (INTVAL (XEXP (XVECEXP (src, 0, start), 1)), bytepos))
2482 temp = simplify_gen_subreg (outer, tmps[start],
2486 emit_move_insn (dst, temp);
2493 /* If the first element wasn't the low part, try the last. */
2495 && start < finish - 1)
2497 inner = GET_MODE (tmps[finish - 1]);
2498 bytepos = subreg_lowpart_offset (inner, outer);
2499 if (known_eq (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)),
2502 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2506 emit_move_insn (dst, temp);
2513 /* Otherwise, simply initialize the result to zero. */
2515 emit_move_insn (dst, CONST0_RTX (outer));
2518 /* Process the pieces. */
2519 for (i = start; i < finish; i++)
2521 poly_int64 bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2522 machine_mode mode = GET_MODE (tmps[i]);
2523 poly_int64 bytelen = GET_MODE_SIZE (mode);
2524 poly_uint64 adj_bytelen;
2527 /* Handle trailing fragments that run over the size of the struct.
2528 It's the target's responsibility to make sure that the fragment
2529 cannot be strictly smaller in some cases and strictly larger
2531 gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
2532 if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2533 adj_bytelen = ssize - bytepos;
2535 adj_bytelen = bytelen;
2537 if (GET_CODE (dst) == CONCAT)
2539 if (known_le (bytepos + adj_bytelen,
2540 GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
2541 dest = XEXP (dst, 0);
2542 else if (known_ge (bytepos, GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
2544 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2545 dest = XEXP (dst, 1);
2549 machine_mode dest_mode = GET_MODE (dest);
2550 machine_mode tmp_mode = GET_MODE (tmps[i]);
2552 gcc_assert (known_eq (bytepos, 0) && XVECLEN (src, 0));
2554 if (GET_MODE_ALIGNMENT (dest_mode)
2555 >= GET_MODE_ALIGNMENT (tmp_mode))
2557 dest = assign_stack_temp (dest_mode,
2558 GET_MODE_SIZE (dest_mode));
2559 emit_move_insn (adjust_address (dest,
2567 dest = assign_stack_temp (tmp_mode,
2568 GET_MODE_SIZE (tmp_mode));
2569 emit_move_insn (dest, tmps[i]);
2570 dst = adjust_address (dest, dest_mode, bytepos);
2576 /* Handle trailing fragments that run over the size of the struct. */
2577 if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2579 /* store_bit_field always takes its value from the lsb.
2580 Move the fragment to the lsb if it's not already there. */
2582 #ifdef BLOCK_REG_PADDING
2583 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2584 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
2590 poly_int64 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2591 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2595 /* Make sure not to write past the end of the struct. */
2596 store_bit_field (dest,
2597 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2598 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2599 VOIDmode, tmps[i], false);
2602 /* Optimize the access just a bit. */
2603 else if (MEM_P (dest)
2604 && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (dest))
2605 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2606 && multiple_p (bytepos * BITS_PER_UNIT,
2607 GET_MODE_ALIGNMENT (mode))
2608 && known_eq (bytelen, GET_MODE_SIZE (mode)))
2609 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2612 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2613 0, 0, mode, tmps[i], false);
2616 /* Copy from the pseudo into the (probable) hard reg. */
2617 if (orig_dst != dst)
2618 emit_move_insn (orig_dst, dst);
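/* Usage sketch (illustrative): scattering a PARALLEL return value SRC
   of type TYPE into a fresh BLKmode stack slot:

     HOST_WIDE_INT size = int_size_in_bytes (type);
     rtx slot = assign_stack_temp (BLKmode, size);
     emit_group_store (slot, src, type, size);

   The PARALLEL entries give the byte offset of each register within
   the slot.  */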
2621 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2622 of the value stored in X. */
2625 maybe_emit_group_store (rtx x, tree type)
2627 machine_mode mode = TYPE_MODE (type);
2628 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2629 if (GET_CODE (x) == PARALLEL)
2631 rtx result = gen_reg_rtx (mode);
2632 emit_group_store (result, x, type, int_size_in_bytes (type));
2638 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2640 This is used on targets that return BLKmode values in registers. */
2643 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2645 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2646 rtx src = NULL, dst = NULL;
2647 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2648 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2649 /* No current ABI uses variable-sized modes to pass a BLKmode type. */
2650 fixed_size_mode mode = as_a <fixed_size_mode> (GET_MODE (srcreg));
2651 fixed_size_mode tmode = as_a <fixed_size_mode> (GET_MODE (target));
2652 fixed_size_mode copy_mode;
2654 /* BLKmode registers created in the back-end shouldn't have survived. */
2655 gcc_assert (mode != BLKmode);
2657 /* If the structure doesn't take up a whole number of words, see whether
2658 SRCREG is padded on the left or on the right. If it's on the left,
2659 set PADDING_CORRECTION to the number of bits to skip.
2661 In most ABIs, the structure will be returned at the least significant end of
2662 the register, which translates to right padding on little-endian
2663 targets and left padding on big-endian targets. The opposite
2664 holds if the structure is returned at the most significant
2665 end of the register. */
2666 if (bytes % UNITS_PER_WORD != 0
2667 && (targetm.calls.return_in_msb (type)
2669 : BYTES_BIG_ENDIAN))
2671 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2673 /* We can use a single move if we have an exact mode for the size. */
2674 else if (MEM_P (target)
2675 && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (target))
2676 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2677 && bytes == GET_MODE_SIZE (mode))
2679 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2683 /* And if we additionally have the same mode for a register. */
2684 else if (REG_P (target)
2685 && GET_MODE (target) == mode
2686 && bytes == GET_MODE_SIZE (mode))
2688 emit_move_insn (target, srcreg);
2692 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2693 into a new pseudo which is a full word. */
2694 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2696 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2700 /* Copy the structure BITSIZE bits at a time. If the target lives in
2701 memory, take care of not reading/writing past its end by selecting
2702 a copy mode suited to BITSIZE. This should always be possible given
2705 If the target lives in a register, make sure not to select a copy mode
2706 larger than the mode of the register.
2708 We could probably emit more efficient code for machines which do not use
2709 strict alignment, but it doesn't seem worth the effort at the current
2712 copy_mode = word_mode;
2715 opt_scalar_int_mode mem_mode = int_mode_for_size (bitsize, 1);
2716 if (mem_mode.exists ())
2717 copy_mode = mem_mode.require ();
2719 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2722 for (bitpos = 0, xbitpos = padding_correction;
2723 bitpos < bytes * BITS_PER_UNIT;
2724 bitpos += bitsize, xbitpos += bitsize)
2726 /* We need a new source operand each time xbitpos is on a
2727 word boundary and when xbitpos == padding_correction
2728 (the first time through). */
2729 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2730 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2732 /* We need a new destination operand each time bitpos is on
2734 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2736 else if (bitpos % BITS_PER_WORD == 0)
2737 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2739 /* Use xbitpos for the source extraction (right justified) and
2740 bitpos for the destination store (left justified). */
2741 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2742 extract_bit_field (src, bitsize,
2743 xbitpos % BITS_PER_WORD, 1,
2744 NULL_RTX, copy_mode, copy_mode,
2750 /* Copy BLKmode value SRC into a register of mode MODE_IN. Return the
2751 register if it contains any data, otherwise return null.
2753 This is used on targets that return BLKmode values in registers. */
2756 copy_blkmode_to_reg (machine_mode mode_in, tree src)
2759 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2760 unsigned int bitsize;
2761 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2762 /* No current ABI uses variable-sized modes to pass a BLKmode type. */
2763 fixed_size_mode mode = as_a <fixed_size_mode> (mode_in);
2764 fixed_size_mode dst_mode;
2766 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2768 x = expand_normal (src);
2770 bytes = arg_int_size_in_bytes (TREE_TYPE (src));
2774 /* If the structure doesn't take up a whole number of words, see
2775 whether the register value should be padded on the left or on
2776 the right. Set PADDING_CORRECTION to the number of padding
2777 bits needed on the left side.
2779 In most ABIs, the structure will be returned at the least significant end of
2780 the register, which translates to right padding on little-endian
2781 targets and left padding on big-endian targets. The opposite
2782 holds if the structure is returned at the most significant
2783 end of the register. */
2784 if (bytes % UNITS_PER_WORD != 0
2785 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2787 : BYTES_BIG_ENDIAN))
2788 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2791 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2792 dst_words = XALLOCAVEC (rtx, n_regs);
2793 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2795 /* Copy the structure BITSIZE bits at a time. */
2796 for (bitpos = 0, xbitpos = padding_correction;
2797 bitpos < bytes * BITS_PER_UNIT;
2798 bitpos += bitsize, xbitpos += bitsize)
2800 /* We need a new destination pseudo each time xbitpos is
2801 on a word boundary and when xbitpos == padding_correction
2802 (the first time through). */
2803 if (xbitpos % BITS_PER_WORD == 0
2804 || xbitpos == padding_correction)
2806 /* Generate an appropriate register. */
2807 dst_word = gen_reg_rtx (word_mode);
2808 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2810 /* Clear the destination before we move anything into it. */
2811 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2814 /* We need a new source operand each time bitpos is on a word
2816 if (bitpos % BITS_PER_WORD == 0)
2817 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2819 /* Use bitpos for the source extraction (left justified) and
2820 xbitpos for the destination store (right justified). */
2821 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2823 extract_bit_field (src_word, bitsize,
2824 bitpos % BITS_PER_WORD, 1,
2825 NULL_RTX, word_mode, word_mode,
2830 if (mode == BLKmode)
2832 /* Find the smallest integer mode large enough to hold the
2833 entire structure. */
2834 opt_scalar_int_mode mode_iter;
2835 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
2836 if (GET_MODE_SIZE (mode_iter.require ()) >= bytes)
2839 /* A suitable mode should have been found. */
2840 mode = mode_iter.require ();
2843 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2844 dst_mode = word_mode;
2847 dst = gen_reg_rtx (dst_mode);
2849 for (i = 0; i < n_regs; i++)
2850 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2852 if (mode != dst_mode)
2853 dst = gen_lowpart (mode, dst);
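/* Usage sketch (illustrative; MODE and RETURN_REG are assumed to come
   from the target's return-value hook): expanding "return aggr" for a
   small BLKmode aggregate RETVAL might do

     rtx val = copy_blkmode_to_reg (mode, retval);
     if (val)
       emit_move_insn (return_reg, val);

   with a null result meaning the value contained no data.  */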
2858 /* Add a USE expression for REG to the (possibly empty) list pointed
2859 to by CALL_FUSAGE. REG must denote a hard register. */
2862 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2864 gcc_assert (REG_P (reg));
2866 if (!HARD_REGISTER_P (reg))
2870 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2873 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2874 to by CALL_FUSAGE. REG must denote a hard register. */
2877 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2879 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2882 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2885 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2886 starting at REGNO. All of these registers must be hard registers. */
2889 use_regs (rtx *call_fusage, int regno, int nregs)
2893 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2895 for (i = 0; i < nregs; i++)
2896 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2899 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2900 PARALLEL REGS. This is for calls that pass values in multiple
2901 non-contiguous locations. The Irix 6 ABI has examples of this. */
2904 use_group_regs (rtx *call_fusage, rtx regs)
2908 for (i = 0; i < XVECLEN (regs, 0); i++)
2910 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2912 /* A NULL entry means the parameter goes both on the stack and in
2913 registers. This can also be a MEM for targets that pass values
2914 partially on the stack and partially in registers. */
2915 if (reg != 0 && REG_P (reg))
2916 use_reg (call_fusage, reg);
2920 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2921 assignment and the code of the expression on the RHS is CODE. Return
2925 get_def_for_expr (tree name, enum tree_code code)
2929 if (TREE_CODE (name) != SSA_NAME)
2932 def_stmt = get_gimple_for_ssa_name (name);
2934 || gimple_assign_rhs_code (def_stmt) != code)
2940 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2941 assignment and the class of the expression on the RHS is CLASS. Return
2945 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2949 if (TREE_CODE (name) != SSA_NAME)
2952 def_stmt = get_gimple_for_ssa_name (name);
2954 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2960 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2961 its length in bytes. */
2964 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2965 unsigned int expected_align, HOST_WIDE_INT expected_size,
2966 unsigned HOST_WIDE_INT min_size,
2967 unsigned HOST_WIDE_INT max_size,
2968 unsigned HOST_WIDE_INT probable_max_size)
2970 machine_mode mode = GET_MODE (object);
2973 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2975 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2976 just move a zero. Otherwise, do this a piece at a time. */
2978 && CONST_INT_P (size)
2979 && known_eq (INTVAL (size), GET_MODE_SIZE (mode)))
2981 rtx zero = CONST0_RTX (mode);
2984 emit_move_insn (object, zero);
2988 if (COMPLEX_MODE_P (mode))
2990 zero = CONST0_RTX (GET_MODE_INNER (mode));
2993 write_complex_part (object, zero, 0);
2994 write_complex_part (object, zero, 1);
3000 if (size == const0_rtx)
3003 align = MEM_ALIGN (object);
3005 if (CONST_INT_P (size)
3006 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
3008 optimize_insn_for_speed_p ()))
3009 clear_by_pieces (object, INTVAL (size), align);
3010 else if (set_storage_via_setmem (object, size, const0_rtx, align,
3011 expected_align, expected_size,
3012 min_size, max_size, probable_max_size))
3014 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
3015 return set_storage_via_libcall (object, size, const0_rtx,
3016 method == BLOCK_OP_TAILCALL);
3024 clear_storage (rtx object, rtx size, enum block_op_methods method)
3026 unsigned HOST_WIDE_INT max, min = 0;
3027 if (GET_CODE (size) == CONST_INT)
3028 min = max = UINTVAL (size);
3030 max = GET_MODE_MASK (GET_MODE (size));
3031 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
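/* Usage sketch (illustrative): zeroing a 32-byte BLKmode MEM:

     clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);

   With a constant size, clear_by_pieces is considered first, then a
   setmem pattern, and finally a memset libcall.  */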
3035 /* A subroutine of clear_storage. Expand a call to memset.
3036 Return the return value of memset, or 0 otherwise. */
3039 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
3041 tree call_expr, fn, object_tree, size_tree, val_tree;
3042 machine_mode size_mode;
3044 object = copy_addr_to_reg (XEXP (object, 0));
3045 object_tree = make_tree (ptr_type_node, object);
3047 if (!CONST_INT_P (val))
3048 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
3049 val_tree = make_tree (integer_type_node, val);
3051 size_mode = TYPE_MODE (sizetype);
3052 size = convert_to_mode (size_mode, size, 1);
3053 size = copy_to_mode_reg (size_mode, size);
3054 size_tree = make_tree (sizetype, size);
3056 /* It is incorrect to use the libcall calling conventions for calls to
3057 memset because it can be provided by the user. */
3058 fn = builtin_decl_implicit (BUILT_IN_MEMSET);
3059 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
3060 CALL_EXPR_TAILCALL (call_expr) = tailcall;
3062 return expand_call (call_expr, NULL_RTX, false);
3065 /* Expand a setmem pattern; return true if successful. */
3068 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
3069 unsigned int expected_align, HOST_WIDE_INT expected_size,
3070 unsigned HOST_WIDE_INT min_size,
3071 unsigned HOST_WIDE_INT max_size,
3072 unsigned HOST_WIDE_INT probable_max_size)
3074 /* Try the most limited insn first, because there's no point
3075 including more than one in the machine description unless
3076 the more limited one has some advantage. */
3078 if (expected_align < align)
3079 expected_align = align;
3080 if (expected_size != -1)
3082 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
3083 expected_size = max_size;
3084 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
3085 expected_size = min_size;
3088 opt_scalar_int_mode mode_iter;
3089 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
3091 scalar_int_mode mode = mode_iter.require ();
3092 enum insn_code code = direct_optab_handler (setmem_optab, mode);
3094 if (code != CODE_FOR_nothing
3095 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
3096 here because if SIZE is less than the mode mask, as it is
3097 returned by the macro, it will definitely be less than the
3098 actual mode mask. Since SIZE is within the Pmode address
3099 space, we limit MODE to Pmode. */
3100 && ((CONST_INT_P (size)
3101 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3102 <= (GET_MODE_MASK (mode) >> 1)))
3103 || max_size <= (GET_MODE_MASK (mode) >> 1)
3104 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
3106 struct expand_operand ops[9];
3109 nops = insn_data[(int) code].n_generator_args;
3110 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
3112 create_fixed_operand (&ops[0], object);
3113 /* The check above guarantees that this size conversion is valid. */
3114 create_convert_operand_to (&ops[1], size, mode, true);
3115 create_convert_operand_from (&ops[2], val, byte_mode, true);
3116 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
3119 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
3120 create_integer_operand (&ops[5], expected_size);
3124 create_integer_operand (&ops[6], min_size);
3125 /* If we cannot represent the maximal size,
3126 make the parameter NULL. */
3127 if ((HOST_WIDE_INT) max_size != -1)
3128 create_integer_operand (&ops[7], max_size);
3130 create_fixed_operand (&ops[7], NULL);
3134 /* If we cannot represent the maximal size,
3135 make the parameter NULL. */
3136 if ((HOST_WIDE_INT) probable_max_size != -1)
3137 create_integer_operand (&ops[8], probable_max_size);
3139 create_fixed_operand (&ops[8], NULL);
3141 if (maybe_expand_insn (code, nops, ops))
3150 /* Write to one of the components of the complex value CPLX. Write VAL to
3151 the real part if IMAG_P is false, and the imaginary part if it's true. */
3154 write_complex_part (rtx cplx, rtx val, bool imag_p)
3160 if (GET_CODE (cplx) == CONCAT)
3162 emit_move_insn (XEXP (cplx, imag_p), val);
3166 cmode = GET_MODE (cplx);
3167 imode = GET_MODE_INNER (cmode);
3168 ibitsize = GET_MODE_BITSIZE (imode);
3170 /* For MEMs simplify_gen_subreg may generate an invalid new address
3171 because, e.g., the original address is considered mode-dependent
3172 by the target, which restricts simplify_subreg from invoking
3173 adjust_address_nv. Instead of preparing fallback support for an
3174 invalid address, we call adjust_address_nv directly. */
3177 emit_move_insn (adjust_address_nv (cplx, imode,
3178 imag_p ? GET_MODE_SIZE (imode) : 0),
3183 /* If the sub-object is at least word sized, then we know that subregging
3184 will work. This special case is important, since store_bit_field
3185 wants to operate on integer modes, and there's rarely an OImode to
3186 correspond to TCmode. */
3187 if (ibitsize >= BITS_PER_WORD
3188 /* For hard regs we have exact predicates. Assume we can split
3189 the original object if it spans an even number of hard regs.
3190 This special case is important for SCmode on 64-bit platforms
3191 where the natural size of floating-point regs is 32-bit. */
3193 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3194 && REG_NREGS (cplx) % 2 == 0))
3196 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3197 imag_p ? GET_MODE_SIZE (imode) : 0);
3200 emit_move_insn (part, val);
3204 /* simplify_gen_subreg may fail for sub-word MEMs. */
3205 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3208 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
3212 /* Extract one of the components of the complex value CPLX. Extract the
3213 real part if IMAG_P is false, and the imaginary part if it's true. */
3216 read_complex_part (rtx cplx, bool imag_p)
3222 if (GET_CODE (cplx) == CONCAT)
3223 return XEXP (cplx, imag_p);
3225 cmode = GET_MODE (cplx);
3226 imode = GET_MODE_INNER (cmode);
3227 ibitsize = GET_MODE_BITSIZE (imode);
3229 /* Special case reads from complex constants that got spilled to memory. */
3230 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3232 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3233 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3235 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3236 if (CONSTANT_CLASS_P (part))
3237 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3241 /* For MEMs simplify_gen_subreg may generate an invalid new address
3242 because, e.g., the original address is considered mode-dependent
3243 by the target, which restricts simplify_subreg from invoking
3244 adjust_address_nv. Instead of preparing fallback support for an
3245 invalid address, we call adjust_address_nv directly. */
3247 return adjust_address_nv (cplx, imode,
3248 imag_p ? GET_MODE_SIZE (imode) : 0);
3250 /* If the sub-object is at least word sized, then we know that subregging
3251 will work. This special case is important, since extract_bit_field
3252 wants to operate on integer modes, and there's rarely an OImode to
3253 correspond to TCmode. */
3254 if (ibitsize >= BITS_PER_WORD
3255 /* For hard regs we have exact predicates. Assume we can split
3256 the original object if it spans an even number of hard regs.
3257 This special case is important for SCmode on 64-bit platforms
3258 where the natural size of floating-point regs is 32-bit. */
3260 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3261 && REG_NREGS (cplx) % 2 == 0))
3263 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3264 imag_p ? GET_MODE_SIZE (imode) : 0);
3268 /* simplify_gen_subreg may fail for sub-word MEMs. */
3269 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3272 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3273 true, NULL_RTX, imode, imode, false, NULL);
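/* Usage sketch (illustrative): a complex conjugate from X into Y can
   be expanded with these two helpers by copying the real part and
   negating the imaginary part:

     machine_mode imode = GET_MODE_INNER (GET_MODE (x));
     write_complex_part (y, read_complex_part (x, false), false);
     write_complex_part (y, expand_unop (imode, neg_optab,
                                         read_complex_part (x, true),
                                         NULL_RTX, 0), true);

   Each part is moved independently, so X and Y may be CONCATs, MEMs,
   or registers.  */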
3276 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3277 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3278 represented in NEW_MODE. If FORCE is true, this will never happen, as
3279 we'll force-create a SUBREG if needed. */
3282 emit_move_change_mode (machine_mode new_mode,
3283 machine_mode old_mode, rtx x, bool force)
3287 if (push_operand (x, GET_MODE (x)))
3289 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3290 MEM_COPY_ATTRIBUTES (ret, x);
3294 /* We don't have to worry about changing the address since the
3295 size in bytes is supposed to be the same. */
3296 if (reload_in_progress)
3298 /* Copy the MEM to change the mode and move any
3299 substitutions from the old MEM to the new one. */
3300 ret = adjust_address_nv (x, new_mode, 0);
3301 copy_replacements (x, ret);
3304 ret = adjust_address (x, new_mode, 0);
3308 /* Note that we do want simplify_subreg's behavior of validating
3309 that the new mode is ok for a hard register. If we were to use
3310 simplify_gen_subreg, we would create the subreg, but would
3311 probably run into the target not being able to implement it. */
3312 /* Except, of course, when FORCE is true, when this is exactly what
3313 we want. Which is needed for CCmodes on some targets. */
3315 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3317 ret = simplify_subreg (new_mode, x, old_mode, 0);
3323 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3324 an integer mode of the same size as MODE. Returns the instruction
3325 emitted, or NULL if such a move could not be generated. */
3328 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3330 scalar_int_mode imode;
3331 enum insn_code code;
3333 /* There must exist a mode of the exact size we require. */
3334 if (!int_mode_for_mode (mode).exists (&imode))
3337 /* The target must support moves in this mode. */
3338 code = optab_handler (mov_optab, imode);
3339 if (code == CODE_FOR_nothing)
3342 x = emit_move_change_mode (imode, mode, x, force);
3345 y = emit_move_change_mode (imode, mode, y, force);
3348 return emit_insn (GEN_FCN (code) (x, y));
3351 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3352 Return an equivalent MEM that does not use an auto-increment. */
3355 emit_move_resolve_push (machine_mode mode, rtx x)
3357 enum rtx_code code = GET_CODE (XEXP (x, 0));
3360 poly_int64 adjust = GET_MODE_SIZE (mode);
3361 #ifdef PUSH_ROUNDING
3362 adjust = PUSH_ROUNDING (adjust);
3364 if (code == PRE_DEC || code == POST_DEC)
3366 else if (code == PRE_MODIFY || code == POST_MODIFY)
3368 rtx expr = XEXP (XEXP (x, 0), 1);
3370 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3371 poly_int64 val = rtx_to_poly_int64 (XEXP (expr, 1));
3372 if (GET_CODE (expr) == MINUS)
3374 gcc_assert (known_eq (adjust, val) || known_eq (adjust, -val));
3378 /* Do not use anti_adjust_stack, since we don't want to update
3379 stack_pointer_delta. */
3380 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3381 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3382 0, OPTAB_LIB_WIDEN);
3383 if (temp != stack_pointer_rtx)
3384 emit_move_insn (stack_pointer_rtx, temp);
3391 temp = stack_pointer_rtx;
3396 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3402 return replace_equiv_address (x, temp);
3405 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3406 X is known to satisfy push_operand, and MODE is known to be complex.
3407 Returns the last instruction emitted. */
3410 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3412 scalar_mode submode = GET_MODE_INNER (mode);
3415 #ifdef PUSH_ROUNDING
3416 poly_int64 submodesize = GET_MODE_SIZE (submode);
3418 /* In case we output to the stack, but the size is not one that the
3419 machine can push exactly, we need to use move instructions. */
3420 if (maybe_ne (PUSH_ROUNDING (submodesize), submodesize))
3422 x = emit_move_resolve_push (mode, x);
3423 return emit_move_insn (x, y);
3427 /* Note that the real part always precedes the imag part in memory
3428 regardless of the machine's endianness. */
3429 switch (GET_CODE (XEXP (x, 0)))
3443 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3444 read_complex_part (y, imag_first));
3445 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3446 read_complex_part (y, !imag_first));
3449 /* A subroutine of emit_move_complex. Perform the move from Y to X
3450 via two moves of the parts. Returns the last instruction emitted. */
3453 emit_move_complex_parts (rtx x, rtx y)
3455 /* Show the output dies here. This is necessary for SUBREGs
3456 of pseudos since we cannot track their lifetimes correctly;
3457 hard regs shouldn't appear here except as return values. */
3458 if (!reload_completed && !reload_in_progress
3459 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3462 write_complex_part (x, read_complex_part (y, false), false);
3463 write_complex_part (x, read_complex_part (y, true), true);
3465 return get_last_insn ();
3468 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3469 MODE is known to be complex. Returns the last instruction emitted. */
3472 emit_move_complex (machine_mode mode, rtx x, rtx y)
3476 /* Need to take special care for pushes, to maintain proper ordering
3477 of the data, and possibly extra padding. */
3478 if (push_operand (x, mode))
3479 return emit_move_complex_push (mode, x, y);
3481 /* See if we can coerce the target into moving both values at once, except
3482 for floating point where we favor moving as parts if this is easy. */
3483 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3484 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3486 && HARD_REGISTER_P (x)
3487 && REG_NREGS (x) == 1)
3489 && HARD_REGISTER_P (y)
3490 && REG_NREGS (y) == 1))
3492 /* Not possible if the values are inherently not adjacent. */
3493 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3495 /* Is possible if both are registers (or subregs of registers). */
3496 else if (register_operand (x, mode) && register_operand (y, mode))
3498 /* If one of the operands is a memory, and alignment constraints
3499 are friendly enough, we may be able to do combined memory operations.
3500 We do not attempt this if Y is a constant because that combination is
3501 usually better with the by-parts thing below. */
3502 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3503 && (!STRICT_ALIGNMENT
3504 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3513 /* For memory to memory moves, optimal behavior can be had with the
3514 existing block move logic. */
3515 if (MEM_P (x) && MEM_P (y))
3517 emit_block_move (x, y, gen_int_mode (GET_MODE_SIZE (mode), Pmode),
3518 BLOCK_OP_NO_LIBCALL);
3519 return get_last_insn ();
3522 ret = emit_move_via_integer (mode, x, y, true);
3527 return emit_move_complex_parts (x, y);
3530 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3531 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3534 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3538 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3541 enum insn_code code = optab_handler (mov_optab, CCmode);
3542 if (code != CODE_FOR_nothing)
3544 x = emit_move_change_mode (CCmode, mode, x, true);
3545 y = emit_move_change_mode (CCmode, mode, y, true);
3546 return emit_insn (GEN_FCN (code) (x, y));
3550 /* Otherwise, find the MODE_INT mode of the same width. */
3551 ret = emit_move_via_integer (mode, x, y, false);
3552 gcc_assert (ret != NULL);
3556 /* Return true if word I of OP lies entirely in the
3557 undefined bits of a paradoxical subreg. */
3560 undefined_operand_subword_p (const_rtx op, int i)
3562 if (GET_CODE (op) != SUBREG)
3564 machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
3565 poly_int64 offset = i * UNITS_PER_WORD + subreg_memory_offset (op);
3566 return (known_ge (offset, GET_MODE_SIZE (innermostmode))
3567 || known_le (offset, -UNITS_PER_WORD));
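/* For example (illustrative): on a 32-bit target, in the paradoxical
   subreg (subreg:TI (reg:SI x) 0) only word 0 carries defined bits;
   for I >= 1 the offset is at least GET_MODE_SIZE (SImode), so this
   predicate returns true and emit_move_multi_word below can skip the
   move for those words.  */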
3570 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3571 MODE is any multi-word or full-word mode that lacks a move_insn
3572 pattern. Note that you will get better code if you define such
3573 patterns, even if they must turn into multiple assembler instructions. */
3576 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3578 rtx_insn *last_insn = 0;
3584 /* This function can only handle cases where the number of words is
3585 known at compile time. */
3586 mode_size = GET_MODE_SIZE (mode).to_constant ();
3587 gcc_assert (mode_size >= UNITS_PER_WORD);
3589 /* If X is a push on the stack, do the push now and replace
3590 X with a reference to the stack pointer. */
3591 if (push_operand (x, mode))
3592 x = emit_move_resolve_push (mode, x);
3594 /* If we are in reload, see if either operand is a MEM whose address
3595 is scheduled for replacement. */
3596 if (reload_in_progress && MEM_P (x)
3597 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3598 x = replace_equiv_address_nv (x, inner);
3599 if (reload_in_progress && MEM_P (y)
3600 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3601 y = replace_equiv_address_nv (y, inner);
3605 need_clobber = false;
3606 for (i = 0; i < CEIL (mode_size, UNITS_PER_WORD); i++)
3608 rtx xpart = operand_subword (x, i, 1, mode);
3611 /* Do not generate code for a move if it would come entirely
3612 from the undefined bits of a paradoxical subreg. */
3613 if (undefined_operand_subword_p (y, i))
3616 ypart = operand_subword (y, i, 1, mode);
3618 /* If we can't get a part of Y, put Y into memory if it is a
3619 constant. Otherwise, force it into a register. Then we must
3620 be able to get a part of Y. */
3621 if (ypart == 0 && CONSTANT_P (y))
3623 y = use_anchored_address (force_const_mem (mode, y));
3624 ypart = operand_subword (y, i, 1, mode);
3626 else if (ypart == 0)
3627 ypart = operand_subword_force (y, i, mode);
3629 gcc_assert (xpart && ypart);
3631 need_clobber |= (GET_CODE (xpart) == SUBREG);
3633 last_insn = emit_move_insn (xpart, ypart);
3639 /* Show the output dies here. This is necessary for SUBREGs
3640 of pseudos since we cannot track their lifetimes correctly;
3641 hard regs shouldn't appear here except as return values.
3642 We never want to emit such a clobber after reload. */
3644 && ! (reload_in_progress || reload_completed)
3645 && need_clobber != 0)
3653 /* Low level part of emit_move_insn.
3654 Called just like emit_move_insn, but assumes X and Y
3655 are basically valid. */
3658 emit_move_insn_1 (rtx x, rtx y)
3660 machine_mode mode = GET_MODE (x);
3661 enum insn_code code;
3663 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3665 code = optab_handler (mov_optab, mode);
3666 if (code != CODE_FOR_nothing)
3667 return emit_insn (GEN_FCN (code) (x, y));
3669 /* Expand complex moves by moving real part and imag part. */
3670 if (COMPLEX_MODE_P (mode))
3671 return emit_move_complex (mode, x, y);
3673 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3674 || ALL_FIXED_POINT_MODE_P (mode))
3676 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3678 /* If we can't find an integer mode, use multi words. */
3682 return emit_move_multi_word (mode, x, y);
3685 if (GET_MODE_CLASS (mode) == MODE_CC)
3686 return emit_move_ccmode (mode, x, y);
3688 /* Try using a move pattern for the corresponding integer mode. This is
3689 only safe when simplify_subreg can convert MODE constants into integer
3690 constants. At present, it can only do this reliably if the value
3691 fits within a HOST_WIDE_INT. */
3693 || known_le (GET_MODE_BITSIZE (mode), HOST_BITS_PER_WIDE_INT))
3695 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3699 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3704 return emit_move_multi_word (mode, x, y);
3707 /* Generate code to copy Y into X.
3708 Both Y and X must have the same mode, except that
3709 Y can be a constant with VOIDmode.
3710 This mode cannot be BLKmode; use emit_block_move for that.
3712 Return the last instruction emitted. */
3715 emit_move_insn (rtx x, rtx y)
3717 machine_mode mode = GET_MODE (x);
3718 rtx y_cst = NULL_RTX;
3719 rtx_insn *last_insn;
3722 gcc_assert (mode != BLKmode
3723 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3728 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3729 && (last_insn = compress_float_constant (x, y)))
3734 if (!targetm.legitimate_constant_p (mode, y))
3736 y = force_const_mem (mode, y);
3738 /* If the target's cannot_force_const_mem prevented the spill,
3739 assume that the target's move expanders will also take care
3740 of the non-legitimate constant. */
3744 y = use_anchored_address (y);
3748 /* If X or Y are memory references, verify that their addresses are valid
3751 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3753 && ! push_operand (x, GET_MODE (x))))
3754 x = validize_mem (x);
3757 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3758 MEM_ADDR_SPACE (y)))
3759 y = validize_mem (y);
3761 gcc_assert (mode != BLKmode);
3763 last_insn = emit_move_insn_1 (x, y);
3765 if (y_cst && REG_P (x)
3766 && (set = single_set (last_insn)) != NULL_RTX
3767 && SET_DEST (set) == x
3768 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3769 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3774 /* Generate the body of an instruction to copy Y into X.
3775 It may be a list of insns, if one insn isn't enough. */
3778 gen_move_insn (rtx x, rtx y)
3783 emit_move_insn_1 (x, y);
3789 /* If Y is representable exactly in a narrower mode, and the target can
3790 perform the extension directly from constant or memory, then emit the
3791 move as an extension. */
3794 compress_float_constant (rtx x, rtx y)
3796 machine_mode dstmode = GET_MODE (x);
3797 machine_mode orig_srcmode = GET_MODE (y);
3798 machine_mode srcmode;
3799 const REAL_VALUE_TYPE *r;
3800 int oldcost, newcost;
3801 bool speed = optimize_insn_for_speed_p ();
3803 r = CONST_DOUBLE_REAL_VALUE (y);
3805 if (targetm.legitimate_constant_p (dstmode, y))
3806 oldcost = set_src_cost (y, orig_srcmode, speed);
3808 oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3810 FOR_EACH_MODE_UNTIL (srcmode, orig_srcmode)
3814 rtx_insn *last_insn;
3816 /* Skip if the target can't extend this way. */
3817 ic = can_extend_p (dstmode, srcmode, 0);
3818 if (ic == CODE_FOR_nothing)
3821 /* Skip if the narrowed value isn't exact. */
3822 if (! exact_real_truncate (srcmode, r))
3825 trunc_y = const_double_from_real_value (*r, srcmode);
3827 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3829 /* Skip if the target needs extra instructions to perform
3831 if (!insn_operand_matches (ic, 1, trunc_y))
3833 /* This is valid, but may not be cheaper than the original. */
3834 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3836 if (oldcost < newcost)
3839 else if (float_extend_from_mem[dstmode][srcmode])
3841 trunc_y = force_const_mem (srcmode, trunc_y);
3842 /* This is valid, but may not be cheaper than the original. */
3843 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3845 if (oldcost < newcost)
3847 trunc_y = validize_mem (trunc_y);
3852 /* For CSE's benefit, force the compressed constant pool entry
3853 into a new pseudo. This constant may be used in different modes,
3854 and if not, combine will put things back together for us. */
3855 trunc_y = force_reg (srcmode, trunc_y);
3857 /* If x is a hard register, perform the extension into a pseudo,
3858 so that e.g. stack realignment code is aware of it. */
3860 if (REG_P (x) && HARD_REGISTER_P (x))
3861 target = gen_reg_rtx (dstmode);
3863 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3864 last_insn = get_last_insn ();
3867 set_unique_reg_note (last_insn, REG_EQUAL, y);
3870 return emit_move_insn (x, target);
3877 /* Pushing data onto the stack. */
3879 /* Push a block of length SIZE (perhaps variable)
3880 and return an rtx to address the beginning of the block.
3881 The value may be virtual_outgoing_args_rtx.
3883 EXTRA is the number of bytes of padding to push in addition to SIZE.
3884 BELOW nonzero means this padding comes at low addresses;
3885 otherwise, the padding comes at high addresses. */
3888 push_block (rtx size, poly_int64 extra, int below)
3892 size = convert_modes (Pmode, ptr_mode, size, 1);
3893 if (CONSTANT_P (size))
3894 anti_adjust_stack (plus_constant (Pmode, size, extra));
3895 else if (REG_P (size) && known_eq (extra, 0))
3896 anti_adjust_stack (size);
3899 temp = copy_to_mode_reg (Pmode, size);
3900 if (maybe_ne (extra, 0))
3901 temp = expand_binop (Pmode, add_optab, temp,
3902 gen_int_mode (extra, Pmode),
3903 temp, 0, OPTAB_LIB_WIDEN);
3904 anti_adjust_stack (temp);
3907 if (STACK_GROWS_DOWNWARD)
3909 temp = virtual_outgoing_args_rtx;
3910 if (maybe_ne (extra, 0) && below)
3911 temp = plus_constant (Pmode, temp, extra);
3915 if (CONST_INT_P (size))
3916 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3917 -INTVAL (size) - (below ? 0 : extra));
3918 else if (maybe_ne (extra, 0) && !below)
3919 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3920 negate_rtx (Pmode, plus_constant (Pmode, size,
3923 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3924 negate_rtx (Pmode, size));
3927 return memory_address (NARROWEST_INT_MODE, temp);
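/* Usage sketch (illustrative): the call expander reserves NEEDED bytes
   of outgoing-argument space with a call along the lines of

     rtx argblock = push_block (GEN_INT (needed), 0, 0);

   and then stores the outgoing arguments at offsets from the returned
   address.  */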
3930 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3933 mem_autoinc_base (rtx mem)
3937 rtx addr = XEXP (mem, 0);
3938 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3939 return XEXP (addr, 0);
3944 /* A utility routine used here, in reload, and in try_split. The insns
3945 after PREV up to and including LAST are known to adjust the stack,
3946 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3947 placing notes as appropriate. PREV may be NULL, indicating the
3948 entire insn sequence prior to LAST should be scanned.
3950 The set of allowed stack pointer modifications is small:
3951 (1) One or more auto-inc style memory references (aka pushes),
3952 (2) One or more addition/subtraction with the SP as destination,
3953 (3) A single move insn with the SP as destination,
3954 (4) A call_pop insn,
3955 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3957 Insns in the sequence that do not modify the SP are ignored,
3958 except for noreturn calls.
3960 The return value is the amount of adjustment that can be trivially
3961 verified, via immediate operand or auto-inc. If the adjustment
3962 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3965 find_args_size_adjust (rtx_insn *insn)
3970 pat = PATTERN (insn);
3973 /* Look for a call_pop pattern. */
3976 /* We have to allow non-call_pop patterns for the case
3977 of emit_single_push_insn of a TLS address. */
3978 if (GET_CODE (pat) != PARALLEL)
3981 /* All call_pop patterns have a stack pointer adjust in the parallel.
3982 The call itself is always first, and the stack adjust is
3983 usually last, so search from the end. */
3984 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3986 set = XVECEXP (pat, 0, i);
3987 if (GET_CODE (set) != SET)
3989 dest = SET_DEST (set);
3990 if (dest == stack_pointer_rtx)
3993 /* We'd better have found the stack pointer adjust. */
3996 /* Fall through to process the extracted SET and DEST
3997 as if it was a standalone insn. */
3999 else if (GET_CODE (pat) == SET)
4001 else if ((set = single_set (insn)) != NULL)
4003 else if (GET_CODE (pat) == PARALLEL)
4005 /* ??? Some older ports use a parallel with a stack adjust
4006 and a store for a PUSH_ROUNDING pattern, rather than a
4007 PRE/POST_MODIFY rtx. Don't force them to update yet... */
4008 /* ??? See h8300 and m68k, pushqi1. */
4009 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
4011 set = XVECEXP (pat, 0, i);
4012 if (GET_CODE (set) != SET)
4014 dest = SET_DEST (set);
4015 if (dest == stack_pointer_rtx)
4018 /* We do not expect an auto-inc of the sp in the parallel. */
4019 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
4020 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4021 != stack_pointer_rtx);
4029 dest = SET_DEST (set);
4031 /* Look for direct modifications of the stack pointer. */
4032 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
4034 /* Look for a trivial adjustment; otherwise assume nothing. */
4035 /* Note that the SPU restore_stack_block pattern refers to
4036 the stack pointer in V4SImode. Consider that non-trivial. */
4037 if (SCALAR_INT_MODE_P (GET_MODE (dest))
4038 && GET_CODE (SET_SRC (set)) == PLUS
4039 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
4040 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
4041 return INTVAL (XEXP (SET_SRC (set), 1));
4042 /* ??? Reload can generate no-op moves, which will be cleaned
4043 up later. Recognize it and continue searching. */
4044 else if (rtx_equal_p (dest, SET_SRC (set)))
4047 return HOST_WIDE_INT_MIN;
4053 /* Otherwise only think about autoinc patterns. */
4054 if (mem_autoinc_base (dest) == stack_pointer_rtx)
4057 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4058 != stack_pointer_rtx);
4060 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
4061 mem = SET_SRC (set);
4065 addr = XEXP (mem, 0);
4066 switch (GET_CODE (addr))
4070 return GET_MODE_SIZE (GET_MODE (mem));
4073 return -GET_MODE_SIZE (GET_MODE (mem));
4076 addr = XEXP (addr, 1);
4077 gcc_assert (GET_CODE (addr) == PLUS);
4078 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
4079 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
4080 return INTVAL (XEXP (addr, 1));
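/* For example (illustrative; which auto-inc codes map to which sign is
   assumed from the usual stack-grows-downward convention): a 4-byte
   push such as (set (mem:SI (pre_dec (reg sp))) (reg r)) yields -4
   through the auto-inc case above, while (set (reg sp) (plus (reg sp)
   (const_int 16))) yields 16 through the direct-modification case.  */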
4088 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last,
4089 poly_int64 end_args_size)
4091 poly_int64 args_size = end_args_size;
4092 bool saw_unknown = false;
4095 for (insn = last; insn != prev; insn = PREV_INSN (insn))
4097 if (!NONDEBUG_INSN_P (insn))
4100 /* We might have existing REG_ARGS_SIZE notes, e.g. when pushing
4101 a call argument containing a TLS address that itself requires
4102 a call to __tls_get_addr. The handling of stack_pointer_delta
4103 in emit_single_push_insn is supposed to ensure that any such
4104 notes are already correct. */
4105 rtx note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4106 gcc_assert (!note || known_eq (args_size, get_args_size (note)));
4108 poly_int64 this_delta = find_args_size_adjust (insn);
4109 if (known_eq (this_delta, 0))
4112 || ACCUMULATE_OUTGOING_ARGS
4113 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
4117 gcc_assert (!saw_unknown);
4118 if (known_eq (this_delta, HOST_WIDE_INT_MIN))
4122 add_args_size_note (insn, args_size);
4123 if (STACK_GROWS_DOWNWARD)
4124 this_delta = -poly_uint64 (this_delta);
4127 args_size = HOST_WIDE_INT_MIN;
4129 args_size -= this_delta;
4135 #ifdef PUSH_ROUNDING
4136 /* Emit single push insn. */
4139 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4142 poly_int64 rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4144 enum insn_code icode;
4146 /* If there is a push pattern, use it. Otherwise try the old way of
4147 throwing a MEM representing the push operation at the move expander. */
4148 icode = optab_handler (push_optab, mode);
4149 if (icode != CODE_FOR_nothing)
4151 struct expand_operand ops[1];
4153 create_input_operand (&ops[0], x, mode);
4154 if (maybe_expand_insn (icode, 1, ops))
4157 if (known_eq (GET_MODE_SIZE (mode), rounded_size))
4158 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4159 /* If we are to pad downward, adjust the stack pointer first and
4160 then store X into the stack location using an offset. This is
4161 because emit_move_insn does not know how to pad; it does not have access to the type. */
4163 else if (targetm.calls.function_arg_padding (mode, type) == PAD_DOWNWARD)
4165 emit_move_insn (stack_pointer_rtx,
4166 expand_binop (Pmode,
4167 STACK_GROWS_DOWNWARD ? sub_optab : add_optab,
stack_pointer_rtx,
4170 gen_int_mode (rounded_size, Pmode),
4171 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4173 poly_int64 offset = rounded_size - GET_MODE_SIZE (mode);
4174 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
4175 /* We have already decremented the stack pointer, so get the previous value. */
4177 offset += rounded_size;
4179 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4180 /* We have already incremented the stack pointer, so get the previous value. */
4182 offset -= rounded_size;
4184 dest_addr = plus_constant (Pmode, stack_pointer_rtx, offset);
4188 if (STACK_GROWS_DOWNWARD)
4189 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4190 dest_addr = plus_constant (Pmode, stack_pointer_rtx, -rounded_size);
4192 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4193 dest_addr = plus_constant (Pmode, stack_pointer_rtx, rounded_size);
4195 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4198 dest = gen_rtx_MEM (mode, dest_addr);
4202 set_mem_attributes (dest, type, 1);
4204 if (cfun->tail_call_marked)
4205 /* Function incoming arguments may overlap with sibling call
4206 outgoing arguments and we cannot allow reordering of reads
4207 from function arguments with stores to outgoing arguments
4208 of sibling calls. */
4209 set_mem_alias_set (dest, 0);
4211 emit_move_insn (dest, x);
4214 /* Emit and annotate a single push insn. */
4217 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4219 poly_int64 delta, old_delta = stack_pointer_delta;
4220 rtx_insn *prev = get_last_insn ();
4223 emit_single_push_insn_1 (mode, x, type);
4225 /* Adjust stack_pointer_delta to describe the situation after the push
4226 we just performed. Note that we must do this after the push rather
4227 than before the push in case calculating X needs pushes and pops of
4228 its own (e.g. if calling __tls_get_addr). The REG_ARGS_SIZE notes
4229 for such pushes and pops must not include the effect of the future push of X. */
4231 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4233 last = get_last_insn ();
4235 /* Notice the common case where we emitted exactly one insn. */
4236 if (PREV_INSN (last) == prev)
4238 add_args_size_note (last, stack_pointer_delta);
4242 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4243 gcc_assert (known_eq (delta, HOST_WIDE_INT_MIN)
4244 || known_eq (delta, old_delta));
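/* A worked example with hypothetical numbers: if stack_pointer_delta was
   8 before pushing a 4-byte SImode argument, the push advances it to 12,
   and in the common single-insn case above that insn gets a REG_ARGS_SIZE
   note recording 12.  If expanding X itself pushed and popped (e.g. a
   call to __tls_get_addr), fixup_args_size_notes annotates the whole
   emitted sequence instead.  */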
4248 /* If reading SIZE bytes from X will end up reading from
4249 Y return the number of bytes that overlap. Return -1
4250 if there is no overlap or -2 if we can't determine
4251 (for example when X and Y have different base registers). */
4254 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4256 rtx tmp = plus_constant (Pmode, x, size);
4257 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4259 if (!CONST_INT_P (sub))
4262 HOST_WIDE_INT val = INTVAL (sub);
4264 return IN_RANGE (val, 1, size) ? val : -1;
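/* For instance (hypothetical operands): with X == sp, Y == sp + 4 and
   SIZE == 8, SUB simplifies to (sp + 8) - (sp + 4) == 4, which is in
   [1, 8], so the load overlaps Y by 4 bytes.  If X and Y had different
   base registers, SUB would stay symbolic and we would return -2.  */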
4267 /* Generate code to push X onto the stack, assuming it has mode MODE and
4269 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
4271 SIZE is an rtx for the size of data to be copied (in bytes),
4272 needed only if X is BLKmode.
4273 Return true if successful. May return false if asked to push a
4274 partial argument during a sibcall optimization (as specified by
4275 SIBCALL_P) and the incoming and outgoing pointers cannot be shown not to overlap.
4278 ALIGN (in bits) is maximum alignment we can assume.
4280 If PARTIAL and REG are both nonzero, then copy that many of the first
4281 bytes of X into registers starting with REG, and push the rest of X.
4282 The amount of space pushed is decreased by PARTIAL bytes.
4283 REG must be a hard register in this case.
4284 If REG is zero but PARTIAL is not, take all other actions for an
4285 argument partially in registers, but do not actually load any registers.
4288 EXTRA is the amount in bytes of extra space to leave next to this arg.
4289 This is ignored if an argument block has already been allocated.
4291 On a machine that lacks real push insns, ARGS_ADDR is the address of
4292 the bottom of the argument block for this call. We use indexing off there
4293 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4294 argument block has not been preallocated.
4296 ARGS_SO_FAR is the size of args previously pushed for this call.
4298 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4299 for arguments passed in registers. If nonzero, it will be the number
4300 of bytes required. */
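/* As a hypothetical illustration: with PARTIAL == 8, REG == (reg R4) and
   UNITS_PER_WORD == 4, the first two words of X are passed in R4 and R5,
   the stack space consumed shrinks by 8 bytes, and only the remainder of
   X is actually stored on the stack.  */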
4303 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4304 unsigned int align, int partial, rtx reg, poly_int64 extra,
4305 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4306 rtx alignment_pad, bool sibcall_p)
4309 pad_direction stack_direction
4310 = STACK_GROWS_DOWNWARD ? PAD_DOWNWARD : PAD_UPWARD;
4312 /* Decide where to pad the argument: PAD_DOWNWARD for below,
4313 PAD_UPWARD for above, or PAD_NONE for don't pad it.
4314 Default is below for small data on big-endian machines; else above. */
4315 pad_direction where_pad = targetm.calls.function_arg_padding (mode, type);
4317 /* Invert direction if stack is post-decrement. FIXME: why? */
4319 if (STACK_PUSH_CODE == POST_DEC)
4320 if (where_pad != PAD_NONE)
4321 where_pad = (where_pad == PAD_DOWNWARD ? PAD_UPWARD : PAD_DOWNWARD);
4325 int nregs = partial / UNITS_PER_WORD;
4326 rtx *tmp_regs = NULL;
4327 int overlapping = 0;
4330 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4332 /* Copy a block into the stack, entirely or partially. */
4339 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4340 used = partial - offset;
4342 if (mode != BLKmode)
4344 /* A value is to be stored in an insufficiently aligned
4345 stack slot; copy via a suitably aligned slot if necessary. */
4347 size = gen_int_mode (GET_MODE_SIZE (mode), Pmode);
4348 if (!MEM_P (xinner))
4350 temp = assign_temp (type, 1, 1);
4351 emit_move_insn (temp, xinner);
4358 /* USED is now the # of bytes we need not copy to the stack
4359 because registers will take care of them. */
4362 xinner = adjust_address (xinner, BLKmode, used);
4364 /* If the partial register-part of the arg counts in its stack size,
4365 skip the part of stack space corresponding to the registers.
4366 Otherwise, start copying to the beginning of the stack space,
4367 by setting SKIP to 0. */
4368 skip = (reg_parm_stack_space == 0) ? 0 : used;
4370 #ifdef PUSH_ROUNDING
4371 /* Do it with several push insns if that doesn't take lots of insns
4372 and if there is no difficulty with push insns that skip bytes
4373 on the stack for alignment purposes. */
4376 && CONST_INT_P (size)
4378 && MEM_ALIGN (xinner) >= align
4379 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4380 /* Here we avoid the case of a structure whose weak alignment
4381 forces many pushes of a small amount of data,
4382 and such small pushes do rounding that causes trouble. */
4383 && ((!targetm.slow_unaligned_access (word_mode, align))
4384 || align >= BIGGEST_ALIGNMENT
4385 || known_eq (PUSH_ROUNDING (align / BITS_PER_UNIT),
4386 align / BITS_PER_UNIT))
4387 && known_eq (PUSH_ROUNDING (INTVAL (size)), INTVAL (size)))
4389 /* Push padding now if padding above and stack grows down,
4390 or if padding below and stack grows up.
4391 But if space already allocated, this has already been done. */
4392 if (maybe_ne (extra, 0)
&& args_addr == 0
4394 && where_pad != PAD_NONE
4395 && where_pad != stack_direction)
4396 anti_adjust_stack (gen_int_mode (extra, Pmode));
4398 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4401 #endif /* PUSH_ROUNDING */
4405 /* Otherwise make space on the stack and copy the data
4406 to the address of that space. */
4408 /* Deduct words put into registers from the size we must copy. */
4411 if (CONST_INT_P (size))
4412 size = GEN_INT (INTVAL (size) - used);
else
4414 size = expand_binop (GET_MODE (size), sub_optab, size,
4415 gen_int_mode (used, GET_MODE (size)),
4416 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4419 /* Get the address of the stack space.
4420 In this case, we do not deal with EXTRA separately.
4421 A single stack adjust will do. */
4424 temp = push_block (size, extra, where_pad == PAD_DOWNWARD);
4427 else if (CONST_INT_P (args_so_far))
4428 temp = memory_address (BLKmode,
4429 plus_constant (Pmode, args_addr,
4430 skip + INTVAL (args_so_far)));
4432 temp = memory_address (BLKmode,
4433 plus_constant (Pmode,
4434 gen_rtx_PLUS (Pmode, args_addr, args_so_far),
skip));
4439 if (!ACCUMULATE_OUTGOING_ARGS)
4441 /* If the source is referenced relative to the stack pointer,
4442 copy it to another register to stabilize it. We do not need
4443 to do this if we know that we won't be changing sp. */
4445 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4446 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4447 temp = copy_to_reg (temp);
4450 target = gen_rtx_MEM (BLKmode, temp);
4452 /* We do *not* set_mem_attributes here, because incoming arguments
4453 may overlap with sibling call outgoing arguments and we cannot
4454 allow reordering of reads from function arguments with stores
4455 to outgoing arguments of sibling calls. We do, however, want
4456 to record the alignment of the stack slot. */
4457 /* ALIGN may well be better aligned than TYPE, e.g. due to
4458 PARM_BOUNDARY. Assume the caller isn't lying. */
4459 set_mem_align (target, align);
4461 /* If part should go in registers and pushing to that part would
4462 overwrite some of the values that need to go into regs, load the
4463 overlapping values into temporary pseudos to be moved into the hard
4464 regs at the end after the stack pushing has completed.
4465 We cannot load them directly into the hard regs here because
4466 they can be clobbered by the block move expansions. */
4469 if (partial > 0 && reg != 0 && mode == BLKmode
4470 && GET_CODE (reg) != PARALLEL)
4472 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4473 if (overlapping > 0)
4475 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4476 overlapping /= UNITS_PER_WORD;
4478 tmp_regs = XALLOCAVEC (rtx, overlapping);
4480 for (int i = 0; i < overlapping; i++)
4481 tmp_regs[i] = gen_reg_rtx (word_mode);
4483 for (int i = 0; i < overlapping; i++)
4484 emit_move_insn (tmp_regs[i],
4485 operand_subword_force (target, i, mode));
4487 else if (overlapping == -1)
4489 /* Could not determine whether there is overlap.
4490 Fail the sibcall. */
4498 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4501 else if (partial > 0)
4503 /* Scalar partly in registers. This case is only supported
4504 for fixed-width modes. */
4505 int size = GET_MODE_SIZE (mode).to_constant ();
4506 size /= UNITS_PER_WORD;
4509 /* # bytes of start of argument
4510 that we must make space for but need not store. */
4511 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4512 int args_offset = INTVAL (args_so_far);
4515 /* Push padding now if padding above and stack grows down,
4516 or if padding below and stack grows up.
4517 But if space already allocated, this has already been done. */
4518 if (maybe_ne (extra, 0)
&& args_addr == 0
4520 && where_pad != PAD_NONE
4521 && where_pad != stack_direction)
4522 anti_adjust_stack (gen_int_mode (extra, Pmode));
4524 /* If we make space by pushing it, we might as well push
4525 the real data. Otherwise, we can leave OFFSET nonzero
4526 and leave the space uninitialized. */
4530 /* Now NOT_STACK gets the number of words that we don't need to
4531 allocate on the stack. Convert OFFSET to words too. */
4532 not_stack = (partial - offset) / UNITS_PER_WORD;
4533 offset /= UNITS_PER_WORD;
4535 /* If the partial register-part of the arg counts in its stack size,
4536 skip the part of stack space corresponding to the registers.
4537 Otherwise, start copying to the beginning of the stack space,
4538 by setting SKIP to 0. */
4539 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4541 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4542 x = validize_mem (force_const_mem (mode, x));
4544 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4545 SUBREGs of such registers are not allowed. */
4546 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4547 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4548 x = copy_to_reg (x);
4550 /* Loop over all the words allocated on the stack for this arg. */
4551 /* We can do it by words, because any scalar bigger than a word
4552 has a size a multiple of a word. */
4553 for (i = size - 1; i >= not_stack; i--)
4554 if (i >= not_stack + offset)
4555 if (!emit_push_insn (operand_subword_force (x, i, mode),
4556 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
0, args_addr,
4558 GEN_INT (args_offset + ((i - not_stack + skip)
* UNITS_PER_WORD)),
4560 reg_parm_stack_space, alignment_pad, sibcall_p))
4568 /* Push padding now if padding above and stack grows down,
4569 or if padding below and stack grows up.
4570 But if space already allocated, this has already been done. */
4571 if (maybe_ne (extra, 0)
&& args_addr == 0
4573 && where_pad != PAD_NONE
4574 && where_pad != stack_direction)
4575 anti_adjust_stack (gen_int_mode (extra, Pmode));
4577 #ifdef PUSH_ROUNDING
4578 if (args_addr == 0 && PUSH_ARGS)
4579 emit_single_push_insn (mode, x, type);
else
4583 addr = simplify_gen_binary (PLUS, Pmode, args_addr, args_so_far);
4584 dest = gen_rtx_MEM (mode, memory_address (mode, addr));
4586 /* We do *not* set_mem_attributes here, because incoming arguments
4587 may overlap with sibling call outgoing arguments and we cannot
4588 allow reordering of reads from function arguments with stores
4589 to outgoing arguments of sibling calls. We do, however, want
4590 to record the alignment of the stack slot. */
4591 /* ALIGN may well be better aligned than TYPE, e.g. due to
4592 PARM_BOUNDARY. Assume the caller isn't lying. */
4593 set_mem_align (dest, align);
4595 emit_move_insn (dest, x);
4599 /* Move the partial arguments into the registers and any overlapping
4600 values that we moved into the pseudos in tmp_regs. */
4601 if (partial > 0 && reg != 0)
4603 /* Handle calls that pass values in multiple non-contiguous locations.
4604 The Irix 6 ABI has examples of this. */
4605 if (GET_CODE (reg) == PARALLEL)
4606 emit_group_load (reg, x, type, -1);
4609 gcc_assert (partial % UNITS_PER_WORD == 0);
4610 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4612 for (int i = 0; i < overlapping; i++)
4613 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4614 + nregs - overlapping + i), tmp_regs[i]);
4620 if (maybe_ne (extra, 0) && args_addr == 0 && where_pad == stack_direction)
4621 anti_adjust_stack (gen_int_mode (extra, Pmode));
4623 if (alignment_pad && args_addr == 0)
4624 anti_adjust_stack (alignment_pad);
4629 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
4633 get_subtarget (rtx x)
4637 /* Only registers can be subtargets. */
4639 /* Don't use hard regs to avoid extending their life. */
4640 || REGNO (x) < FIRST_PSEUDO_REGISTER
4644 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4645 FIELD is a bitfield. Returns true if the optimization was successful,
4646 and there's nothing else to do. */
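/* E.g. for struct { unsigned f : 1; } s; the store in s.f ^= 1 can be
   done by xor-ing the word containing the bitfield directly, instead of
   extracting the bit, flipping it and re-inserting it.  */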
4649 optimize_bitfield_assignment_op (poly_uint64 pbitsize,
4650 poly_uint64 pbitpos,
4651 poly_uint64 pbitregion_start,
4652 poly_uint64 pbitregion_end,
4653 machine_mode mode1, rtx str_rtx,
4654 tree to, tree src, bool reverse)
4656 /* str_mode is not guaranteed to be a scalar mode. */
4657 machine_mode str_mode = GET_MODE (str_rtx);
4658 unsigned int str_bitsize;
4663 enum tree_code code;
4665 unsigned HOST_WIDE_INT bitsize, bitpos, bitregion_start, bitregion_end;
4666 if (mode1 != VOIDmode
4667 || !pbitsize.is_constant (&bitsize)
4668 || !pbitpos.is_constant (&bitpos)
4669 || !pbitregion_start.is_constant (&bitregion_start)
4670 || !pbitregion_end.is_constant (&bitregion_end)
4671 || bitsize >= BITS_PER_WORD
4672 || !GET_MODE_BITSIZE (str_mode).is_constant (&str_bitsize)
4673 || str_bitsize > BITS_PER_WORD
4674 || TREE_SIDE_EFFECTS (to)
4675 || TREE_THIS_VOLATILE (to))
4679 if (TREE_CODE (src) != SSA_NAME)
4681 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4684 srcstmt = get_gimple_for_ssa_name (src);
if (!srcstmt
4686 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
return false;
4689 code = gimple_assign_rhs_code (srcstmt);
4691 op0 = gimple_assign_rhs1 (srcstmt);
4693 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4694 to find its initialization. Hopefully the initialization will
4695 be from a bitfield load. */
4696 if (TREE_CODE (op0) == SSA_NAME)
4698 gimple *op0stmt = get_gimple_for_ssa_name (op0);
4700 /* We want to eventually have OP0 be the same as TO, which
4701 should be a bitfield. */
if (!op0stmt
4703 || !is_gimple_assign (op0stmt)
4704 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
return false;
4706 op0 = gimple_assign_rhs1 (op0stmt);
4709 op1 = gimple_assign_rhs2 (srcstmt);
4711 if (!operand_equal_p (to, op0, 0))
4714 if (MEM_P (str_rtx))
4716 unsigned HOST_WIDE_INT offset1;
4718 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4719 str_bitsize = BITS_PER_WORD;
4721 scalar_int_mode best_mode;
4722 if (!get_best_mode (bitsize, bitpos, bitregion_start, bitregion_end,
4723 MEM_ALIGN (str_rtx), str_bitsize, false, &best_mode))
4725 str_mode = best_mode;
4726 str_bitsize = GET_MODE_BITSIZE (best_mode);
offset1 = bitpos;
4729 bitpos %= str_bitsize;
4730 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4731 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4733 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4736 /* If the bit field covers the whole REG/MEM, store_field
4737 will likely generate better code. */
4738 if (bitsize >= str_bitsize)
4741 /* We can't handle fields split across multiple entities. */
4742 if (bitpos + bitsize > str_bitsize)
4745 if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4746 bitpos = str_bitsize - bitpos - bitsize;
4752 /* For now, just optimize the case of the topmost bitfield
4753 where we don't need to do any masking and also
4754 1-bit bitfields where xor can be used.
4755 We might win by one instruction for the other bitfields
4756 too if insv/extv instructions aren't used, so that
4757 can be added later. */
4758 if ((reverse || bitpos + bitsize != str_bitsize)
4759 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4762 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4763 value = convert_modes (str_mode,
4764 TYPE_MODE (TREE_TYPE (op1)), value,
4765 TYPE_UNSIGNED (TREE_TYPE (op1)));
4767 /* We may be accessing data outside the field, which means
4768 we can alias adjacent data. */
4769 if (MEM_P (str_rtx))
4771 str_rtx = shallow_copy_rtx (str_rtx);
4772 set_mem_alias_set (str_rtx, 0);
4773 set_mem_expr (str_rtx, 0);
4776 if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4778 value = expand_and (str_mode, value, const1_rtx, NULL);
4782 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4784 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4786 value = flip_storage_order (str_mode, value);
4787 result = expand_binop (str_mode, binop, str_rtx,
4788 value, str_rtx, 1, OPTAB_WIDEN);
4789 if (result != str_rtx)
4790 emit_move_insn (str_rtx, result);
4795 if (TREE_CODE (op1) != INTEGER_CST)
4797 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4798 value = convert_modes (str_mode,
4799 TYPE_MODE (TREE_TYPE (op1)), value,
4800 TYPE_UNSIGNED (TREE_TYPE (op1)));
4802 /* We may be accessing data outside the field, which means
4803 we can alias adjacent data. */
4804 if (MEM_P (str_rtx))
4806 str_rtx = shallow_copy_rtx (str_rtx);
4807 set_mem_alias_set (str_rtx, 0);
4808 set_mem_expr (str_rtx, 0);
4811 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4812 if (bitpos + bitsize != str_bitsize)
4814 rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
str_mode);
4816 value = expand_and (str_mode, value, mask, NULL_RTX);
4818 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4820 value = flip_storage_order (str_mode, value);
4821 result = expand_binop (str_mode, binop, str_rtx,
4822 value, str_rtx, 1, OPTAB_WIDEN);
4823 if (result != str_rtx)
4824 emit_move_insn (str_rtx, result);
4834 /* In the C++ memory model, consecutive bit fields in a structure are
4835 considered one memory location.
4837 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4838 returns the bit range of consecutive bits in which this COMPONENT_REF
4839 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4840 and *OFFSET may be adjusted in the process.
4842 If the access does not need to be restricted, 0 is returned in both
4843 *BITSTART and *BITEND. */
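/* For example, in struct S { char c; int a : 3; int b : 5; char d; },
   the C++ memory model makes A and B one memory location, distinct from
   C and D.  A store to B may read-modify-write the representative
   covering A and B, but the bit range returned here forbids touching
   the bytes of C or D.  */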
4846 get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
4847 poly_int64_pod *bitpos, tree *offset)
4849 poly_int64 bitoffset;
4852 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4854 field = TREE_OPERAND (exp, 1);
4855 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4856 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4857 need to limit the range we can access. */
4860 *bitstart = *bitend = 0;
4864 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4865 part of a larger bit field, then the representative does not serve any
4866 useful purpose. This can occur in Ada. */
4867 if (handled_component_p (TREE_OPERAND (exp, 0)))
4870 poly_int64 rbitsize, rbitpos;
4872 int unsignedp, reversep, volatilep = 0;
4873 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4874 &roffset, &rmode, &unsignedp, &reversep,
4876 if (!multiple_p (rbitpos, BITS_PER_UNIT))
4878 *bitstart = *bitend = 0;
4883 /* Compute the adjustment to bitpos from the offset of the field
4884 relative to the representative. DECL_FIELD_OFFSET of field and
4885 repr are the same by construction if they are not constants,
4886 see finish_bitfield_layout. */
4887 poly_uint64 field_offset, repr_offset;
4888 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
4889 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
4890 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
4893 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4894 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4896 /* If the adjustment is larger than bitpos, we would have a negative bit
4897 position for the lower bound and this may wreak havoc later. Adjust
4898 offset and bitpos to make the lower bound non-negative in that case. */
4899 if (maybe_gt (bitoffset, *bitpos))
4901 poly_int64 adjust_bits = upper_bound (bitoffset, *bitpos) - *bitpos;
4902 poly_int64 adjust_bytes = exact_div (adjust_bits, BITS_PER_UNIT);
4904 *bitpos += adjust_bits;
4905 if (*offset == NULL_TREE)
4906 *offset = size_int (-adjust_bytes);
4908 *offset = size_binop (MINUS_EXPR, *offset, size_int (adjust_bytes));
4912 *bitstart = *bitpos - bitoffset;
4914 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4917 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4918 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4919 DECL_RTL was not set yet, return NORTL. */
4922 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4924 if (TREE_CODE (addr) != ADDR_EXPR)
4927 tree base = TREE_OPERAND (addr, 0);
4930 || TREE_ADDRESSABLE (base)
4931 || DECL_MODE (base) == BLKmode)
4934 if (!DECL_RTL_SET_P (base))
4937 return (!MEM_P (DECL_RTL (base)));
4940 /* Returns true if the MEM_REF REF refers to an object that does not
4941 reside in memory and has non-BLKmode. */
4944 mem_ref_refers_to_non_mem_p (tree ref)
4946 tree base = TREE_OPERAND (ref, 0);
4947 return addr_expr_of_non_mem_decl_p_1 (base, false);
4950 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4951 is true, try generating a nontemporal store. */
4954 expand_assignment (tree to, tree from, bool nontemporal)
4960 enum insn_code icode;
4962 /* Don't crash if the lhs of the assignment was erroneous. */
4963 if (TREE_CODE (to) == ERROR_MARK)
4965 expand_normal (from);
4969 /* Optimize away no-op moves without side-effects. */
4970 if (operand_equal_p (to, from, 0))
4973 /* Handle misaligned stores. */
4974 mode = TYPE_MODE (TREE_TYPE (to));
4975 if ((TREE_CODE (to) == MEM_REF
4976 || TREE_CODE (to) == TARGET_MEM_REF)
4978 && !mem_ref_refers_to_non_mem_p (to)
4979 && ((align = get_object_alignment (to))
4980 < GET_MODE_ALIGNMENT (mode))
4981 && (((icode = optab_handler (movmisalign_optab, mode))
4982 != CODE_FOR_nothing)
4983 || targetm.slow_unaligned_access (mode, align)))
4987 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4988 reg = force_not_mem (reg);
4989 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4990 if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
4991 reg = flip_storage_order (mode, reg);
4993 if (icode != CODE_FOR_nothing)
4995 struct expand_operand ops[2];
4997 create_fixed_operand (&ops[0], mem);
4998 create_input_operand (&ops[1], reg, mode);
4999 /* The movmisalign<mode> pattern cannot fail, else the assignment
5000 would silently be omitted. */
5001 expand_insn (icode, 2, ops);
5004 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
false);
5009 /* Assignment of a structure component needs special treatment
5010 if the structure component's rtx is not simply a MEM.
5011 Assignment of an array element at a constant index, and assignment of
5012 an array element in an unaligned packed structure field, has the same
5013 problem. Same for (partially) storing into a non-memory object. */
5014 if (handled_component_p (to)
5015 || (TREE_CODE (to) == MEM_REF
5016 && (REF_REVERSE_STORAGE_ORDER (to)
5017 || mem_ref_refers_to_non_mem_p (to)))
5018 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
5021 poly_int64 bitsize, bitpos;
5022 poly_uint64 bitregion_start = 0;
5023 poly_uint64 bitregion_end = 0;
5025 int unsignedp, reversep, volatilep = 0;
5029 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
5030 &unsignedp, &reversep, &volatilep);
5032 /* Make sure bitpos is not negative, it can wreak havoc later. */
5033 if (maybe_lt (bitpos, 0))
5035 gcc_assert (offset == NULL_TREE);
5036 offset = size_int (bits_to_bytes_round_down (bitpos));
5037 bitpos = num_trailing_bits (bitpos);
5040 if (TREE_CODE (to) == COMPONENT_REF
5041 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
5042 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
5043 /* The C++ memory model naturally applies to byte-aligned fields.
5044 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
5045 BITSIZE are not byte-aligned, there is no need to limit the range
5046 we can access. This can occur with packed structures in Ada. */
5047 else if (maybe_gt (bitsize, 0)
5048 && multiple_p (bitsize, BITS_PER_UNIT)
5049 && multiple_p (bitpos, BITS_PER_UNIT))
5051 bitregion_start = bitpos;
5052 bitregion_end = bitpos + bitsize - 1;
5055 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
5057 /* If the field has a mode, we want to access it in the
5058 field's mode, not the computed mode.
5059 If a MEM has VOIDmode (external with incomplete type),
5060 use BLKmode for it instead. */
5063 if (mode1 != VOIDmode)
5064 to_rtx = adjust_address (to_rtx, mode1, 0);
5065 else if (GET_MODE (to_rtx) == VOIDmode)
5066 to_rtx = adjust_address (to_rtx, BLKmode, 0);
5071 machine_mode address_mode;
5074 if (!MEM_P (to_rtx))
5076 /* We can get constant negative offsets into arrays with broken
5077 user code. Translate this to a trap instead of ICEing. */
5078 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
5079 expand_builtin_trap ();
5080 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
5083 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
5084 address_mode = get_address_mode (to_rtx);
5085 if (GET_MODE (offset_rtx) != address_mode)
5087 /* We cannot be sure that the RTL in offset_rtx is valid outside
5088 of a memory address context, so force it into a register
5089 before attempting to convert it to the desired mode. */
5090 offset_rtx = force_operand (offset_rtx, NULL_RTX);
5091 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5094 /* If we have an expression in OFFSET_RTX and a non-zero
5095 byte offset in BITPOS, adding the byte offset before the
5096 OFFSET_RTX results in better intermediate code, which makes
5097 later rtl optimization passes perform better.
5099 We prefer intermediate code like this:
5101 r124:DI=r123:DI+0x18
[r124:DI]=r121:DI
... instead of ...
5106 r124:DI=r123:DI+0x10
5107 [r124:DI+0x8]=r121:DI
5109 This is only done for aligned data values, as these can
5110 be expected to result in single move instructions. */
5112 if (mode1 != VOIDmode
5113 && maybe_ne (bitpos, 0)
5114 && maybe_gt (bitsize, 0)
5115 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
5116 && multiple_p (bitpos, bitsize)
5117 && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
5118 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
5120 to_rtx = adjust_address (to_rtx, mode1, bytepos);
5121 bitregion_start = 0;
5122 if (known_ge (bitregion_end, poly_uint64 (bitpos)))
5123 bitregion_end -= bitpos;
5127 to_rtx = offset_address (to_rtx, offset_rtx,
5128 highest_pow2_factor_for_target (to, offset));
5132 /* No action is needed if the target is not a memory and the field
5133 lies completely outside that target. This can occur if the source
5134 code contains an out-of-bounds access to a small array. */
if (!MEM_P (to_rtx)
5136 && GET_MODE (to_rtx) != BLKmode
5137 && known_ge (bitpos, GET_MODE_PRECISION (GET_MODE (to_rtx))))
5139 expand_normal (from);
5142 /* Handle expand_expr of a complex value returning a CONCAT. */
5143 else if (GET_CODE (to_rtx) == CONCAT)
5145 machine_mode to_mode = GET_MODE (to_rtx);
5146 gcc_checking_assert (COMPLEX_MODE_P (to_mode));
5147 poly_int64 mode_bitsize = GET_MODE_BITSIZE (to_mode);
5148 unsigned short inner_bitsize = GET_MODE_UNIT_BITSIZE (to_mode);
5149 if (TYPE_MODE (TREE_TYPE (from)) == to_mode
5150 && known_eq (bitpos, 0)
5151 && known_eq (bitsize, mode_bitsize))
5152 result = store_expr (from, to_rtx, false, nontemporal, reversep);
5153 else if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE_INNER (to_mode)
5154 && known_eq (bitsize, inner_bitsize)
5155 && (known_eq (bitpos, 0)
5156 || known_eq (bitpos, inner_bitsize)))
5157 result = store_expr (from, XEXP (to_rtx, maybe_ne (bitpos, 0)),
5158 false, nontemporal, reversep);
5159 else if (known_le (bitpos + bitsize, inner_bitsize))
5160 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
5161 bitregion_start, bitregion_end,
5162 mode1, from, get_alias_set (to),
5163 nontemporal, reversep);
5164 else if (known_ge (bitpos, inner_bitsize))
5165 result = store_field (XEXP (to_rtx, 1), bitsize,
5166 bitpos - inner_bitsize,
5167 bitregion_start, bitregion_end,
5168 mode1, from, get_alias_set (to),
5169 nontemporal, reversep);
5170 else if (known_eq (bitpos, 0) && known_eq (bitsize, mode_bitsize))
5172 result = expand_normal (from);
5173 if (GET_CODE (result) == CONCAT)
5175 to_mode = GET_MODE_INNER (to_mode);
5176 machine_mode from_mode = GET_MODE_INNER (GET_MODE (result));
5178 rtx from_real = simplify_gen_subreg (to_mode, XEXP (result, 0),
from_mode, 0);
5181 rtx from_imag = simplify_gen_subreg (to_mode, XEXP (result, 1),
from_mode, 0);
5183 if (!from_real || !from_imag)
5184 goto concat_store_slow;
5185 emit_move_insn (XEXP (to_rtx, 0), from_real);
5186 emit_move_insn (XEXP (to_rtx, 1), from_imag);
5191 rtx from_rtx = simplify_gen_subreg (to_mode, result,
5192 TYPE_MODE (TREE_TYPE (from)), 0);
5195 emit_move_insn (XEXP (to_rtx, 0),
5196 read_complex_part (from_rtx, false));
5197 emit_move_insn (XEXP (to_rtx, 1),
5198 read_complex_part (from_rtx, true));
5202 machine_mode to_mode
5203 = GET_MODE_INNER (GET_MODE (to_rtx));
5205 rtx from_real = simplify_gen_subreg (to_mode, result,
5206 TYPE_MODE (TREE_TYPE (from)), 0);
5209 rtx from_imag = simplify_gen_subreg (to_mode, result,
5210 TYPE_MODE (TREE_TYPE (from)),
5211 GET_MODE_SIZE (to_mode));
5212 if (!from_real || !from_imag)
5213 goto concat_store_slow;
5214 emit_move_insn (XEXP (to_rtx, 0), from_real);
5215 emit_move_insn (XEXP (to_rtx, 1), from_imag);
5222 rtx temp = assign_stack_temp (to_mode,
5223 GET_MODE_SIZE (GET_MODE (to_rtx)));
5224 write_complex_part (temp, XEXP (to_rtx, 0), false);
5225 write_complex_part (temp, XEXP (to_rtx, 1), true);
5226 result = store_field (temp, bitsize, bitpos,
5227 bitregion_start, bitregion_end,
5228 mode1, from, get_alias_set (to),
5229 nontemporal, reversep);
5230 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
5231 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5234 /* For calls to functions returning variable length structures, if TO_RTX
5235 is not a MEM, go through a MEM because we must not create temporaries of the VLA type. */
5237 else if (!MEM_P (to_rtx)
5238 && TREE_CODE (from) == CALL_EXPR
5239 && COMPLETE_TYPE_P (TREE_TYPE (from))
5240 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) != INTEGER_CST)
5242 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5243 GET_MODE_SIZE (GET_MODE (to_rtx)));
5244 result = store_field (temp, bitsize, bitpos, bitregion_start,
5245 bitregion_end, mode1, from, get_alias_set (to),
5246 nontemporal, reversep);
5247 emit_move_insn (to_rtx, temp);
5253 /* If the field is at offset zero, we could have been given the
5254 DECL_RTX of the parent struct. Don't munge it. */
5255 to_rtx = shallow_copy_rtx (to_rtx);
5256 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5258 MEM_VOLATILE_P (to_rtx) = 1;
5261 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5262 bitregion_start, bitregion_end,
5263 mode1, to_rtx, to, from,
5267 result = store_field (to_rtx, bitsize, bitpos,
5268 bitregion_start, bitregion_end,
5269 mode1, from, get_alias_set (to),
5270 nontemporal, reversep);
5274 preserve_temp_slots (result);
5279 /* If the rhs is a function call and its value is not an aggregate,
5280 call the function before we start to compute the lhs.
5281 This is needed for correct code for cases such as
5282 val = setjmp (buf) on machines where reference to val
5283 requires loading up part of an address in a separate insn.
5285 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5286 since it might be a promoted variable where the zero- or sign- extension
5287 needs to be done. Handling this in the normal way is safe because no
5288 computation is done before the call. The same is true for SSA names. */
5289 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5290 && COMPLETE_TYPE_P (TREE_TYPE (from))
5291 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5293 || TREE_CODE (to) == PARM_DECL
5294 || TREE_CODE (to) == RESULT_DECL)
5295 && REG_P (DECL_RTL (to)))
5296 || TREE_CODE (to) == SSA_NAME))
5302 value = expand_normal (from);
5304 /* Split value and bounds to store them separately. */
5305 chkp_split_slot (value, &value, &bounds);
5308 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5310 /* Handle calls that return values in multiple non-contiguous locations.
5311 The Irix 6 ABI has examples of this. */
5312 if (GET_CODE (to_rtx) == PARALLEL)
5314 if (GET_CODE (value) == PARALLEL)
5315 emit_group_move (to_rtx, value);
5317 emit_group_load (to_rtx, value, TREE_TYPE (from),
5318 int_size_in_bytes (TREE_TYPE (from)));
5320 else if (GET_CODE (value) == PARALLEL)
5321 emit_group_store (to_rtx, value, TREE_TYPE (from),
5322 int_size_in_bytes (TREE_TYPE (from)));
5323 else if (GET_MODE (to_rtx) == BLKmode)
5325 /* Handle calls that return BLKmode values in registers. */
5327 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5329 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5333 if (POINTER_TYPE_P (TREE_TYPE (to)))
5334 value = convert_memory_address_addr_space
5335 (as_a <scalar_int_mode> (GET_MODE (to_rtx)), value,
5336 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5338 emit_move_insn (to_rtx, value);
5341 /* Store bounds if required. */
5343 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5345 gcc_assert (MEM_P (to_rtx));
5346 chkp_emit_bounds_store (bounds, value, to_rtx);
5349 preserve_temp_slots (to_rtx);
5354 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5355 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5357 /* Don't move directly into a return register. */
5358 if (TREE_CODE (to) == RESULT_DECL
5359 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5365 /* If the source is itself a return value, it still is in a pseudo at
5366 this point so we can move it back to the return register directly. */
5368 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5369 && TREE_CODE (from) != CALL_EXPR)
5370 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5372 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5374 /* Handle calls that return values in multiple non-contiguous locations.
5375 The Irix 6 ABI has examples of this. */
5376 if (GET_CODE (to_rtx) == PARALLEL)
5378 if (GET_CODE (temp) == PARALLEL)
5379 emit_group_move (to_rtx, temp);
5381 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5382 int_size_in_bytes (TREE_TYPE (from)));
5385 emit_move_insn (to_rtx, temp);
5387 preserve_temp_slots (to_rtx);
5392 /* In case we are returning the contents of an object which overlaps
5393 the place the value is being stored, use a safe function when copying
5394 a value through a pointer into a structure value return block. */
5395 if (TREE_CODE (to) == RESULT_DECL
5396 && TREE_CODE (from) == INDIRECT_REF
5397 && ADDR_SPACE_GENERIC_P
5398 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5399 && refs_may_alias_p (to, from)
5400 && cfun->returns_struct
5401 && !cfun->returns_pcc_struct)
5406 size = expr_size (from);
5407 from_rtx = expand_normal (from);
5409 emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);
5411 preserve_temp_slots (to_rtx);
5416 /* Compute FROM and store the value in the rtx we got. */
5419 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
5420 preserve_temp_slots (result);
5425 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5426 succeeded, false otherwise. */
5429 emit_storent_insn (rtx to, rtx from)
5431 struct expand_operand ops[2];
5432 machine_mode mode = GET_MODE (to);
5433 enum insn_code code = optab_handler (storent_optab, mode);
5435 if (code == CODE_FOR_nothing)
5438 create_fixed_operand (&ops[0], to);
5439 create_input_operand (&ops[1], from, mode);
5440 return maybe_expand_insn (code, 2, ops);
5443 /* Generate code for computing expression EXP,
5444 and storing the value into TARGET.
5446 If the mode is BLKmode then we may return TARGET itself.
5447 It turns out that in BLKmode it doesn't cause a problem,
5448 because C has no operators that could combine two different
5449 assignments into the same BLKmode object with different values
5450 with no sequence point. Will other languages need this to be more thorough?
5453 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5454 stack, and block moves may need to be treated specially.
5456 If NONTEMPORAL is true, try using a nontemporal store instruction.
5458 If REVERSE is true, the store is to be done in reverse order.
5460 If BTARGET is not NULL then computed bounds of EXP are
5461 associated with BTARGET. */
5464 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5465 bool nontemporal, bool reverse, tree btarget)
5468 rtx alt_rtl = NULL_RTX;
5469 location_t loc = curr_insn_location ();
5471 if (VOID_TYPE_P (TREE_TYPE (exp)))
5473 /* C++ can generate ?: expressions with a throw expression in one
5474 branch and an rvalue in the other. Here, we resolve attempts to
5475 store the throw expression's nonexistent result. */
5476 gcc_assert (!call_param_p);
5477 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5480 if (TREE_CODE (exp) == COMPOUND_EXPR)
5482 /* Perform first part of compound expression, then assign from second part. */
5484 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5485 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5486 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5487 call_param_p, nontemporal, reverse,
5490 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5492 /* For conditional expression, get safe form of the target. Then
5493 test the condition, doing the appropriate assignment on either
5494 side. This avoids the creation of unnecessary temporaries.
5495 For non-BLKmode, it is more efficient not to do this. */
5497 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5499 do_pending_stack_adjust ();
5501 jumpifnot (TREE_OPERAND (exp, 0), lab1,
5502 profile_probability::uninitialized ());
5503 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5504 nontemporal, reverse, btarget);
5505 emit_jump_insn (targetm.gen_jump (lab2));
5508 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5509 nontemporal, reverse, btarget);
5515 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5516 /* If this is a scalar in a register that is stored in a wider mode
5517 than the declared mode, compute the result into its declared mode
5518 and then convert to the wider mode. Our value is the computed expression. */
5521 rtx inner_target = 0;
5522 scalar_int_mode outer_mode = subreg_unpromoted_mode (target);
5523 scalar_int_mode inner_mode = subreg_promoted_mode (target);
5525 /* We can do the conversion inside EXP, which will often result
5526 in some optimizations. Do the conversion in two steps: first
5527 change the signedness, if needed, then the extend. But don't
5528 do this if the type of EXP is a subtype of something else
5529 since then the conversion might involve more than just
5530 converting modes. */
5531 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5532 && TREE_TYPE (TREE_TYPE (exp)) == 0
5533 && GET_MODE_PRECISION (outer_mode)
5534 == TYPE_PRECISION (TREE_TYPE (exp)))
5536 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5537 TYPE_UNSIGNED (TREE_TYPE (exp))))
5539 /* Some types, e.g. Fortran's logical*4, won't have a signed
5540 version, so use the mode instead. */
5542 tree ntype = (signed_or_unsigned_type_for
5543 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
if (ntype == NULL)
5545 ntype = lang_hooks.types.type_for_mode
5546 (TYPE_MODE (TREE_TYPE (exp)),
5547 SUBREG_PROMOTED_SIGN (target));
5549 exp = fold_convert_loc (loc, ntype, exp);
5552 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5553 (inner_mode, SUBREG_PROMOTED_SIGN (target)),
5556 inner_target = SUBREG_REG (target);
5559 temp = expand_expr (exp, inner_target, VOIDmode,
5560 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5562 /* Handle bounds returned by call. */
5563 if (TREE_CODE (exp) == CALL_EXPR)
5566 chkp_split_slot (temp, &temp, &bounds);
5567 if (bounds && btarget)
5569 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5570 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5571 chkp_set_rtl_bounds (btarget, tmp);
5575 /* If TEMP is a VOIDmode constant, use convert_modes to make
5576 sure that we properly convert it. */
5577 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5579 temp = convert_modes (outer_mode, TYPE_MODE (TREE_TYPE (exp)),
5580 temp, SUBREG_PROMOTED_SIGN (target));
5581 temp = convert_modes (inner_mode, outer_mode, temp,
5582 SUBREG_PROMOTED_SIGN (target));
5585 convert_move (SUBREG_REG (target), temp,
5586 SUBREG_PROMOTED_SIGN (target));
5590 else if ((TREE_CODE (exp) == STRING_CST
5591 || (TREE_CODE (exp) == MEM_REF
5592 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5593 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5595 && integer_zerop (TREE_OPERAND (exp, 1))))
5596 && !nontemporal && !call_param_p
5599 /* Optimize initialization of an array with a STRING_CST. */
5600 HOST_WIDE_INT exp_len, str_copy_len;
5602 tree str = TREE_CODE (exp) == STRING_CST
5603 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5605 exp_len = int_expr_size (exp);
5609 if (TREE_STRING_LENGTH (str) <= 0)
5612 str_copy_len = strlen (TREE_STRING_POINTER (str));
5613 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5616 str_copy_len = TREE_STRING_LENGTH (str);
5617 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5618 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5620 str_copy_len += STORE_MAX_PIECES - 1;
5621 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5623 str_copy_len = MIN (str_copy_len, exp_len);
5624 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5625 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5626 MEM_ALIGN (target), false))
5631 dest_mem = store_by_pieces (dest_mem,
5632 str_copy_len, builtin_strncpy_read_str,
5634 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5635 MEM_ALIGN (target), false,
5636 exp_len > str_copy_len ? 1 : 0);
5637 if (exp_len > str_copy_len)
5638 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5639 GEN_INT (exp_len - str_copy_len),
5648 /* If we want to use a nontemporal or a reverse order store, force the
5649 value into a register first. */
5650 tmp_target = nontemporal || reverse ? NULL_RTX : target;
5651 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5653 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5656 /* Handle bounds returned by call. */
5657 if (TREE_CODE (exp) == CALL_EXPR)
5660 chkp_split_slot (temp, &temp, &bounds);
5661 if (bounds && btarget)
5663 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5664 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5665 chkp_set_rtl_bounds (btarget, tmp);
5670 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5671 the same as that of TARGET, adjust the constant. This is needed, for
5672 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5673 only a word-sized value. */
5674 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5675 && TREE_CODE (exp) != ERROR_MARK
5676 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5678 if (GET_MODE_CLASS (GET_MODE (target))
5679 != GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp)))
5680 && known_eq (GET_MODE_BITSIZE (GET_MODE (target)),
5681 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)))))
5683 rtx t = simplify_gen_subreg (GET_MODE (target), temp,
5684 TYPE_MODE (TREE_TYPE (exp)), 0);
5688 if (GET_MODE (temp) == VOIDmode)
5689 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5690 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5693 /* If value was not generated in the target, store it there.
5694 Convert the value to TARGET's type first if necessary and emit the
5695 pending incrementations that have been queued when expanding EXP.
5696 Note that we cannot emit the whole queue blindly because this will
5697 effectively disable the POST_INC optimization later.
5699 If TEMP and TARGET compare equal according to rtx_equal_p, but
5700 one or both of them are volatile memory refs, we have to distinguish two cases:
5702 - expand_expr has used TARGET. In this case, we must not generate
5703 another copy. This can be detected by TARGET being equal according to == .
5705 - expand_expr has not used TARGET - that means that the source just
5706 happens to have the same RTX form. Since temp will have been created
5707 by expand_expr, it will compare unequal according to == .
5708 We must generate a copy in this case, to reach the correct number
5709 of volatile memory references. */
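/* Concretely: two volatile MEMs built independently for the same address
   satisfy rtx_equal_p yet are distinct objects under ==.  Only in that
   second situation is a copy emitted, so the program performs exactly
   the volatile accesses the source asked for.  */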
5711 if ((! rtx_equal_p (temp, target)
5712 || (temp != target && (side_effects_p (temp)
5713 || side_effects_p (target))))
5714 && TREE_CODE (exp) != ERROR_MARK
5715 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5716 but TARGET is not a valid memory reference, TEMP will differ
5717 from TARGET although it is really the same location. */
5719 && rtx_equal_p (alt_rtl, target)
5720 && !side_effects_p (alt_rtl)
5721 && !side_effects_p (target))
5722 /* If there's nothing to copy, don't bother. Don't call
5723 expr_size unless necessary, because some front ends' (e.g. C++)
5724 expr_size hook must not be given objects that are not
5725 supposed to be bit-copied or bit-initialized. */
5726 && expr_size (exp) != const0_rtx)
5728 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5730 if (GET_MODE (target) == BLKmode)
5732 /* Handle calls that return BLKmode values in registers. */
5733 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5734 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5736 store_bit_field (target,
5737 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5738 0, 0, 0, GET_MODE (temp), temp, reverse);
5741 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5744 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5746 /* Handle copying a string constant into an array. The string
5747 constant may be shorter than the array. So copy just the string's
5748 actual length, and clear the rest. First get the size of the data
5749 type of the string, which is actually the size of the target. */
5750 rtx size = expr_size (exp);
5752 if (CONST_INT_P (size)
5753 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5754 emit_block_move (target, temp, size,
5756 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5759 machine_mode pointer_mode
5760 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5761 machine_mode address_mode = get_address_mode (target);
5763 /* Compute the size of the data to copy from the string. */
5765 tree copy_size = size_binop_loc (loc, MIN_EXPR,
5766 make_tree (sizetype, size),
5767 size_int (TREE_STRING_LENGTH (exp)));
5769 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX, VOIDmode,
5771 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5772 rtx_code_label *label = 0;
5774 /* Copy that much. */
5775 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5776 TYPE_UNSIGNED (sizetype));
5777 emit_block_move (target, temp, copy_size_rtx,
5779 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5781 /* Figure out how much is left in TARGET that we have to clear.
5782 Do all calculations in pointer_mode. */
5783 if (CONST_INT_P (copy_size_rtx))
5785 size = plus_constant (address_mode, size,
5786 -INTVAL (copy_size_rtx));
5787 target = adjust_address (target, BLKmode,
5788 INTVAL (copy_size_rtx));
5792 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5793 copy_size_rtx, NULL_RTX, 0,
5796 if (GET_MODE (copy_size_rtx) != address_mode)
5797 copy_size_rtx = convert_to_mode (address_mode,
5799 TYPE_UNSIGNED (sizetype));
5801 target = offset_address (target, copy_size_rtx,
5802 highest_pow2_factor (copy_size));
5803 label = gen_label_rtx ();
5804 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5805 GET_MODE (size), 0, label);
5808 if (size != const0_rtx)
5809 clear_storage (target, size, BLOCK_OP_NORMAL);
5815 /* Handle calls that return values in multiple non-contiguous locations.
5816 The Irix 6 ABI has examples of this. */
5817 else if (GET_CODE (target) == PARALLEL)
5819 if (GET_CODE (temp) == PARALLEL)
5820 emit_group_move (target, temp);
5822 emit_group_load (target, temp, TREE_TYPE (exp),
5823 int_size_in_bytes (TREE_TYPE (exp)));
5825 else if (GET_CODE (temp) == PARALLEL)
5826 emit_group_store (target, temp, TREE_TYPE (exp),
5827 int_size_in_bytes (TREE_TYPE (exp)));
5828 else if (GET_MODE (temp) == BLKmode)
5829 emit_block_move (target, temp, expr_size (exp),
5831 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5832 /* If we emit a nontemporal store, there is nothing else to do. */
5833 else if (nontemporal && emit_storent_insn (target, temp))
if (reverse)
5838 temp = flip_storage_order (GET_MODE (target), temp);
5839 temp = force_operand (temp, target);
if (temp != target)
5841 emit_move_insn (target, temp);
5848 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5850 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
5853 return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
5857 /* Return true if field F of structure TYPE is a flexible array. */
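/* For example, DATA in struct s { int len; char data[]; } is a flexible
   array member: it is the last field, its domain has a zero minimum and
   no maximum, and the enclosing struct still has a computable size.  */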
5860 flexible_array_member_p (const_tree f, const_tree type)
5865 return (DECL_CHAIN (f) == NULL
5866 && TREE_CODE (tf) == ARRAY_TYPE
5868 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5869 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5870 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5871 && int_size_in_bytes (type) >= 0);
5874 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5875 must have in order for it to completely initialize a value of type TYPE.
5876 Return -1 if the number isn't known.
5878 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
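/* For instance, for struct { int a; int b[3]; } the !FOR_CTOR_P estimate
   is 1 + 3 == 4 scalars, while FOR_CTOR_P asks how many top-level
   initializers a complete constructor needs, namely 2 (one per field).  */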
5880 static HOST_WIDE_INT
5881 count_type_elements (const_tree type, bool for_ctor_p)
5883 switch (TREE_CODE (type))
5889 nelts = array_type_nelts (type);
5890 if (nelts && tree_fits_uhwi_p (nelts))
5892 unsigned HOST_WIDE_INT n;
5894 n = tree_to_uhwi (nelts) + 1;
5895 if (n == 0 || for_ctor_p)
5898 return n * count_type_elements (TREE_TYPE (type), false);
5900 return for_ctor_p ? -1 : 1;
5905 unsigned HOST_WIDE_INT n;
5909 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5910 if (TREE_CODE (f) == FIELD_DECL)
5913 n += count_type_elements (TREE_TYPE (f), false);
5914 else if (!flexible_array_member_p (f, type))
5915 /* Don't count flexible arrays, which are not supposed
5916 to be initialized. */
n++;
5924 case QUAL_UNION_TYPE:
5929 gcc_assert (!for_ctor_p);
5930 /* Estimate the number of scalars in each field and pick the
5931 maximum. Other estimates would do instead; the idea is simply
5932 to make sure that the estimate is not sensitive to the ordering
5935 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5936 if (TREE_CODE (f) == FIELD_DECL)
5938 m = count_type_elements (TREE_TYPE (f), false);
5939 /* If the field doesn't span the whole union, add an extra
5940 scalar for the rest. */
5941 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5942 TYPE_SIZE (type)) != 1)
5955 unsigned HOST_WIDE_INT nelts;
5956 if (TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
5964 case FIXED_POINT_TYPE:
5969 case REFERENCE_TYPE:
5985 /* Helper for categorize_ctor_elements. Identical interface. */
5988 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5989 HOST_WIDE_INT *p_unique_nz_elts,
5990 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5992 unsigned HOST_WIDE_INT idx;
5993 HOST_WIDE_INT nz_elts, unique_nz_elts, init_elts, num_fields;
5994 tree value, purpose, elt_type;
5996 /* Whether CTOR is a valid constant initializer, in accordance with what
5997 initializer_constant_valid_p does. If inferred from the constructor
5998 elements, true until proven otherwise. */
5999 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
6000 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
6006 elt_type = NULL_TREE;
6008 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
6010 HOST_WIDE_INT mult = 1;
6012 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
6014 tree lo_index = TREE_OPERAND (purpose, 0);
6015 tree hi_index = TREE_OPERAND (purpose, 1);
6017 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
6018 mult = (tree_to_uhwi (hi_index)
6019 - tree_to_uhwi (lo_index) + 1);
6022 elt_type = TREE_TYPE (value);
6024 switch (TREE_CODE (value))
6028 HOST_WIDE_INT nz = 0, unz = 0, ic = 0;
6030 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &unz,
6033 nz_elts += mult * nz;
6034 unique_nz_elts += unz;
6035 init_elts += mult * ic;
6037 if (const_from_elts_p && const_p)
6038 const_p = const_elt_p;
6045 if (!initializer_zerop (value))
6054 nz_elts += mult * TREE_STRING_LENGTH (value);
6055 unique_nz_elts += TREE_STRING_LENGTH (value);
6056 init_elts += mult * TREE_STRING_LENGTH (value);
6060 if (!initializer_zerop (TREE_REALPART (value)))
6065 if (!initializer_zerop (TREE_IMAGPART (value)))
6070 init_elts += 2 * mult;
6075 /* We can only construct constant-length vectors using
6077 unsigned int nunits = VECTOR_CST_NELTS (value).to_constant ();
6078 for (unsigned int i = 0; i < nunits; ++i)
6080 tree v = VECTOR_CST_ELT (value, i);
6081 if (!initializer_zerop (v))
6093 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
6094 nz_elts += mult * tc;
6095 unique_nz_elts += tc;
6096 init_elts += mult * tc;
6098 if (const_from_elts_p && const_p)
6100 = initializer_constant_valid_p (value,
6102 TYPE_REVERSE_STORAGE_ORDER
6110 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
6111 num_fields, elt_type))
6112 *p_complete = false;
6114 *p_nz_elts += nz_elts;
6115 *p_unique_nz_elts += unique_nz_elts;
6116 *p_init_elts += init_elts;
6121 /* Examine CTOR to discover:
6122 * how many scalar fields are set to nonzero values,
6123 and place it in *P_NZ_ELTS;
6124 * the same, but counting RANGE_EXPRs as multiplier of 1 instead of
6125 high - low + 1 (this can be useful for callers to determine ctors
6126 that could be cheaply initialized with (perhaps nested) loops
6127 rather than copied from huge read-only data),
6128 and place it in *P_UNIQUE_NZ_ELTS;
6129 * how many scalar fields in total are in CTOR,
6130 and place it in *P_INIT_ELTS;
6131 * whether the constructor is complete -- in the sense that every
6132 meaningful byte is explicitly given a value --
6133 and place it in *P_COMPLETE.
6135 Return whether or not CTOR is a valid static constant initializer, the same
6136 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
6139 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
6140 HOST_WIDE_INT *p_unique_nz_elts,
6141 HOST_WIDE_INT *p_init_elts, bool *p_complete)
6144 *p_unique_nz_elts = 0;
6148 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_unique_nz_elts,
6149 p_init_elts, p_complete);
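/* Illustrative sketch, not part of the original file: how a caller
   might combine the counts computed above.  The helper name is
   hypothetical; the 3/4 threshold mirrors mostly_zeros_p below.  */

static bool
example_worth_clearing_first_p (tree ctor)
{
  HOST_WIDE_INT nz_elts, unique_nz_elts, init_elts;
  bool complete_p;

  categorize_ctor_elements (ctor, &nz_elts, &unique_nz_elts,
			    &init_elts, &complete_p);
  /* Clear the object first when some elements are missing or when
     fewer than a quarter of the initialized scalars are nonzero.  */
  return !complete_p || nz_elts < init_elts / 4;
}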
6152 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
6153 of which had type LAST_TYPE. Each element was itself a complete
6154 initializer, in the sense that every meaningful byte was explicitly
6155 given a value. Return true if the same is true for the constructor as a whole. */
6159 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
6160 const_tree last_type)
6162 if (TREE_CODE (type) == UNION_TYPE
6163 || TREE_CODE (type) == QUAL_UNION_TYPE)
6168 gcc_assert (num_elts == 1 && last_type);
6170 /* ??? We could look at each element of the union, and find the
6171 largest element, which would avoid comparing the size of the
6172 initialized element against any tail padding in the union.
6173 Doesn't seem worth the effort... */
6174 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
6177 return count_type_elements (type, true) == num_elts;
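/* For example (illustrative, not from the original source): given

     union u { char c; int i; };

   an initializer for member I covers every byte of the union, so the
   TYPE_SIZE comparison above succeeds, whereas an initializer for
   member C leaves the remaining bytes unspecified, the comparison
   fails, and the constructor is treated as incomplete.  */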
6180 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
6183 mostly_zeros_p (const_tree exp)
6185 if (TREE_CODE (exp) == CONSTRUCTOR)
6187 HOST_WIDE_INT nz_elts, unz_elts, init_elts;
6190 categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
6192 return !complete_p || nz_elts < init_elts / 4;
6195 return initializer_zerop (exp);
6198 /* Return 1 if EXP contains all zeros. */
6201 all_zeros_p (const_tree exp)
6203 if (TREE_CODE (exp) == CONSTRUCTOR)
6205 HOST_WIDE_INT nz_elts, unz_elts, init_elts;
6208 categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
6210 return nz_elts == 0;
6213 return initializer_zerop (exp);
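/* Illustrative sketch, not part of the original file: the two
   predicates above are typically consulted in this order when deciding
   how to expand an initializer.  The helper and its return codes are
   hypothetical.  */

static int
example_clearing_strategy (const_tree init)
{
  if (all_zeros_p (init))
    return 2;	/* A single clear_storage call does the whole job.  */
  if (mostly_zeros_p (init))
    return 1;	/* Clear first, then store the few nonzero elements.  */
  return 0;	/* Store every element individually.  */
}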
6216 /* Helper function for store_constructor.
6217 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
6218 CLEARED is as for store_constructor.
6219 ALIAS_SET is the alias set to use for any stores.
6220 If REVERSE is true, the store is to be done in reverse order.
6222 This provides a recursive shortcut back to store_constructor when it isn't
6223 necessary to go through store_field. This is so that we can pass through
6224 the cleared field to let store_constructor know that we may not have to
6225 clear a substructure if the outer structure has already been cleared. */
6228 store_constructor_field (rtx target, poly_uint64 bitsize, poly_int64 bitpos,
6229 poly_uint64 bitregion_start,
6230 poly_uint64 bitregion_end,
6232 tree exp, int cleared,
6233 alias_set_type alias_set, bool reverse)
6236 poly_uint64 bytesize;
6237 if (TREE_CODE (exp) == CONSTRUCTOR
6238 /* We can only call store_constructor recursively if the size and
6239 bit position are on a byte boundary. */
6240 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
6241 && maybe_ne (bitsize, 0U)
6242 && multiple_p (bitsize, BITS_PER_UNIT, &bytesize)
6243 /* If we have a nonzero bitpos for a register target, then we just
6244 let store_field do the bitfield handling. This is unlikely to
6245 generate unnecessary clear instructions anyway. */
6246 && (known_eq (bitpos, 0) || MEM_P (target)))
6250 machine_mode target_mode = GET_MODE (target);
6251 if (target_mode != BLKmode
6252 && !multiple_p (bitpos, GET_MODE_ALIGNMENT (target_mode)))
6253 target_mode = BLKmode;
6254 target = adjust_address (target, target_mode, bytepos);
6258 /* Update the alias set, if required. */
6259 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
6260 && MEM_ALIAS_SET (target) != 0)
6262 target = copy_rtx (target);
6263 set_mem_alias_set (target, alias_set);
6266 store_constructor (exp, target, cleared, bytesize, reverse);
6269 store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
6270 exp, alias_set, false, reverse);
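/* Illustrative sketch, not part of the original file: the byte-boundary
   test used above, in isolation.  multiple_p succeeds only when the bit
   quantities divide evenly into bytes, which is what makes the direct
   store_constructor call legal.  The helper name is hypothetical.  */

static bool
example_byte_aligned_p (poly_int64 bitpos, poly_uint64 bitsize)
{
  poly_int64 bytepos;
  poly_uint64 bytesize;
  return (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
	  && maybe_ne (bitsize, 0U)
	  && multiple_p (bitsize, BITS_PER_UNIT, &bytesize));
}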
6274 /* Returns the number of FIELD_DECLs in TYPE. */
6277 fields_length (const_tree type)
6279 tree t = TYPE_FIELDS (type);
6282 for (; t; t = DECL_CHAIN (t))
6283 if (TREE_CODE (t) == FIELD_DECL)
6290 /* Store the value of constructor EXP into the rtx TARGET.
6291 TARGET is either a REG or a MEM; we know it cannot conflict, since
6292 safe_from_p has been called.
6293 CLEARED is true if TARGET is known to have been zero'd.
6294 SIZE is the number of bytes of TARGET we are allowed to modify: this
6295 may not be the same as the size of EXP if we are assigning to a field
6296 which has been packed to exclude padding bits.
6297 If REVERSE is true, the store is to be done in reverse order. */
6300 store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
6303 tree type = TREE_TYPE (exp);
6304 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6305 poly_int64 bitregion_end = known_gt (size, 0) ? size * BITS_PER_UNIT - 1 : 0;
6307 switch (TREE_CODE (type))
6311 case QUAL_UNION_TYPE:
6313 unsigned HOST_WIDE_INT idx;
6316 /* The storage order is specified for every aggregate type. */
6317 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6319 /* If size is zero or the target is already cleared, do nothing. */
6320 if (known_eq (size, 0) || cleared)
6322 /* We either clear the aggregate or indicate the value is dead. */
6323 else if ((TREE_CODE (type) == UNION_TYPE
6324 || TREE_CODE (type) == QUAL_UNION_TYPE)
6325 && ! CONSTRUCTOR_ELTS (exp))
6326 /* If the constructor is empty, clear the union. */
6328 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6332 /* If we are building a static constructor into a register,
6333 set the initial value as zero so we can fold the value into
6334 a constant. But if more than one register is involved,
6335 this probably loses. */
6336 else if (REG_P (target) && TREE_STATIC (exp)
6337 && known_le (GET_MODE_SIZE (GET_MODE (target)),
6338 REGMODE_NATURAL_SIZE (GET_MODE (target))))
6340 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6344 /* If the constructor has fewer fields than the structure or
6345 if we are initializing the structure to mostly zeros, clear
6346 the whole structure first. Don't do this if TARGET is a
6347 register whose mode size isn't equal to SIZE since
6348 clear_storage can't handle this case. */
6349 else if (known_size_p (size)
6350 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
6351 || mostly_zeros_p (exp))
6353 || known_eq (GET_MODE_SIZE (GET_MODE (target)), size)))
6355 clear_storage (target, gen_int_mode (size, Pmode),
6360 if (REG_P (target) && !cleared)
6361 emit_clobber (target);
6363 /* Store each element of the constructor into the
6364 corresponding field of TARGET. */
6365 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6368 HOST_WIDE_INT bitsize;
6369 HOST_WIDE_INT bitpos = 0;
6371 rtx to_rtx = target;
6373 /* Just ignore missing fields. We cleared the whole
6374 structure, above, if any fields are missing. */
6378 if (cleared && initializer_zerop (value))
6381 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6382 bitsize = tree_to_uhwi (DECL_SIZE (field));
6386 mode = DECL_MODE (field);
6387 if (DECL_BIT_FIELD (field))
6390 offset = DECL_FIELD_OFFSET (field);
6391 if (tree_fits_shwi_p (offset)
6392 && tree_fits_shwi_p (bit_position (field)))
6394 bitpos = int_bit_position (field);
6400 /* If this initializes a field that is smaller than a
6401 word, at the start of a word, try to widen it to a full
6402 word. This special case allows us to output C++ member
6403 function initializations in a form that the optimizers can understand. */
6405 if (WORD_REGISTER_OPERATIONS
6407 && bitsize < BITS_PER_WORD
6408 && bitpos % BITS_PER_WORD == 0
6409 && GET_MODE_CLASS (mode) == MODE_INT
6410 && TREE_CODE (value) == INTEGER_CST
6412 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6414 tree type = TREE_TYPE (value);
6416 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6418 type = lang_hooks.types.type_for_mode
6419 (word_mode, TYPE_UNSIGNED (type));
6420 value = fold_convert (type, value);
6421 /* Make sure the bits beyond the original bitsize are zero
6422 so that we can correctly avoid extra zeroing stores in
6423 later constructor elements. */
6425 = wide_int_to_tree (type, wi::mask (bitsize, false,
6427 value = fold_build2 (BIT_AND_EXPR, type, value, bitsize_mask);
6430 if (BYTES_BIG_ENDIAN)
6432 = fold_build2 (LSHIFT_EXPR, type, value,
6433 build_int_cst (type,
6434 BITS_PER_WORD - bitsize));
6435 bitsize = BITS_PER_WORD;
6439 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6440 && DECL_NONADDRESSABLE_P (field))
6442 to_rtx = copy_rtx (to_rtx);
6443 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6446 store_constructor_field (to_rtx, bitsize, bitpos,
6447 0, bitregion_end, mode,
6449 get_alias_set (TREE_TYPE (field)),
6457 unsigned HOST_WIDE_INT i;
6460 tree elttype = TREE_TYPE (type);
6462 HOST_WIDE_INT minelt = 0;
6463 HOST_WIDE_INT maxelt = 0;
6465 /* The storage order is specified for every aggregate type. */
6466 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6468 domain = TYPE_DOMAIN (type);
6469 const_bounds_p = (TYPE_MIN_VALUE (domain)
6470 && TYPE_MAX_VALUE (domain)
6471 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6472 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6474 /* If we have constant bounds for the range of the type, get them. */
6477 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6478 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6481 /* If the constructor has fewer elements than the array, clear
6482 the whole array first. Similarly if this is a static
6483 constructor of a non-BLKmode object. */
6486 else if (REG_P (target) && TREE_STATIC (exp))
6490 unsigned HOST_WIDE_INT idx;
6492 HOST_WIDE_INT count = 0, zero_count = 0;
6493 need_to_clear = ! const_bounds_p;
6495 /* This loop is a more accurate version of the loop in
6496 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6497 is also needed to check for missing elements. */
6498 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6500 HOST_WIDE_INT this_node_count;
6505 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6507 tree lo_index = TREE_OPERAND (index, 0);
6508 tree hi_index = TREE_OPERAND (index, 1);
6510 if (! tree_fits_uhwi_p (lo_index)
6511 || ! tree_fits_uhwi_p (hi_index))
6517 this_node_count = (tree_to_uhwi (hi_index)
6518 - tree_to_uhwi (lo_index) + 1);
6521 this_node_count = 1;
6523 count += this_node_count;
6524 if (mostly_zeros_p (value))
6525 zero_count += this_node_count;
6528 /* Clear the entire array first if there are any missing
6529 elements, or if the incidence of zero elements is >=
6530 75%. */
6531 if (! need_to_clear
6532 && (count < maxelt - minelt + 1
6533 || 4 * zero_count >= 3 * count))
6537 if (need_to_clear && maybe_gt (size, 0))
6540 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6542 clear_storage (target, gen_int_mode (size, Pmode),
6547 if (!cleared && REG_P (target))
6548 /* Inform later passes that the old value is dead. */
6549 emit_clobber (target);
6551 /* Store each element of the constructor into the
6552 corresponding element of TARGET, determined by counting the elements. */
6554 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6558 HOST_WIDE_INT bitpos;
6559 rtx xtarget = target;
6561 if (cleared && initializer_zerop (value))
6564 mode = TYPE_MODE (elttype);
6565 if (mode == BLKmode)
6566 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6567 ? tree_to_uhwi (TYPE_SIZE (elttype))
6570 bitsize = GET_MODE_BITSIZE (mode);
6572 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6574 tree lo_index = TREE_OPERAND (index, 0);
6575 tree hi_index = TREE_OPERAND (index, 1);
6576 rtx index_r, pos_rtx;
6577 HOST_WIDE_INT lo, hi, count;
6580 /* If the range is constant and "small", unroll the loop. */
6582 && tree_fits_shwi_p (lo_index)
6583 && tree_fits_shwi_p (hi_index)
6584 && (lo = tree_to_shwi (lo_index),
6585 hi = tree_to_shwi (hi_index),
6586 count = hi - lo + 1,
6589 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6590 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6593 lo -= minelt; hi -= minelt;
6594 for (; lo <= hi; lo++)
6596 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6599 && !MEM_KEEP_ALIAS_SET_P (target)
6600 && TREE_CODE (type) == ARRAY_TYPE
6601 && TYPE_NONALIASED_COMPONENT (type))
6603 target = copy_rtx (target);
6604 MEM_KEEP_ALIAS_SET_P (target) = 1;
6607 store_constructor_field
6608 (target, bitsize, bitpos, 0, bitregion_end,
6609 mode, value, cleared,
6610 get_alias_set (elttype), reverse);
6615 rtx_code_label *loop_start = gen_label_rtx ();
6616 rtx_code_label *loop_end = gen_label_rtx ();
6619 expand_normal (hi_index);
6621 index = build_decl (EXPR_LOCATION (exp),
6622 VAR_DECL, NULL_TREE, domain);
6623 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6624 SET_DECL_RTL (index, index_r);
6625 store_expr (lo_index, index_r, 0, false, reverse);
6627 /* Build the head of the loop. */
6628 do_pending_stack_adjust ();
6629 emit_label (loop_start);
6631 /* Assign value to element index. */
6633 fold_convert (ssizetype,
6634 fold_build2 (MINUS_EXPR,
6637 TYPE_MIN_VALUE (domain)));
6640 size_binop (MULT_EXPR, position,
6641 fold_convert (ssizetype,
6642 TYPE_SIZE_UNIT (elttype)));
6644 pos_rtx = expand_normal (position);
6645 xtarget = offset_address (target, pos_rtx,
6646 highest_pow2_factor (position));
6647 xtarget = adjust_address (xtarget, mode, 0);
6648 if (TREE_CODE (value) == CONSTRUCTOR)
6649 store_constructor (value, xtarget, cleared,
6650 exact_div (bitsize, BITS_PER_UNIT),
6653 store_expr (value, xtarget, 0, false, reverse);
6655 /* Generate a conditional jump to exit the loop. */
6656 exit_cond = build2 (LT_EXPR, integer_type_node,
6658 jumpif (exit_cond, loop_end,
6659 profile_probability::uninitialized ());
6661 /* Update the loop counter, and jump to the head of the loop. */
6663 expand_assignment (index,
6664 build2 (PLUS_EXPR, TREE_TYPE (index),
6665 index, integer_one_node),
6668 emit_jump (loop_start);
6670 /* Build the end of the loop. */
6671 emit_label (loop_end);
6674 else if ((index != 0 && ! tree_fits_shwi_p (index))
6675 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6680 index = ssize_int (1);
6683 index = fold_convert (ssizetype,
6684 fold_build2 (MINUS_EXPR,
6687 TYPE_MIN_VALUE (domain)));
6690 size_binop (MULT_EXPR, index,
6691 fold_convert (ssizetype,
6692 TYPE_SIZE_UNIT (elttype)));
6693 xtarget = offset_address (target,
6694 expand_normal (position),
6695 highest_pow2_factor (position));
6696 xtarget = adjust_address (xtarget, mode, 0);
6697 store_expr (value, xtarget, 0, false, reverse);
6702 bitpos = ((tree_to_shwi (index) - minelt)
6703 * tree_to_uhwi (TYPE_SIZE (elttype)));
6705 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6707 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6708 && TREE_CODE (type) == ARRAY_TYPE
6709 && TYPE_NONALIASED_COMPONENT (type))
6711 target = copy_rtx (target);
6712 MEM_KEEP_ALIAS_SET_P (target) = 1;
6714 store_constructor_field (target, bitsize, bitpos, 0,
6715 bitregion_end, mode, value,
6716 cleared, get_alias_set (elttype),
6725 unsigned HOST_WIDE_INT idx;
6726 constructor_elt *ce;
6729 insn_code icode = CODE_FOR_nothing;
6731 tree elttype = TREE_TYPE (type);
6732 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6733 machine_mode eltmode = TYPE_MODE (elttype);
6734 HOST_WIDE_INT bitsize;
6735 HOST_WIDE_INT bitpos;
6736 rtvec vector = NULL;
6738 unsigned HOST_WIDE_INT const_n_elts;
6739 alias_set_type alias;
6740 bool vec_vec_init_p = false;
6741 machine_mode mode = GET_MODE (target);
6743 gcc_assert (eltmode != BLKmode);
6745 /* Try using vec_duplicate_optab for uniform vectors. */
6746 if (!TREE_SIDE_EFFECTS (exp)
6747 && VECTOR_MODE_P (mode)
6748 && eltmode == GET_MODE_INNER (mode)
6749 && ((icode = optab_handler (vec_duplicate_optab, mode))
6750 != CODE_FOR_nothing)
6751 && (elt = uniform_vector_p (exp)))
6753 struct expand_operand ops[2];
6754 create_output_operand (&ops[0], target, mode);
6755 create_input_operand (&ops[1], expand_normal (elt), eltmode);
6756 expand_insn (icode, 2, ops);
6757 if (!rtx_equal_p (target, ops[0].value))
6758 emit_move_insn (target, ops[0].value);
6762 n_elts = TYPE_VECTOR_SUBPARTS (type);
6764 && VECTOR_MODE_P (mode)
6765 && n_elts.is_constant (&const_n_elts))
6767 machine_mode emode = eltmode;
6769 if (CONSTRUCTOR_NELTS (exp)
6770 && (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
6773 tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
6774 gcc_assert (known_eq (CONSTRUCTOR_NELTS (exp)
6775 * TYPE_VECTOR_SUBPARTS (etype),
6777 emode = TYPE_MODE (etype);
6779 icode = convert_optab_handler (vec_init_optab, mode, emode);
6780 if (icode != CODE_FOR_nothing)
6782 unsigned int i, n = const_n_elts;
6784 if (emode != eltmode)
6786 n = CONSTRUCTOR_NELTS (exp);
6787 vec_vec_init_p = true;
6789 vector = rtvec_alloc (n);
6790 for (i = 0; i < n; i++)
6791 RTVEC_ELT (vector, i) = CONST0_RTX (emode);
6795 /* If the constructor has fewer elements than the vector,
6796 clear the whole vector first. Similarly if this is a static
6797 constructor of a non-BLKmode object. */
6800 else if (REG_P (target) && TREE_STATIC (exp))
6804 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6807 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6809 tree sz = TYPE_SIZE (TREE_TYPE (value));
6811 = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR, sz,
6812 TYPE_SIZE (elttype)));
6814 count += n_elts_here;
6815 if (mostly_zeros_p (value))
6816 zero_count += n_elts_here;
6819 /* Clear the entire vector first if there are any missing elements,
6820 or if the incidence of zero elements is >= 75%. */
6821 need_to_clear = (maybe_lt (count, n_elts)
6822 || 4 * zero_count >= 3 * count);
6825 if (need_to_clear && maybe_gt (size, 0) && !vector)
6828 emit_move_insn (target, CONST0_RTX (mode));
6830 clear_storage (target, gen_int_mode (size, Pmode),
6835 /* Inform later passes that the old value is dead. */
6836 if (!cleared && !vector && REG_P (target))
6837 emit_move_insn (target, CONST0_RTX (mode));
6840 alias = MEM_ALIAS_SET (target);
6842 alias = get_alias_set (elttype);
6844 /* Store each element of the constructor into the corresponding
6845 element of TARGET, determined by counting the elements. */
6846 for (idx = 0, i = 0;
6847 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6848 idx++, i += bitsize / elt_size)
6850 HOST_WIDE_INT eltpos;
6851 tree value = ce->value;
6853 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6854 if (cleared && initializer_zerop (value))
6858 eltpos = tree_to_uhwi (ce->index);
6866 gcc_assert (ce->index == NULL_TREE);
6867 gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
6871 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6872 RTVEC_ELT (vector, eltpos) = expand_normal (value);
6876 machine_mode value_mode
6877 = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6878 ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
6879 bitpos = eltpos * elt_size;
6880 store_constructor_field (target, bitsize, bitpos, 0,
6881 bitregion_end, value_mode,
6882 value, cleared, alias, reverse);
6887 emit_insn (GEN_FCN (icode) (target,
6888 gen_rtx_PARALLEL (mode, vector)));
6897 /* Store the value of EXP (an expression tree)
6898 into a subfield of TARGET which has mode MODE and occupies
6899 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6900 If MODE is VOIDmode, it means that we are storing into a bit-field.
6902 BITREGION_START is bitpos of the first bitfield in this region.
6903 BITREGION_END is the bitpos of the ending bitfield in this region.
6904 These two fields are 0 if the C++ memory model does not apply,
6905 or we are not interested in keeping track of bitfield regions.
6907 Always return const0_rtx unless we have something particular to return.
6910 ALIAS_SET is the alias set for the destination. This value will
6911 (in general) be different from that for TARGET, since TARGET is a
6912 reference to the containing structure.
6914 If NONTEMPORAL is true, try generating a nontemporal store.
6916 If REVERSE is true, the store is to be done in reverse order. */
6919 store_field (rtx target, poly_int64 bitsize, poly_int64 bitpos,
6920 poly_uint64 bitregion_start, poly_uint64 bitregion_end,
6921 machine_mode mode, tree exp,
6922 alias_set_type alias_set, bool nontemporal, bool reverse)
6924 if (TREE_CODE (exp) == ERROR_MARK)
6927 /* If we have nothing to store, do nothing unless the expression has
6928 side-effects. Don't do that for zero sized addressable lhs of calls. */
6930 if (known_eq (bitsize, 0)
6931 && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6932 || TREE_CODE (exp) != CALL_EXPR))
6933 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6935 if (GET_CODE (target) == CONCAT)
6937 /* We're storing into a struct containing a single __complex. */
6939 gcc_assert (known_eq (bitpos, 0));
6940 return store_expr (exp, target, 0, nontemporal, reverse);
6943 /* If the structure is in a register or if the component
6944 is a bit field, we cannot use addressing to access it.
6945 Use bit-field techniques or SUBREG to store in it. */
6947 poly_int64 decl_bitsize;
6948 if (mode == VOIDmode
6949 || (mode != BLKmode && ! direct_store[(int) mode]
6950 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6951 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6953 || GET_CODE (target) == SUBREG
6954 /* If the field isn't aligned enough to store as an ordinary memref,
6955 store it as a bit field. */
6957 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6958 || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
6959 && targetm.slow_unaligned_access (mode, MEM_ALIGN (target)))
6960 || !multiple_p (bitpos, BITS_PER_UNIT)))
6961 || (known_size_p (bitsize)
6963 && maybe_gt (GET_MODE_BITSIZE (mode), bitsize))
6964 /* If the RHS and field are a constant size and the size of the
6965 RHS isn't the same size as the bitfield, we must use bitfield operations. */
6967 || (known_size_p (bitsize)
6968 && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
6969 && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
6971 /* Except for initialization of full bytes from a CONSTRUCTOR, which
6972 we will handle specially below. */
6973 && !(TREE_CODE (exp) == CONSTRUCTOR
6974 && multiple_p (bitsize, BITS_PER_UNIT))
6975 /* And except for bitwise copying of TREE_ADDRESSABLE types,
6976 where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
6977 includes some extra padding. store_expr / expand_expr will in
6978 that case call get_inner_reference that will have the bitsize
6979 we check here and thus the block move will not clobber the
6980 padding that shouldn't be clobbered. In the future we could
6981 replace the TREE_ADDRESSABLE check with a check that
6982 the object returned by get_base_address needs to live in memory. */
6983 && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6984 || TREE_CODE (exp) != COMPONENT_REF
6985 || !multiple_p (bitsize, BITS_PER_UNIT)
6986 || !multiple_p (bitpos, BITS_PER_UNIT)
6987 || !poly_int_tree_p (DECL_SIZE (TREE_OPERAND (exp, 1)),
6989 || maybe_ne (decl_bitsize, bitsize)))
6990 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6991 decl we must use bitfield operations. */
6992 || (known_size_p (bitsize)
6993 && TREE_CODE (exp) == MEM_REF
6994 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6995 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6996 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6997 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
7002 /* If EXP is a NOP_EXPR of precision less than its mode, then that
7003 implies a mask operation. If the precision is the same size as
7004 the field we're storing into, that mask is redundant. This is
7005 particularly common with bit field assignments generated by the C++ front end. */
7007 nop_def = get_def_for_expr (exp, NOP_EXPR);
7010 tree type = TREE_TYPE (exp);
7011 if (INTEGRAL_TYPE_P (type)
7012 && maybe_ne (TYPE_PRECISION (type),
7013 GET_MODE_BITSIZE (TYPE_MODE (type)))
7014 && known_eq (bitsize, TYPE_PRECISION (type)))
7016 tree op = gimple_assign_rhs1 (nop_def);
7017 type = TREE_TYPE (op);
7018 if (INTEGRAL_TYPE_P (type)
7019 && known_ge (TYPE_PRECISION (type), bitsize))
7024 temp = expand_normal (exp);
7026 /* We don't support variable-sized BLKmode bitfields, since our
7027 handling of BLKmode is bound up with the ability to break
7028 things into words. */
7029 gcc_assert (mode != BLKmode || bitsize.is_constant ());
7031 /* Handle calls that return values in multiple non-contiguous locations.
7032 The Irix 6 ABI has examples of this. */
7033 if (GET_CODE (temp) == PARALLEL)
7035 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
7036 machine_mode temp_mode = GET_MODE (temp);
7037 if (temp_mode == BLKmode || temp_mode == VOIDmode)
7038 temp_mode = smallest_int_mode_for_size (size * BITS_PER_UNIT);
7039 rtx temp_target = gen_reg_rtx (temp_mode);
7040 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
7044 /* Handle calls that return BLKmode values in registers. */
7045 else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
7047 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
7048 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
7052 /* If the value has aggregate type and an integral mode then, if BITSIZE
7053 is narrower than this mode and this is for big-endian data, we first
7054 need to put the value into the low-order bits for store_bit_field,
7055 except when MODE is BLKmode and BITSIZE is larger than the word size
7056 (see the handling of fields larger than a word in store_bit_field).
7057 Moreover, the field may be not aligned on a byte boundary; in this
7058 case, if it has reverse storage order, it needs to be accessed as a
7059 scalar field with reverse storage order and we must first put the
7060 value into target order. */
7061 scalar_int_mode temp_mode;
7062 if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
7063 && is_int_mode (GET_MODE (temp), &temp_mode))
7065 HOST_WIDE_INT size = GET_MODE_BITSIZE (temp_mode);
7067 reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
7070 temp = flip_storage_order (temp_mode, temp);
7072 gcc_checking_assert (known_le (bitsize, size));
7073 if (maybe_lt (bitsize, size)
7074 && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
7075 /* Use of to_constant for BLKmode was checked above. */
7076 && !(mode == BLKmode && bitsize.to_constant () > BITS_PER_WORD))
7077 temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
7078 size - bitsize, NULL_RTX, 1);
7081 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
7082 if (mode != VOIDmode && mode != BLKmode
7083 && mode != TYPE_MODE (TREE_TYPE (exp)))
7084 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
7086 /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
7087 and BITPOS must be aligned on a byte boundary. If so, we simply do
7088 a block copy. Likewise for a BLKmode-like TARGET. */
7089 if (GET_MODE (temp) == BLKmode
7090 && (GET_MODE (target) == BLKmode
7092 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
7093 && multiple_p (bitpos, BITS_PER_UNIT)
7094 && multiple_p (bitsize, BITS_PER_UNIT))))
7096 gcc_assert (MEM_P (target) && MEM_P (temp));
7097 poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
7098 poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
7100 target = adjust_address (target, VOIDmode, bytepos);
7101 emit_block_move (target, temp,
7102 gen_int_mode (bytesize, Pmode),
7108 /* If the mode of TEMP is still BLKmode and BITSIZE is not larger than the
7109 word size, we need to load the value (see again store_bit_field). */
7110 if (GET_MODE (temp) == BLKmode && known_le (bitsize, BITS_PER_WORD))
7112 scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize);
7113 temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
7114 temp_mode, false, NULL);
7117 /* Store the value in the bitfield. */
7118 store_bit_field (target, bitsize, bitpos,
7119 bitregion_start, bitregion_end,
7120 mode, temp, reverse);
7126 /* Now build a reference to just the desired component. */
7127 rtx to_rtx = adjust_address (target, mode,
7128 exact_div (bitpos, BITS_PER_UNIT));
7130 if (to_rtx == target)
7131 to_rtx = copy_rtx (to_rtx);
7133 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
7134 set_mem_alias_set (to_rtx, alias_set);
7136 /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
7137 into a target smaller than its type; handle that case now. */
7138 if (TREE_CODE (exp) == CONSTRUCTOR && known_size_p (bitsize))
7140 poly_int64 bytesize = exact_div (bitsize, BITS_PER_UNIT);
7141 store_constructor (exp, to_rtx, 0, bytesize, reverse);
7145 return store_expr (exp, to_rtx, 0, nontemporal, reverse);
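/* Illustrative sketch, not part of the original file: a minimal call to
   store_field per the interface documented above, storing a 16-bit
   value at bit offset 8 with no bitfield region.  The helper name and
   the HImode choice are hypothetical.  */

static rtx
example_store_halfword (rtx to_rtx, tree exp)
{
  /* 16 bits at bit position 8; bitregion bounds of 0 mean the C++
     memory model does not apply; not nontemporal, not reversed.  */
  return store_field (to_rtx, 16, 8, 0, 0, HImode, exp,
		      get_alias_set (TREE_TYPE (exp)), false, false);
}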
7149 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
7150 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
7151 codes and find the ultimate containing object, which we return.
7153 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
7154 bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
7155 storage order of the field.
7156 If the position of the field is variable, we store a tree
7157 giving the variable offset (in units) in *POFFSET.
7158 This offset is in addition to the bit position.
7159 If the position is not variable, we store 0 in *POFFSET.
7161 If any of the extraction expressions is volatile,
7162 we store 1 in *PVOLATILEP. Otherwise we don't change that.
7164 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
7165 Otherwise, it is a mode that can be used to access the field.
7167 If the field describes a variable-sized object, *PMODE is set to
7168 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
7169 this case, but the address of the object can be found. */
7172 get_inner_reference (tree exp, poly_int64_pod *pbitsize,
7173 poly_int64_pod *pbitpos, tree *poffset,
7174 machine_mode *pmode, int *punsignedp,
7175 int *preversep, int *pvolatilep)
7178 machine_mode mode = VOIDmode;
7179 bool blkmode_bitfield = false;
7180 tree offset = size_zero_node;
7181 poly_offset_int bit_offset = 0;
7183 /* First get the mode, signedness, storage order and size. We do this from
7184 just the outermost expression. */
7186 if (TREE_CODE (exp) == COMPONENT_REF)
7188 tree field = TREE_OPERAND (exp, 1);
7189 size_tree = DECL_SIZE (field);
7190 if (flag_strict_volatile_bitfields > 0
7191 && TREE_THIS_VOLATILE (exp)
7192 && DECL_BIT_FIELD_TYPE (field)
7193 && DECL_MODE (field) != BLKmode)
7194 /* Volatile bitfields should be accessed in the mode of the
7195 field's type, not the mode computed based on the bit size. */
7197 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
7198 else if (!DECL_BIT_FIELD (field))
7200 mode = DECL_MODE (field);
7201 /* For vector fields re-check the target flags, as DECL_MODE
7202 could have been set with different target flags than
7203 the current function has. */
7205 && VECTOR_TYPE_P (TREE_TYPE (field))
7206 && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field))))
7207 mode = TYPE_MODE (TREE_TYPE (field));
7209 else if (DECL_MODE (field) == BLKmode)
7210 blkmode_bitfield = true;
7212 *punsignedp = DECL_UNSIGNED (field);
7214 else if (TREE_CODE (exp) == BIT_FIELD_REF)
7216 size_tree = TREE_OPERAND (exp, 1);
7217 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
7218 || TYPE_UNSIGNED (TREE_TYPE (exp)));
7220 /* For vector types, with the correct size of access, use the mode of the inner type. */
7222 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
7223 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
7224 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
7225 mode = TYPE_MODE (TREE_TYPE (exp));
7229 mode = TYPE_MODE (TREE_TYPE (exp));
7230 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
7232 if (mode == BLKmode)
7233 size_tree = TYPE_SIZE (TREE_TYPE (exp));
7235 *pbitsize = GET_MODE_BITSIZE (mode);
7240 if (! tree_fits_uhwi_p (size_tree))
7241 mode = BLKmode, *pbitsize = -1;
7243 *pbitsize = tree_to_uhwi (size_tree);
7246 *preversep = reverse_storage_order_for_component_p (exp);
7248 /* Compute cumulative bit-offset for nested component-refs and array-refs,
7249 and find the ultimate containing object. */
7252 switch (TREE_CODE (exp))
7255 bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
7260 tree field = TREE_OPERAND (exp, 1);
7261 tree this_offset = component_ref_field_offset (exp);
7263 /* If this field hasn't been filled in yet, don't go past it.
7264 This should only happen when folding expressions made during
7265 type construction. */
7266 if (this_offset == 0)
7269 offset = size_binop (PLUS_EXPR, offset, this_offset);
7270 bit_offset += wi::to_poly_offset (DECL_FIELD_BIT_OFFSET (field));
7272 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
7277 case ARRAY_RANGE_REF:
7279 tree index = TREE_OPERAND (exp, 1);
7280 tree low_bound = array_ref_low_bound (exp);
7281 tree unit_size = array_ref_element_size (exp);
7283 /* We assume all arrays have sizes that are a multiple of a byte.
7284 First subtract the lower bound, if any, in the type of the
7285 index, then convert to sizetype and multiply by the size of
7286 the array element. */
7287 if (! integer_zerop (low_bound))
7288 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
7291 offset = size_binop (PLUS_EXPR, offset,
7292 size_binop (MULT_EXPR,
7293 fold_convert (sizetype, index),
7302 bit_offset += *pbitsize;
7305 case VIEW_CONVERT_EXPR:
7309 /* Hand back the decl for MEM[&decl, off]. */
7310 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
7312 tree off = TREE_OPERAND (exp, 1);
7313 if (!integer_zerop (off))
7315 poly_offset_int boff = mem_ref_offset (exp);
7316 boff <<= LOG2_BITS_PER_UNIT;
7319 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7327 /* If any reference in the chain is volatile, the effect is volatile. */
7328 if (TREE_THIS_VOLATILE (exp))
7331 exp = TREE_OPERAND (exp, 0);
7335 /* If OFFSET is constant, see if we can return the whole thing as a
7336 constant bit position. Make sure to handle overflow during this conversion. */
7338 if (poly_int_tree_p (offset))
7340 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset),
7341 TYPE_PRECISION (sizetype));
7342 tem <<= LOG2_BITS_PER_UNIT;
7344 if (tem.to_shwi (pbitpos))
7345 *poffset = offset = NULL_TREE;
7348 /* Otherwise, split it up. */
7351 /* Avoid returning a negative bitpos as this may wreak havoc later. */
7352 if (!bit_offset.to_shwi (pbitpos) || maybe_lt (*pbitpos, 0))
7354 *pbitpos = num_trailing_bits (bit_offset.force_shwi ());
7355 poly_offset_int bytes = bits_to_bytes_round_down (bit_offset);
7356 offset = size_binop (PLUS_EXPR, offset,
7357 build_int_cst (sizetype, bytes.force_shwi ()));
7363 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7364 if (mode == VOIDmode
7366 && multiple_p (*pbitpos, BITS_PER_UNIT)
7367 && multiple_p (*pbitsize, BITS_PER_UNIT))
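/* Illustrative sketch, not part of the original file: the usual calling
   pattern for get_inner_reference, mirroring the call made from
   expand_expr_addr_expr_1 later in this file.  The helper name is
   hypothetical.  */

static tree
example_inner_base (tree exp)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;

  return get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			      &unsignedp, &reversep, &volatilep);
}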
7375 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7377 static unsigned HOST_WIDE_INT
7378 target_align (const_tree target)
7380 /* We might have a chain of nested references with intermediate misaligning
7381 bitfield components, so we need to recurse to find out. */
7383 unsigned HOST_WIDE_INT this_align, outer_align;
7385 switch (TREE_CODE (target))
7391 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7392 outer_align = target_align (TREE_OPERAND (target, 0));
7393 return MIN (this_align, outer_align);
7396 case ARRAY_RANGE_REF:
7397 this_align = TYPE_ALIGN (TREE_TYPE (target));
7398 outer_align = target_align (TREE_OPERAND (target, 0));
7399 return MIN (this_align, outer_align);
7402 case NON_LVALUE_EXPR:
7403 case VIEW_CONVERT_EXPR:
7404 this_align = TYPE_ALIGN (TREE_TYPE (target));
7405 outer_align = target_align (TREE_OPERAND (target, 0));
7406 return MAX (this_align, outer_align);
7409 return TYPE_ALIGN (TREE_TYPE (target));
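/* For example (illustrative): for a COMPONENT_REF B.F where field F was
   laid out on a 2-byte boundary inside an 8-byte-aligned structure B,
   the COMPONENT_REF case above yields MIN (16, 64) == 16 bits.  */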
7414 /* Given an rtx VALUE that may contain additions and multiplications, return
7415 an equivalent value that just refers to a register, memory, or constant.
7416 This is done by generating instructions to perform the arithmetic and
7417 returning a pseudo-register containing the value.
7419 The returned value may be a REG, SUBREG, MEM or constant. */
7422 force_operand (rtx value, rtx target)
7425 /* Use subtarget as the target for operand 0 of a binary operation. */
7426 rtx subtarget = get_subtarget (target);
7427 enum rtx_code code = GET_CODE (value);
7429 /* Check for subreg applied to an expression produced by the loop optimizer. */
7431 && !REG_P (SUBREG_REG (value))
7432 && !MEM_P (SUBREG_REG (value)))
7435 = simplify_gen_subreg (GET_MODE (value),
7436 force_reg (GET_MODE (SUBREG_REG (value)),
7437 force_operand (SUBREG_REG (value),
7439 GET_MODE (SUBREG_REG (value)),
7440 SUBREG_BYTE (value));
7441 code = GET_CODE (value);
7444 /* Check for a PIC address load. */
7445 if ((code == PLUS || code == MINUS)
7446 && XEXP (value, 0) == pic_offset_table_rtx
7447 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7448 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7449 || GET_CODE (XEXP (value, 1)) == CONST))
7452 subtarget = gen_reg_rtx (GET_MODE (value));
7453 emit_move_insn (subtarget, value);
7457 if (ARITHMETIC_P (value))
7459 op2 = XEXP (value, 1);
7460 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7462 if (code == MINUS && CONST_INT_P (op2))
7465 op2 = negate_rtx (GET_MODE (value), op2);
7468 /* Check for an addition with OP2 a constant integer and our first
7469 operand a PLUS of a virtual register and something else. In that
7470 case, we want to emit the sum of the virtual register and the
7471 constant first and then add the other value. This allows virtual
7472 register instantiation to simply modify the constant rather than
7473 creating another one around this addition. */
7474 if (code == PLUS && CONST_INT_P (op2)
7475 && GET_CODE (XEXP (value, 0)) == PLUS
7476 && REG_P (XEXP (XEXP (value, 0), 0))
7477 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7478 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7480 rtx temp = expand_simple_binop (GET_MODE (value), code,
7481 XEXP (XEXP (value, 0), 0), op2,
7482 subtarget, 0, OPTAB_LIB_WIDEN);
7483 return expand_simple_binop (GET_MODE (value), code, temp,
7484 force_operand (XEXP (XEXP (value,
7486 target, 0, OPTAB_LIB_WIDEN);
7489 op1 = force_operand (XEXP (value, 0), subtarget);
7490 op2 = force_operand (op2, NULL_RTX);
7494 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7496 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7497 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7498 target, 1, OPTAB_LIB_WIDEN);
7500 return expand_divmod (0,
7501 FLOAT_MODE_P (GET_MODE (value))
7502 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7503 GET_MODE (value), op1, op2, target, 0);
7505 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7508 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7511 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7514 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7515 target, 0, OPTAB_LIB_WIDEN);
7517 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7518 target, 1, OPTAB_LIB_WIDEN);
7521 if (UNARY_P (value))
7524 target = gen_reg_rtx (GET_MODE (value));
7525 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7532 case FLOAT_TRUNCATE:
7533 convert_move (target, op1, code == ZERO_EXTEND);
7538 expand_fix (target, op1, code == UNSIGNED_FIX);
7542 case UNSIGNED_FLOAT:
7543 expand_float (target, op1, code == UNSIGNED_FLOAT);
7547 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7551 #ifdef INSN_SCHEDULING
7552 /* On machines that have insn scheduling, we want all memory references to be
7553 explicit, so we need to deal with such paradoxical SUBREGs. */
7554 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7556 = simplify_gen_subreg (GET_MODE (value),
7557 force_reg (GET_MODE (SUBREG_REG (value)),
7558 force_operand (SUBREG_REG (value),
7560 GET_MODE (SUBREG_REG (value)),
7561 SUBREG_BYTE (value));
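/* Illustrative sketch, not part of the original file: forcing a
   (plus (reg) (const_int)) expression into an operand.  The result is
   a pseudo register holding the sum unless the expression is already
   acceptable; the helper name is hypothetical.  */

static rtx
example_force_sum (rtx reg, HOST_WIDE_INT c)
{
  rtx sum = plus_constant (GET_MODE (reg), reg, c);
  return force_operand (sum, NULL_RTX);
}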
7567 /* Subroutine of expand_expr: return nonzero iff there is no way that
7568 EXP can reference X, which is being modified. TOP_P is nonzero if this
7569 call is going to be used to determine whether we need a temporary
7570 for EXP, as opposed to a recursive call to this function.
7572 It is always safe for this routine to return zero since it merely
7573 searches for optimization opportunities. */
7576 safe_from_p (const_rtx x, tree exp, int top_p)
7582 /* If EXP has varying size, we MUST use a target since we currently
7583 have no way of allocating temporaries of variable size
7584 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7585 So we assume here that something at a higher level has prevented a
7586 clash. This is somewhat bogus, but the best we can do. Only
7587 do this when X is BLKmode and when we are at the top level. */
7588 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7589 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7590 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7591 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7592 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7594 && GET_MODE (x) == BLKmode)
7595 /* If X is in the outgoing argument area, it is always safe. */
7597 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7598 || (GET_CODE (XEXP (x, 0)) == PLUS
7599 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7602 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7603 find the underlying pseudo. */
7604 if (GET_CODE (x) == SUBREG)
7607 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7611 /* Now look at our tree code and possibly recurse. */
7612 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7614 case tcc_declaration:
7615 exp_rtl = DECL_RTL_IF_SET (exp);
7621 case tcc_exceptional:
7622 if (TREE_CODE (exp) == TREE_LIST)
7626 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7628 exp = TREE_CHAIN (exp);
7631 if (TREE_CODE (exp) != TREE_LIST)
7632 return safe_from_p (x, exp, 0);
7635 else if (TREE_CODE (exp) == CONSTRUCTOR)
7637 constructor_elt *ce;
7638 unsigned HOST_WIDE_INT idx;
7640 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7641 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7642 || !safe_from_p (x, ce->value, 0))
7646 else if (TREE_CODE (exp) == ERROR_MARK)
7647 return 1; /* An already-visited SAVE_EXPR? */
7652 /* The only case we look at here is the DECL_INITIAL inside a DECL_EXPR. */
7654 return (TREE_CODE (exp) != DECL_EXPR
7655 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7656 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7657 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7660 case tcc_comparison:
7661 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7666 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7668 case tcc_expression:
7671 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7672 the expression. If it is set, we conflict iff we are that rtx or
7673 both are in memory. Otherwise, we check all operands of the
7674 expression recursively. */
7676 switch (TREE_CODE (exp))
7679 /* If the operand is static or we are static, we can't conflict.
7680 Likewise if we don't conflict with the operand at all. */
7681 if (staticp (TREE_OPERAND (exp, 0))
7682 || TREE_STATIC (exp)
7683 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7686 /* Otherwise, the only way this can conflict is if we are taking
7687 the address of a DECL whose address is part of X, which is very rare. */
7689 exp = TREE_OPERAND (exp, 0);
7692 if (!DECL_RTL_SET_P (exp)
7693 || !MEM_P (DECL_RTL (exp)))
7696 exp_rtl = XEXP (DECL_RTL (exp), 0);
7702 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7703 get_alias_set (exp)))
7708 /* Assume that the call will clobber all hard registers and all of memory. */
7710 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7715 case WITH_CLEANUP_EXPR:
7716 case CLEANUP_POINT_EXPR:
7717 /* Lowered by gimplify.c. */
7721 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7727 /* If we have an rtx, we do not need to scan our operands. */
7731 nops = TREE_OPERAND_LENGTH (exp);
7732 for (i = 0; i < nops; i++)
7733 if (TREE_OPERAND (exp, i) != 0
7734 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7740 /* Should never get a type here. */
7744 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
7748 if (GET_CODE (exp_rtl) == SUBREG)
7750 exp_rtl = SUBREG_REG (exp_rtl);
7752 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7756 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless both
7757 are memory and they conflict. */
7758 return ! (rtx_equal_p (x, exp_rtl)
7759 || (MEM_P (x) && MEM_P (exp_rtl)
7760 && true_dependence (exp_rtl, VOIDmode, x)));
7763 /* If we reach here, it is safe. */
7764 return 1;
7768 /* Return the highest power of two that EXP is known to be a multiple of.
7769 This is used in updating alignment of MEMs in array references. */
7771 unsigned HOST_WIDE_INT
7772 highest_pow2_factor (const_tree exp)
7774 unsigned HOST_WIDE_INT ret;
7775 int trailing_zeros = tree_ctz (exp);
7776 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7777 return BIGGEST_ALIGNMENT;
7778 ret = HOST_WIDE_INT_1U << trailing_zeros;
7779 if (ret > BIGGEST_ALIGNMENT)
7780 return BIGGEST_ALIGNMENT;
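/* For example (illustrative): if EXP is the integer constant 24,
   tree_ctz reports 3 trailing zero bits, so the result is
   1 << 3 == 8, the largest power of two dividing 24; on a plain
   integer the same factor could be computed as (x & -x).  */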
7784 /* Similar, except that the alignment requirements of TARGET are
7785 taken into account. Assume it is at least as aligned as its
7786 type, unless it is a COMPONENT_REF in which case the layout of
7787 the structure gives the alignment. */
7789 static unsigned HOST_WIDE_INT
7790 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7792 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7793 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7795 return MAX (factor, talign);
7798 /* Convert the tree comparison code TCODE to the rtl one where the
7799 signedness is UNSIGNEDP. */
7801 static enum rtx_code
7802 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7814 code = unsignedp ? LTU : LT;
7817 code = unsignedp ? LEU : LE;
7820 code = unsignedp ? GTU : GT;
7823 code = unsignedp ? GEU : GE;
7825 case UNORDERED_EXPR:
7856 /* Subroutine of expand_expr. Expand the two operands of a binary
7857 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7858 The value may be stored in TARGET if TARGET is nonzero. The
7859 MODIFIER argument is as documented by expand_expr. */
7862 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7863 enum expand_modifier modifier)
7865 if (! safe_from_p (target, exp1, 1))
7867 if (operand_equal_p (exp0, exp1, 0))
7869 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7870 *op1 = copy_rtx (*op0);
7874 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7875 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
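/* Illustrative sketch, not part of the original file: using
   expand_operands to expand both sides of an addition before emitting
   the add.  The helper name is hypothetical.  */

static rtx
example_expand_plus (tree exp, rtx target)
{
  rtx op0, op1;

  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   target, &op0, &op1, EXPAND_NORMAL);
  return expand_simple_binop (TYPE_MODE (TREE_TYPE (exp)), PLUS,
			      op0, op1, target, 0, OPTAB_LIB_WIDEN);
}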
7880 /* Return a MEM that contains constant EXP. DEFER is as for
7881 output_constant_def and MODIFIER is as for expand_expr. */
7884 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7888 mem = output_constant_def (exp, defer);
7889 if (modifier != EXPAND_INITIALIZER)
7890 mem = use_anchored_address (mem);
7894 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7895 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7898 expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode,
7899 enum expand_modifier modifier, addr_space_t as)
7901 rtx result, subtarget;
7903 poly_int64 bitsize, bitpos;
7904 int unsignedp, reversep, volatilep = 0;
7907 /* If we are taking the address of a constant and are at the top level,
7908 we have to use output_constant_def since we can't call force_const_mem yet. */
7910 /* ??? This should be considered a front-end bug. We should not be
7911 generating ADDR_EXPR of something that isn't an LVALUE. The only
7912 exception here is STRING_CST. */
7913 if (CONSTANT_CLASS_P (exp))
7915 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7916 if (modifier < EXPAND_SUM)
7917 result = force_operand (result, target);
7921 /* Everything must be something allowed by is_gimple_addressable. */
7922 switch (TREE_CODE (exp))
7925 /* This case will happen via recursion for &a->b. */
7926 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7930 tree tem = TREE_OPERAND (exp, 0);
7931 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7932 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7933 return expand_expr (tem, target, tmode, modifier);
7936 case TARGET_MEM_REF:
7937 return addr_for_mem_ref (exp, as, true);
7940 /* Expand the initializer like constants above. */
7941 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7943 if (modifier < EXPAND_SUM)
7944 result = force_operand (result, target);
7948 /* The real part of the complex number is always first, therefore
7949 the address is the same as the address of the parent object. */
7952 inner = TREE_OPERAND (exp, 0);
7956 /* The imaginary part of the complex number is always second.
7957 The expression is therefore always offset by the size of the scalar type. */
7960 bitpos = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (exp)));
7961 inner = TREE_OPERAND (exp, 0);
7964 case COMPOUND_LITERAL_EXPR:
7965 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7966 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7967 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7968 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7969 the initializers aren't gimplified. */
7970 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7971 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7972 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7973 target, tmode, modifier, as);
7976 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7977 expand_expr, as that can have various side effects; LABEL_DECLs for
7978 example, may not have their DECL_RTL set yet. Expand the rtl of
7979 CONSTRUCTORs too, which should yield a memory reference for the
7980 constructor's contents. Assume language specific tree nodes can
7981 be expanded in some interesting way. */
7982 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7984 || TREE_CODE (exp) == CONSTRUCTOR
7985 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7987 result = expand_expr (exp, target, tmode,
7988 modifier == EXPAND_INITIALIZER
7989 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7991 /* If the DECL isn't in memory, then the DECL wasn't properly
7992 marked TREE_ADDRESSABLE, which will be either a front-end
7993 or a tree optimizer bug. */
7995 gcc_assert (MEM_P (result));
7996 result = XEXP (result, 0);
7998 /* ??? Is this needed anymore? */
8000 TREE_USED (exp) = 1;
8002 if (modifier != EXPAND_INITIALIZER
8003 && modifier != EXPAND_CONST_ADDRESS
8004 && modifier != EXPAND_SUM)
8005 result = force_operand (result, target);
8009 /* Pass FALSE as the last argument to get_inner_reference although
8010 we are expanding to RTL. The rationale is that we know how to
8011 handle "aligning nodes" here: we can just bypass them because
8012 they won't change the final object whose address will be returned
8013 (they actually exist only for that purpose). */
8014 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
8015 &unsignedp, &reversep, &volatilep);
8019 /* We must have made progress. */
8020 gcc_assert (inner != exp);
8022 subtarget = offset || maybe_ne (bitpos, 0) ? NULL_RTX : target;
8023 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
8024 inner alignment, force the inner to be sufficiently aligned. */
8025 if (CONSTANT_CLASS_P (inner)
8026 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
8028 inner = copy_node (inner);
8029 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
8030 SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
8031 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
8033 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
8039 if (modifier != EXPAND_NORMAL)
8040 result = force_operand (result, NULL);
8041 tmp = expand_expr (offset, NULL_RTX, tmode,
8042 modifier == EXPAND_INITIALIZER
8043 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
8045 /* expand_expr is allowed to return an object in a mode other
8046 than TMODE. If it did, we need to convert. */
8047 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
8048 tmp = convert_modes (tmode, GET_MODE (tmp),
8049 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
8050 result = convert_memory_address_addr_space (tmode, result, as);
8051 tmp = convert_memory_address_addr_space (tmode, tmp, as);
8053 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8054 result = simplify_gen_binary (PLUS, tmode, result, tmp);
8057 subtarget = maybe_ne (bitpos, 0) ? NULL_RTX : target;
8058 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
8059 1, OPTAB_LIB_WIDEN);
8063 if (maybe_ne (bitpos, 0))
8065 /* Someone beforehand should have rejected taking the address
8066 of an object that isn't byte-aligned. */
8067 poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
8068 result = convert_memory_address_addr_space (tmode, result, as);
8069 result = plus_constant (tmode, result, bytepos);
8070 if (modifier < EXPAND_SUM)
8071 result = force_operand (result, target);
8077 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
8078 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
8081 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
8082 enum expand_modifier modifier)
8084 addr_space_t as = ADDR_SPACE_GENERIC;
8085 scalar_int_mode address_mode = Pmode;
8086 scalar_int_mode pointer_mode = ptr_mode;
8090 /* Target mode of VOIDmode says "whatever's natural". */
8091 if (tmode == VOIDmode)
8092 tmode = TYPE_MODE (TREE_TYPE (exp));
8094 if (POINTER_TYPE_P (TREE_TYPE (exp)))
8096 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
8097 address_mode = targetm.addr_space.address_mode (as);
8098 pointer_mode = targetm.addr_space.pointer_mode (as);
8101 /* We can get called with some Weird Things if the user does silliness
8102 like "(short) &a". In that case, convert_memory_address won't do
8103 the right thing, so ignore the given target mode. */
8104 scalar_int_mode new_tmode = (tmode == pointer_mode
8108 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
8109 new_tmode, modifier, as);
8111 /* Despite expand_expr's claims about ignoring TMODE when not
8112 strictly convenient, things break if we don't honor it. Note
8113 that combined with the above, we only do this for pointer modes. */
8114 rmode = GET_MODE (result);
8115 if (rmode == VOIDmode)
8117 if (rmode != new_tmode)
8118 result = convert_memory_address_addr_space (new_tmode, result, as);
8123 /* Generate code for computing CONSTRUCTOR EXP.
8124 An rtx for the computed value is returned. If AVOID_TEMP_MEM
8125 is TRUE, then instead of creating a temporary variable in memory,
8126 NULL is returned and the caller needs to handle it differently.
8129 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
8130 bool avoid_temp_mem)
8132 tree type = TREE_TYPE (exp);
8133 machine_mode mode = TYPE_MODE (type);
8135 /* Try to avoid creating a temporary at all. This is possible
8136 if all of the initializer is zero.
8137 FIXME: try to handle all [0..255] initializers we can handle with memset. */
8139 if (TREE_STATIC (exp)
8140 && !TREE_ADDRESSABLE (exp)
8141 && target != 0 && mode == BLKmode
8142 && all_zeros_p (exp))
8144 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
8148 /* All elts simple constants => refer to a constant in memory. But
8149 if this is a non-BLKmode mode, let it store a field at a time
8150 since that should make a CONST_INT, CONST_WIDE_INT or
8151 CONST_DOUBLE when we fold. Likewise, if we have a target we can
8152 use, it is best to store directly into the target unless the type
8153 is large enough that memcpy will be used. If we are making an
8154 initializer and all operands are constant, put it in memory as well.
8157 FIXME: Avoid trying to fill vector constructors piece-meal.
8158 Output them with output_constant_def below unless we're sure
8159 they're zeros. This should go away when vector initializers
8160 are treated like VECTOR_CST instead of arrays. */
8161 if ((TREE_STATIC (exp)
8162 && ((mode == BLKmode
8163 && ! (target != 0 && safe_from_p (target, exp, 1)))
8164 || TREE_ADDRESSABLE (exp)
8165 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
8166 && (! can_move_by_pieces
8167 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
8169 && ! mostly_zeros_p (exp))))
8170 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
8171 && TREE_CONSTANT (exp)))
8178 constructor = expand_expr_constant (exp, 1, modifier);
8180 if (modifier != EXPAND_CONST_ADDRESS
8181 && modifier != EXPAND_INITIALIZER
8182 && modifier != EXPAND_SUM)
8183 constructor = validize_mem (constructor);
8188 /* Handle calls that pass values in multiple non-contiguous
8189 locations. The Irix 6 ABI has examples of this. */
8190 if (target == 0 || ! safe_from_p (target, exp, 1)
8191 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
8196 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
8199 store_constructor (exp, target, 0, int_expr_size (exp), false);
8204 /* expand_expr: generate code for computing expression EXP.
8205 An rtx for the computed value is returned. The value is never null.
8206 In the case of a void EXP, const0_rtx is returned.
8208 The value may be stored in TARGET if TARGET is nonzero.
8209 TARGET is just a suggestion; callers must assume that
8210 the rtx returned may not be the same as TARGET.
8212 If TARGET is CONST0_RTX, it means that the value will be ignored.
8214 If TMODE is not VOIDmode, it suggests generating the
8215 result in mode TMODE. But this is done only when convenient.
Otherwise, TMODE is ignored and the value is generated in its natural mode.
8217 TMODE is just a suggestion; callers must assume that
8218 the rtx returned may not have mode TMODE.
8220 Note that TARGET may have neither TMODE nor MODE. In that case, it
8221 probably will not be used.
8223 If MODIFIER is EXPAND_SUM then when EXP is an addition
8224 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
8225 or a nest of (PLUS ...) and (MINUS ...) where the terms are
8226 products as above, or REG or MEM, or constant.
8227 Ordinarily in such cases we would output mul or add instructions
8228 and then return a pseudo reg containing the sum.
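As an illustration (hypothetical RTL, not from any particular
target): expanding the address of a[i], where a is an array of
4-byte ints, under EXPAND_SUM could return
(plus (mult (reg i) (const_int 4)) (reg a))
so that the caller can fold the whole sum into an addressing mode
rather than emitting the multiply and add as instructions.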
8230 EXPAND_INITIALIZER is much like EXPAND_SUM except that
8231 it also marks a label as absolutely required (it can't be dead).
8232 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
8233 This is used for outputting expressions used in initializers.
8235 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
8236 with a constant address even if that address is not normally legitimate.
8237 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
8239 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
8240 a call parameter. Such targets require special care as we haven't yet
8241 marked TARGET so that it's safe from being trashed by libcalls. We
8242 don't want to use TARGET for anything but the final result;
intermediate values must go elsewhere.  Additionally, calls to
8244 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
8246 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
8247 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
8248 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
recursively.
8252 If INNER_REFERENCE_P is true, we are expanding an inner reference.
8253 In this case, we don't adjust a returned MEM rtx that wouldn't be
8254 sufficiently aligned for its mode; instead, it's up to the caller
8255 to deal with it afterwards. This is used to make sure that unaligned
8256 base objects for which out-of-bounds accesses are supported, for
8257 example record types with trailing arrays, aren't realigned behind
8258 the back of the caller.
8259 The normal operating mode is to pass FALSE for this parameter. */
8262 expand_expr_real (tree exp, rtx target, machine_mode tmode,
8263 enum expand_modifier modifier, rtx *alt_rtl,
8264 bool inner_reference_p)
8268 /* Handle ERROR_MARK before anybody tries to access its type. */
8269 if (TREE_CODE (exp) == ERROR_MARK
8270 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8272 ret = CONST0_RTX (tmode);
8273 return ret ? ret : const0_rtx;
8276 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */
8287 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8288 tree treeop1 ATTRIBUTE_UNUSED,
8289 tree treeop2 ATTRIBUTE_UNUSED)
8292 rtx op00, op01, op1, op2;
8293 enum rtx_code comparison_code;
8294 machine_mode comparison_mode;
8297 tree type = TREE_TYPE (treeop1);
8298 int unsignedp = TYPE_UNSIGNED (type);
8299 machine_mode mode = TYPE_MODE (type);
8300 machine_mode orig_mode = mode;
8301 static bool expanding_cond_expr_using_cmove = false;
/* Conditional move expansion can end up TERing two operands which,
   when recursively hitting conditional expressions, can result in
   exponential behavior if the cmove expansion ultimately fails.
   It's hardly profitable to TER a cmove into a cmove, so avoid doing
   that by failing early if we end up recursing.  */
8308 if (expanding_cond_expr_using_cmove)
8311 /* If we cannot do a conditional move on the mode, try doing it
8312 with the promoted mode. */
8313 if (!can_conditionally_move_p (mode))
8315 mode = promote_mode (type, mode, &unsignedp);
8316 if (!can_conditionally_move_p (mode))
8318 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
8321 temp = assign_temp (type, 0, 1);
8323 expanding_cond_expr_using_cmove = true;
8325 expand_operands (treeop1, treeop2,
8326 temp, &op1, &op2, EXPAND_NORMAL);
8328 if (TREE_CODE (treeop0) == SSA_NAME
8329 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8331 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8332 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8333 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8334 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8335 comparison_mode = TYPE_MODE (type);
8336 unsignedp = TYPE_UNSIGNED (type);
8337 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8339 else if (COMPARISON_CLASS_P (treeop0))
8341 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8342 enum tree_code cmpcode = TREE_CODE (treeop0);
8343 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8344 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8345 unsignedp = TYPE_UNSIGNED (type);
8346 comparison_mode = TYPE_MODE (type);
8347 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8351 op00 = expand_normal (treeop0);
8353 comparison_code = NE;
8354 comparison_mode = GET_MODE (op00);
8355 if (comparison_mode == VOIDmode)
8356 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8358 expanding_cond_expr_using_cmove = false;
8360 if (GET_MODE (op1) != mode)
8361 op1 = gen_lowpart (mode, op1);
8363 if (GET_MODE (op2) != mode)
8364 op2 = gen_lowpart (mode, op2);
8366 /* Try to emit the conditional move. */
8367 insn = emit_conditional_move (temp, comparison_code,
8368 op00, op01, comparison_mode,
/* If we could do the conditional move, emit the sequence,
   and return.  */
8376 rtx_insn *seq = get_insns ();
8379 return convert_modes (orig_mode, mode, temp, 0);
/* Otherwise discard the sequence and fall back to code with
   branches.  */
8389 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8390 enum expand_modifier modifier)
8392 rtx op0, op1, op2, temp;
8393 rtx_code_label *lab;
8397 scalar_int_mode int_mode;
8398 enum tree_code code = ops->code;
8400 rtx subtarget, original_target;
8402 bool reduce_bit_field;
8403 location_t loc = ops->location;
8404 tree treeop0, treeop1, treeop2;
8405 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8406 ? reduce_to_bit_field_precision ((expr), \
8412 mode = TYPE_MODE (type);
8413 unsignedp = TYPE_UNSIGNED (type);
8419 /* We should be called only on simple (binary or unary) expressions,
8420 exactly those that are valid in gimple expressions that aren't
8421 GIMPLE_SINGLE_RHS (or invalid). */
8422 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8423 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8424 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8426 ignore = (target == const0_rtx
8427 || ((CONVERT_EXPR_CODE_P (code)
8428 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8429 && TREE_CODE (type) == VOID_TYPE));
8431 /* We should be called only if we need the result. */
8432 gcc_assert (!ignore);
8434 /* An operation in what may be a bit-field type needs the
8435 result to be reduced to the precision of the bit-field type,
8436 which is narrower than that of the type's mode. */
8437 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8438 && !type_has_mode_precision_p (type));
8440 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8443 /* Use subtarget as the target for operand 0 of a binary operation. */
8444 subtarget = get_subtarget (target);
8445 original_target = target;
8449 case NON_LVALUE_EXPR:
8452 if (treeop0 == error_mark_node)
8455 if (TREE_CODE (type) == UNION_TYPE)
8457 tree valtype = TREE_TYPE (treeop0);
/* If both input and output are BLKmode, this conversion isn't doing
   anything except possibly changing memory attributes.  */
8461 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8463 rtx result = expand_expr (treeop0, target, tmode,
8466 result = copy_rtx (result);
8467 set_mem_attributes (result, type, 0);
8473 if (TYPE_MODE (type) != BLKmode)
8474 target = gen_reg_rtx (TYPE_MODE (type));
8476 target = assign_temp (type, 1, 1);
8480 /* Store data into beginning of memory target. */
8481 store_expr (treeop0,
8482 adjust_address (target, TYPE_MODE (valtype), 0),
8483 modifier == EXPAND_STACK_PARM,
8484 false, TYPE_REVERSE_STORAGE_ORDER (type));
8488 gcc_assert (REG_P (target)
8489 && !TYPE_REVERSE_STORAGE_ORDER (type));
8491 /* Store this field into a union of the proper type. */
8492 poly_uint64 op0_size
8493 = tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (treeop0)));
8494 poly_uint64 union_size = GET_MODE_BITSIZE (mode);
8495 store_field (target,
/* The conversion must be constructed so that
   we know at compile time how many bits
   to preserve.  */
8499 ordered_min (op0_size, union_size),
8500 0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8504 /* Return the entire union. */
8508 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8510 op0 = expand_expr (treeop0, target, VOIDmode,
8513 /* If the signedness of the conversion differs and OP0 is
8514 a promoted SUBREG, clear that indication since we now
8515 have to do the proper extension. */
8516 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8517 && GET_CODE (op0) == SUBREG)
8518 SUBREG_PROMOTED_VAR_P (op0) = 0;
8520 return REDUCE_BIT_FIELD (op0);
8523 op0 = expand_expr (treeop0, NULL_RTX, mode,
8524 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8525 if (GET_MODE (op0) == mode)
8528 /* If OP0 is a constant, just convert it into the proper mode. */
8529 else if (CONSTANT_P (op0))
8531 tree inner_type = TREE_TYPE (treeop0);
8532 machine_mode inner_mode = GET_MODE (op0);
8534 if (inner_mode == VOIDmode)
8535 inner_mode = TYPE_MODE (inner_type);
8537 if (modifier == EXPAND_INITIALIZER)
8538 op0 = lowpart_subreg (mode, op0, inner_mode);
op0 = convert_modes (mode, inner_mode, op0,
8541 TYPE_UNSIGNED (inner_type));
8544 else if (modifier == EXPAND_INITIALIZER)
8545 op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8546 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8548 else if (target == 0)
8549 op0 = convert_to_mode (mode, op0,
8550 TYPE_UNSIGNED (TREE_TYPE
8554 convert_move (target, op0,
8555 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8559 return REDUCE_BIT_FIELD (op0);
8561 case ADDR_SPACE_CONVERT_EXPR:
8563 tree treeop0_type = TREE_TYPE (treeop0);
8565 gcc_assert (POINTER_TYPE_P (type));
8566 gcc_assert (POINTER_TYPE_P (treeop0_type));
8568 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8569 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8571 /* Conversions between pointers to the same address space should
8572 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8573 gcc_assert (as_to != as_from);
8575 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8577 /* Ask target code to handle conversion between pointers
8578 to overlapping address spaces. */
8579 if (targetm.addr_space.subset_p (as_to, as_from)
8580 || targetm.addr_space.subset_p (as_from, as_to))
8582 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8586 /* For disjoint address spaces, converting anything but a null
8587 pointer invokes undefined behavior. We truncate or extend the
8588 value as if we'd converted via integers, which handles 0 as
8589 required, and all others as the programmer likely expects. */
8590 #ifndef POINTERS_EXTEND_UNSIGNED
8591 const int POINTERS_EXTEND_UNSIGNED = 1;
8593 op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8594 op0, POINTERS_EXTEND_UNSIGNED);
8600 case POINTER_PLUS_EXPR:
/* Even though the sizetype mode and the pointer's mode can be different,
   expand is able to handle this correctly and get the correct result out
   of the PLUS_EXPR code.  */
8604 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8605 if sizetype precision is smaller than pointer precision. */
8606 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8607 treeop1 = fold_convert_loc (loc, type,
8608 fold_convert_loc (loc, ssizetype,
8610 /* If sizetype precision is larger than pointer precision, truncate the
8611 offset to have matching modes. */
8612 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8613 treeop1 = fold_convert_loc (loc, type, treeop1);
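/* An illustrative example (hypothetical target): with a 32-bit
   sizetype and 64-bit pointers, an offset of (sizetype) -4 must be
   widened via ssizetype so that it becomes (const_int -4) in the
   pointer mode rather than the zero-extended 0xfffffffc.  */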
8617 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8618 something else, make sure we add the register to the constant and
8619 then to the other thing. This case can occur during strength
8620 reduction and doing it this way will produce better code if the
8621 frame pointer or argument pointer is eliminated.
8623 fold-const.c will ensure that the constant is always in the inner
8624 PLUS_EXPR, so the only case we need to do anything about is if
8625 sp, ap, or fp is our second argument, in which case we must swap
8626 the innermost first argument and our second argument. */
8628 if (TREE_CODE (treeop0) == PLUS_EXPR
8629 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8631 && (DECL_RTL (treeop1) == frame_pointer_rtx
8632 || DECL_RTL (treeop1) == stack_pointer_rtx
8633 || DECL_RTL (treeop1) == arg_pointer_rtx))
8638 /* If the result is to be ptr_mode and we are adding an integer to
8639 something, we might be forming a constant. So try to use
8640 plus_constant. If it produces a sum and we can't accept it,
8641 use force_operand. This allows P = &ARR[const] to generate
efficient code on machines where a SYMBOL_REF is not a valid address.
8645 If this is an EXPAND_SUM call, always return the sum. */
8646 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8647 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8649 if (modifier == EXPAND_STACK_PARM)
8651 if (TREE_CODE (treeop0) == INTEGER_CST
8652 && HWI_COMPUTABLE_MODE_P (mode)
8653 && TREE_CONSTANT (treeop1))
8657 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8659 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8661 /* Use wi::shwi to ensure that the constant is
8662 truncated according to the mode of OP1, then sign extended
8663 to a HOST_WIDE_INT. Using the constant directly can result
8664 in non-canonical RTL in a 64x32 cross compile. */
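/* For instance (illustrative): with a 64-bit HOST_WIDE_INT and a
   32-bit wmode, the tree constant 0x80000000 must be represented as
   (const_int -2147483648), because CONST_INTs are stored sign-extended
   from the precision of their mode; using the low bits unmodified
   would produce the non-canonical (const_int 0x80000000).  */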
8665 wc = TREE_INT_CST_LOW (treeop0);
8667 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8668 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8669 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8670 op1 = force_operand (op1, target);
8671 return REDUCE_BIT_FIELD (op1);
8674 else if (TREE_CODE (treeop1) == INTEGER_CST
8675 && HWI_COMPUTABLE_MODE_P (mode)
8676 && TREE_CONSTANT (treeop0))
8680 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8682 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8683 (modifier == EXPAND_INITIALIZER
8684 ? EXPAND_INITIALIZER : EXPAND_SUM));
8685 if (! CONSTANT_P (op0))
8687 op1 = expand_expr (treeop1, NULL_RTX,
8688 VOIDmode, modifier);
8689 /* Return a PLUS if modifier says it's OK. */
8690 if (modifier == EXPAND_SUM
8691 || modifier == EXPAND_INITIALIZER)
8692 return simplify_gen_binary (PLUS, mode, op0, op1);
8695 /* Use wi::shwi to ensure that the constant is
truncated according to the mode of OP0, then sign extended
8697 to a HOST_WIDE_INT. Using the constant directly can result
8698 in non-canonical RTL in a 64x32 cross compile. */
8699 wc = TREE_INT_CST_LOW (treeop1);
8701 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8702 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8703 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8704 op0 = force_operand (op0, target);
8705 return REDUCE_BIT_FIELD (op0);
8709 /* Use TER to expand pointer addition of a negated value
8710 as pointer subtraction. */
8711 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8712 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8713 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8714 && TREE_CODE (treeop1) == SSA_NAME
8715 && TYPE_MODE (TREE_TYPE (treeop0))
8716 == TYPE_MODE (TREE_TYPE (treeop1)))
8718 gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8721 treeop1 = gimple_assign_rhs1 (def);
8727 /* No sense saving up arithmetic to be done
8728 if it's all in the wrong mode to form part of an address.
And force_operand won't know whether to sign-extend or
zero-extend.  */
8731 if (modifier != EXPAND_INITIALIZER
8732 && (modifier != EXPAND_SUM || mode != ptr_mode))
8734 expand_operands (treeop0, treeop1,
8735 subtarget, &op0, &op1, modifier);
8736 if (op0 == const0_rtx)
8738 if (op1 == const0_rtx)
8743 expand_operands (treeop0, treeop1,
8744 subtarget, &op0, &op1, modifier);
8745 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8748 case POINTER_DIFF_EXPR:
8750 /* For initializers, we are allowed to return a MINUS of two
symbolic constants.  Here we handle all cases when both operands
are constant.  */
8753 /* Handle difference of two symbolic constants,
8754 for the sake of an initializer. */
8755 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8756 && really_constant_p (treeop0)
8757 && really_constant_p (treeop1))
8759 expand_operands (treeop0, treeop1,
8760 NULL_RTX, &op0, &op1, modifier);
8761 return simplify_gen_binary (MINUS, mode, op0, op1);
8764 /* No sense saving up arithmetic to be done
8765 if it's all in the wrong mode to form part of an address.
And force_operand won't know whether to sign-extend or
zero-extend.  */
8768 if (modifier != EXPAND_INITIALIZER
8769 && (modifier != EXPAND_SUM || mode != ptr_mode))
8772 expand_operands (treeop0, treeop1,
8773 subtarget, &op0, &op1, modifier);
8775 /* Convert A - const to A + (-const). */
8776 if (CONST_INT_P (op1))
8778 op1 = negate_rtx (mode, op1);
8779 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8784 case WIDEN_MULT_PLUS_EXPR:
8785 case WIDEN_MULT_MINUS_EXPR:
8786 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8787 op2 = expand_normal (treeop2);
8788 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8792 case WIDEN_MULT_EXPR:
/* If the first operand is constant, swap them.
8794 Thus the following special case checks need only
8795 check the second operand. */
8796 if (TREE_CODE (treeop0) == INTEGER_CST)
8797 std::swap (treeop0, treeop1);
8799 /* First, check if we have a multiplication of one signed and one
8800 unsigned operand. */
8801 if (TREE_CODE (treeop1) != INTEGER_CST
8802 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8803 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8805 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8806 this_optab = usmul_widen_optab;
8807 if (find_widening_optab_handler (this_optab, mode, innermode)
8808 != CODE_FOR_nothing)
8810 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8811 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8814 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8816 /* op0 and op1 might still be constant, despite the above
8817 != INTEGER_CST check. Handle it. */
8818 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8820 op0 = convert_modes (mode, innermode, op0, true);
8821 op1 = convert_modes (mode, innermode, op1, false);
8822 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8823 target, unsignedp));
8828 /* Check for a multiplication with matching signedness. */
8829 else if ((TREE_CODE (treeop1) == INTEGER_CST
8830 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8831 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8832 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8834 tree op0type = TREE_TYPE (treeop0);
8835 machine_mode innermode = TYPE_MODE (op0type);
8836 bool zextend_p = TYPE_UNSIGNED (op0type);
8837 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8838 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8840 if (TREE_CODE (treeop0) != INTEGER_CST)
8842 if (find_widening_optab_handler (this_optab, mode, innermode)
8843 != CODE_FOR_nothing)
8845 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8847 /* op0 and op1 might still be constant, despite the above
8848 != INTEGER_CST check. Handle it. */
8849 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8852 op0 = convert_modes (mode, innermode, op0, zextend_p);
8854 = convert_modes (mode, innermode, op1,
8855 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8856 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8860 temp = expand_widening_mult (mode, op0, op1, target,
8861 unsignedp, this_optab);
8862 return REDUCE_BIT_FIELD (temp);
8864 if (find_widening_optab_handler (other_optab, mode, innermode)
8866 && innermode == word_mode)
8869 op0 = expand_normal (treeop0);
8870 op1 = expand_normal (treeop1);
8871 /* op0 and op1 might be constants, despite the above
8872 != INTEGER_CST check. Handle it. */
8873 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8874 goto widen_mult_const;
8875 if (TREE_CODE (treeop1) == INTEGER_CST)
8876 op1 = convert_modes (mode, word_mode, op1,
8877 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8878 temp = expand_binop (mode, other_optab, op0, op1, target,
8879 unsignedp, OPTAB_LIB_WIDEN);
8880 hipart = gen_highpart (word_mode, temp);
8881 htem = expand_mult_highpart_adjust (word_mode, hipart,
8885 emit_move_insn (hipart, htem);
8886 return REDUCE_BIT_FIELD (temp);
8890 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8891 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8892 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8893 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8897 optab opt = fma_optab;
8898 gimple *def0, *def2;
/* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
   call.  */
8902 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8904 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8907 gcc_assert (fn != NULL_TREE);
8908 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8909 return expand_builtin (call_expr, target, subtarget, mode, false);
8912 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8913 /* The multiplication is commutative - look at its 2nd operand
8914 if the first isn't fed by a negate. */
8917 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8918 /* Swap operands if the 2nd operand is fed by a negate. */
8920 std::swap (treeop0, treeop1);
8922 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8927 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8930 op0 = expand_normal (gimple_assign_rhs1 (def0));
8931 op2 = expand_normal (gimple_assign_rhs1 (def2));
8934 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8937 op0 = expand_normal (gimple_assign_rhs1 (def0));
8940 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8943 op2 = expand_normal (gimple_assign_rhs1 (def2));
8947 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8949 op2 = expand_normal (treeop2);
8950 op1 = expand_normal (treeop1);
8952 return expand_ternary_op (TYPE_MODE (type), opt,
8953 op0, op1, op2, target, 0);
/* If this is a fixed-point operation, then we cannot use the code
   below because "expand_mult" doesn't support sat/no-sat fixed-point
   multiplications.  */
8960 if (ALL_FIXED_POINT_MODE_P (mode))
/* If the first operand is constant, swap them.
8964 Thus the following special case checks need only
8965 check the second operand. */
8966 if (TREE_CODE (treeop0) == INTEGER_CST)
8967 std::swap (treeop0, treeop1);
8969 /* Attempt to return something suitable for generating an
8970 indexed address, for machines that support that. */
8972 if (modifier == EXPAND_SUM && mode == ptr_mode
8973 && tree_fits_shwi_p (treeop1))
8975 tree exp1 = treeop1;
8977 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8981 op0 = force_operand (op0, NULL_RTX);
8983 op0 = copy_to_mode_reg (mode, op0);
8985 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8986 gen_int_mode (tree_to_shwi (exp1),
8987 TYPE_MODE (TREE_TYPE (exp1)))));
8990 if (modifier == EXPAND_STACK_PARM)
8993 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8994 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8996 case TRUNC_MOD_EXPR:
8997 case FLOOR_MOD_EXPR:
8999 case ROUND_MOD_EXPR:
9001 case TRUNC_DIV_EXPR:
9002 case FLOOR_DIV_EXPR:
9004 case ROUND_DIV_EXPR:
9005 case EXACT_DIV_EXPR:
/* If this is a fixed-point operation, then we cannot use the code
   below because "expand_divmod" doesn't support sat/no-sat fixed-point
   divisions.  */
9010 if (ALL_FIXED_POINT_MODE_P (mode))
9013 if (modifier == EXPAND_STACK_PARM)
/* Possible optimization: compute the dividend with EXPAND_SUM;
   then, if the divisor is constant, we can optimize the case
   where some terms of the dividend have coefficients divisible by it.  */
9018 expand_operands (treeop0, treeop1,
9019 subtarget, &op0, &op1, EXPAND_NORMAL);
9020 bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
9021 || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
9022 if (SCALAR_INT_MODE_P (mode)
9024 && get_range_pos_neg (treeop0) == 1
9025 && get_range_pos_neg (treeop1) == 1)
9027 /* If both arguments are known to be positive when interpreted
9028 as signed, we can expand it as both signed and unsigned
9029 division or modulo. Choose the cheaper sequence in that case. */
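/* A hypothetical example: for X % 4 with X known to be nonnegative,
   the unsigned sequence is a single AND with 3, whereas the signed
   sequence needs extra fixup instructions for negative inputs; the
   cost comparison below would then pick the unsigned sequence.  */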
9030 bool speed_p = optimize_insn_for_speed_p ();
9031 do_pending_stack_adjust ();
9033 rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
9034 rtx_insn *uns_insns = get_insns ();
9037 rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
9038 rtx_insn *sgn_insns = get_insns ();
9040 unsigned uns_cost = seq_cost (uns_insns, speed_p);
9041 unsigned sgn_cost = seq_cost (sgn_insns, speed_p);
/* If the costs are the same then use as tie breaker the other
   cost, i.e. size if we optimized for speed and vice versa.  */
9045 if (uns_cost == sgn_cost)
9047 uns_cost = seq_cost (uns_insns, !speed_p);
9048 sgn_cost = seq_cost (sgn_insns, !speed_p);
9051 if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
9053 emit_insn (uns_insns);
9056 emit_insn (sgn_insns);
9059 return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
9064 case MULT_HIGHPART_EXPR:
9065 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
9066 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
9070 case FIXED_CONVERT_EXPR:
9071 op0 = expand_normal (treeop0);
9072 if (target == 0 || modifier == EXPAND_STACK_PARM)
9073 target = gen_reg_rtx (mode);
9075 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
9076 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
9077 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
9078 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
9080 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
9083 case FIX_TRUNC_EXPR:
9084 op0 = expand_normal (treeop0);
9085 if (target == 0 || modifier == EXPAND_STACK_PARM)
9086 target = gen_reg_rtx (mode);
9087 expand_fix (target, op0, unsignedp);
9091 op0 = expand_normal (treeop0);
9092 if (target == 0 || modifier == EXPAND_STACK_PARM)
9093 target = gen_reg_rtx (mode);
9094 /* expand_float can't figure out what to do if FROM has VOIDmode.
9095 So give it the correct mode. With -O, cse will optimize this. */
9096 if (GET_MODE (op0) == VOIDmode)
9097 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
9099 expand_float (target, op0,
9100 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9104 op0 = expand_expr (treeop0, subtarget,
9105 VOIDmode, EXPAND_NORMAL);
9106 if (modifier == EXPAND_STACK_PARM)
9108 temp = expand_unop (mode,
9109 optab_for_tree_code (NEGATE_EXPR, type,
9113 return REDUCE_BIT_FIELD (temp);
9116 op0 = expand_expr (treeop0, subtarget,
9117 VOIDmode, EXPAND_NORMAL);
9118 if (modifier == EXPAND_STACK_PARM)
9121 /* ABS_EXPR is not valid for complex arguments. */
9122 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9123 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
9125 /* Unsigned abs is simply the operand. Testing here means we don't
9126 risk generating incorrect code below. */
9127 if (TYPE_UNSIGNED (type))
9130 return expand_abs (mode, op0, target, unsignedp,
9131 safe_from_p (target, treeop0, 1));
9135 target = original_target;
9137 || modifier == EXPAND_STACK_PARM
9138 || (MEM_P (target) && MEM_VOLATILE_P (target))
9139 || GET_MODE (target) != mode
9141 && REGNO (target) < FIRST_PSEUDO_REGISTER))
9142 target = gen_reg_rtx (mode);
9143 expand_operands (treeop0, treeop1,
9144 target, &op0, &op1, EXPAND_NORMAL);
9146 /* First try to do it with a special MIN or MAX instruction.
If that does not win, use a conditional jump to select the proper
value.  */
9149 this_optab = optab_for_tree_code (code, type, optab_default);
9150 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
/* For vector MIN <x, y>, expand it as a VEC_COND_EXPR <x <= y, x, y>
   and similarly for MAX <x, y>.  */
9157 if (VECTOR_TYPE_P (type))
9159 tree t0 = make_tree (type, op0);
9160 tree t1 = make_tree (type, op1);
9161 tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
9163 return expand_vec_cond_expr (type, comparison, t0, t1,
/* At this point, a MEM target is no longer useful; we will get better
   code without it.  */
9170 if (! REG_P (target))
9171 target = gen_reg_rtx (mode);
9173 /* If op1 was placed in target, swap op0 and op1. */
9174 if (target != op0 && target == op1)
9175 std::swap (op0, op1);
9177 /* We generate better code and avoid problems with op1 mentioning
9178 target by forcing op1 into a pseudo if it isn't a constant. */
9179 if (! CONSTANT_P (op1))
9180 op1 = force_reg (mode, op1);
9183 enum rtx_code comparison_code;
9186 if (code == MAX_EXPR)
9187 comparison_code = unsignedp ? GEU : GE;
9189 comparison_code = unsignedp ? LEU : LE;
9191 /* Canonicalize to comparisons against 0. */
9192 if (op1 == const1_rtx)
9194 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
9195 or (a != 0 ? a : 1) for unsigned.
9196 For MIN we are safe converting (a <= 1 ? a : 1)
9197 into (a <= 0 ? a : 1) */
9198 cmpop1 = const0_rtx;
9199 if (code == MAX_EXPR)
9200 comparison_code = unsignedp ? NE : GT;
9202 if (op1 == constm1_rtx && !unsignedp)
9204 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
9205 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
9206 cmpop1 = const0_rtx;
9207 if (code == MIN_EXPR)
9208 comparison_code = LT;
9211 /* Use a conditional move if possible. */
9212 if (can_conditionally_move_p (mode))
9218 /* Try to emit the conditional move. */
9219 insn = emit_conditional_move (target, comparison_code,
/* If we could do the conditional move, emit the sequence,
   and return.  */
9228 rtx_insn *seq = get_insns ();
/* Otherwise discard the sequence and fall back to code with
   branches.  */
9240 emit_move_insn (target, op0);
9242 lab = gen_label_rtx ();
9243 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
9244 unsignedp, mode, NULL_RTX, NULL, lab,
9245 profile_probability::uninitialized ());
9247 emit_move_insn (target, op1);
9252 op0 = expand_expr (treeop0, subtarget,
9253 VOIDmode, EXPAND_NORMAL);
9254 if (modifier == EXPAND_STACK_PARM)
/* In case we have to reduce the result to bitfield precision
   for an unsigned bitfield, expand this as XOR with a proper constant
   instead.  */
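/* E.g. for a 3-bit unsigned bit-field, ~X is expanded as X ^ 7:
   XORing with the mask of the field's precision flips exactly the
   bits that are live in the bit-field.  */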
9259 if (reduce_bit_field && TYPE_UNSIGNED (type))
9261 int_mode = SCALAR_INT_TYPE_MODE (type);
9262 wide_int mask = wi::mask (TYPE_PRECISION (type),
9263 false, GET_MODE_PRECISION (int_mode));
9265 temp = expand_binop (int_mode, xor_optab, op0,
9266 immed_wide_int_const (mask, int_mode),
9267 target, 1, OPTAB_LIB_WIDEN);
9270 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
9274 /* ??? Can optimize bitwise operations with one arg constant.
9275 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
9276 and (a bitwise1 b) bitwise2 b (etc)
but that is probably not worthwhile.  */
9286 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
9287 || type_has_mode_precision_p (type));
/* If this is a fixed-point operation, then we cannot use the code
   below because "expand_shift" doesn't support sat/no-sat fixed-point
   shifts.  */
9296 if (ALL_FIXED_POINT_MODE_P (mode))
9299 if (! safe_from_p (subtarget, treeop1, 1))
9301 if (modifier == EXPAND_STACK_PARM)
9303 op0 = expand_expr (treeop0, subtarget,
9304 VOIDmode, EXPAND_NORMAL);
/* Left shift optimization when shifting across the word_size boundary.

   If mode == GET_MODE_WIDER_MODE (word_mode), then normally there is no
   native instruction to support this wide-mode left shift.  Given the
   scenario below:

   Type A = (Type) B << C

   | dest_high | dest_low |

   if the shift amount C causes part of B to be shifted across the word
   size boundary, i.e. part of B is shifted into the high half of the
   destination register while the rest remains in the low half, then GCC
   will use the following left shift expansion:
9325 1. Initialize dest_low to B.
9326 2. Initialize every bit of dest_high to the sign bit of B.
9327 3. Logic left shift dest_low by C bit to finalize dest_low.
9328 The value of dest_low before this shift is kept in a temp D.
9329 4. Logic left shift dest_high by C.
9330 5. Logic right shift D by (word_size - C).
9331 6. Or the result of 4 and 5 to finalize dest_high.
However, by checking the gimple statements, if operand B comes from a
sign extension, then we can simplify the above expansion to:

1. dest_high = src_low >> (word_size - C).
2. dest_low = src_low << C.

A single arithmetic right shift accomplishes steps 2, 4, 5 and 6 of the
general expansion, so we reduce the number of steps from 6 to 2.
9344 The case is similar for zero extension, except that we
9345 initialize dest_high to zero rather than copies of the sign
bit from B.  Furthermore, we need to use a logical right shift
in this case.
9349 The choice of sign-extension versus zero-extension is
9350 determined entirely by whether or not B is signed and is
9351 independent of the current setting of unsignedp. */
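/* A worked example with hypothetical numbers: with a 64-bit
   word_mode, a 128-bit int_mode, B sign-extended from a 32-bit rmode
   and C = 40, we have 40 + 32 >= 64, so the shift crosses the word
   boundary and the simplified sequence is
     dest_high = src_low >> (64 - 40);  (arithmetic right shift)
     dest_low  = src_low << 40;  */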
9354 if (code == LSHIFT_EXPR
9357 && GET_MODE_2XWIDER_MODE (word_mode).exists (&int_mode)
9359 && TREE_CONSTANT (treeop1)
9360 && TREE_CODE (treeop0) == SSA_NAME)
9362 gimple *def = SSA_NAME_DEF_STMT (treeop0);
9363 if (is_gimple_assign (def)
9364 && gimple_assign_rhs_code (def) == NOP_EXPR)
9366 scalar_int_mode rmode = SCALAR_INT_TYPE_MODE
9367 (TREE_TYPE (gimple_assign_rhs1 (def)));
9369 if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (int_mode)
9370 && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
9371 && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
9372 >= GET_MODE_BITSIZE (word_mode)))
9374 rtx_insn *seq, *seq_old;
9375 poly_uint64 high_off = subreg_highpart_offset (word_mode,
9377 bool extend_unsigned
9378 = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
9379 rtx low = lowpart_subreg (word_mode, op0, int_mode);
9380 rtx dest_low = lowpart_subreg (word_mode, target, int_mode);
9381 rtx dest_high = simplify_gen_subreg (word_mode, target,
9382 int_mode, high_off);
9383 HOST_WIDE_INT ramount = (BITS_PER_WORD
9384 - TREE_INT_CST_LOW (treeop1));
9385 tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
9388 /* dest_high = src_low >> (word_size - C). */
9389 temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
9392 if (temp != dest_high)
9393 emit_move_insn (dest_high, temp);
9395 /* dest_low = src_low << C. */
9396 temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
9397 treeop1, dest_low, unsignedp);
9398 if (temp != dest_low)
9399 emit_move_insn (dest_low, temp);
9405 if (have_insn_for (ASHIFT, int_mode))
9407 bool speed_p = optimize_insn_for_speed_p ();
9409 rtx ret_old = expand_variable_shift (code, int_mode,
9414 seq_old = get_insns ();
9416 if (seq_cost (seq, speed_p)
9417 >= seq_cost (seq_old, speed_p))
9428 if (temp == NULL_RTX)
9429 temp = expand_variable_shift (code, mode, op0, treeop1, target,
9431 if (code == LSHIFT_EXPR)
9432 temp = REDUCE_BIT_FIELD (temp);
9436 /* Could determine the answer when only additive constants differ. Also,
9437 the addition of one can be handled by changing the condition. */
9444 case UNORDERED_EXPR:
9453 temp = do_store_flag (ops,
9454 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9455 tmode != VOIDmode ? tmode : mode);
9459 /* Use a compare and a jump for BLKmode comparisons, or for function
type comparisons if have_canonicalize_funcptr_for_compare.  */
9463 || modifier == EXPAND_STACK_PARM
9464 || ! safe_from_p (target, treeop0, 1)
9465 || ! safe_from_p (target, treeop1, 1)
9466 /* Make sure we don't have a hard reg (such as function's return
9467 value) live across basic blocks, if not optimizing. */
9468 || (!optimize && REG_P (target)
9469 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9470 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9472 emit_move_insn (target, const0_rtx);
9474 rtx_code_label *lab1 = gen_label_rtx ();
9475 jumpifnot_1 (code, treeop0, treeop1, lab1,
9476 profile_probability::uninitialized ());
9478 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9479 emit_move_insn (target, constm1_rtx);
9481 emit_move_insn (target, const1_rtx);
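/* A signed 1-bit bit-field can only hold the values 0 and -1, which
   is why the "true" result stored above must be constm1_rtx rather
   than const1_rtx for such types.  */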
/* Get the rtx of the operands.  */
9488 op0 = expand_normal (treeop0);
9489 op1 = expand_normal (treeop1);
9492 target = gen_reg_rtx (TYPE_MODE (type));
9494 /* If target overlaps with op1, then either we need to force
9495 op1 into a pseudo (if target also overlaps with op0),
9496 or write the complex parts in reverse order. */
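/* E.g. (illustrative): if target is (concat:DC (reg:DF 1) (reg:DF 2))
   and op1 mentions (reg:DF 1), writing the real part first would
   clobber op1 before it is read, so the imaginary part is written
   first instead.  */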
9497 switch (GET_CODE (target))
9500 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9502 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9504 complex_expr_force_op1:
9505 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9506 emit_move_insn (temp, op1);
9510 complex_expr_swap_order:
/* Move the imaginary (op1) and real (op0) parts to their
   location.  */
9513 write_complex_part (target, op1, true);
9514 write_complex_part (target, op0, false);
9520 temp = adjust_address_nv (target,
9521 GET_MODE_INNER (GET_MODE (target)), 0);
9522 if (reg_overlap_mentioned_p (temp, op1))
9524 scalar_mode imode = GET_MODE_INNER (GET_MODE (target));
9525 temp = adjust_address_nv (target, imode,
9526 GET_MODE_SIZE (imode));
9527 if (reg_overlap_mentioned_p (temp, op0))
9528 goto complex_expr_force_op1;
9529 goto complex_expr_swap_order;
9533 if (reg_overlap_mentioned_p (target, op1))
9535 if (reg_overlap_mentioned_p (target, op0))
9536 goto complex_expr_force_op1;
9537 goto complex_expr_swap_order;
9542 /* Move the real (op0) and imaginary (op1) parts to their location. */
9543 write_complex_part (target, op0, false);
9544 write_complex_part (target, op1, true);
9548 case WIDEN_SUM_EXPR:
9550 tree oprnd0 = treeop0;
9551 tree oprnd1 = treeop1;
9553 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9554 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9559 case VEC_UNPACK_HI_EXPR:
9560 case VEC_UNPACK_LO_EXPR:
9562 op0 = expand_normal (treeop0);
9563 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9569 case VEC_UNPACK_FLOAT_HI_EXPR:
9570 case VEC_UNPACK_FLOAT_LO_EXPR:
9572 op0 = expand_normal (treeop0);
/* The signedness is determined from the input operand.  */
9574 temp = expand_widen_pattern_expr
9575 (ops, op0, NULL_RTX, NULL_RTX,
9576 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9582 case VEC_WIDEN_MULT_HI_EXPR:
9583 case VEC_WIDEN_MULT_LO_EXPR:
9584 case VEC_WIDEN_MULT_EVEN_EXPR:
9585 case VEC_WIDEN_MULT_ODD_EXPR:
9586 case VEC_WIDEN_LSHIFT_HI_EXPR:
9587 case VEC_WIDEN_LSHIFT_LO_EXPR:
9588 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9589 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9591 gcc_assert (target);
9594 case VEC_PACK_TRUNC_EXPR:
9595 case VEC_PACK_SAT_EXPR:
9596 case VEC_PACK_FIX_TRUNC_EXPR:
9597 mode = TYPE_MODE (TREE_TYPE (treeop0));
9602 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9603 vec_perm_builder sel;
9604 if (TREE_CODE (treeop2) == VECTOR_CST
9605 && tree_to_vec_perm_builder (&sel, treeop2))
9607 machine_mode sel_mode = TYPE_MODE (TREE_TYPE (treeop2));
9608 temp = expand_vec_perm_const (mode, op0, op1, sel,
9613 op2 = expand_normal (treeop2);
9614 temp = expand_vec_perm_var (mode, op0, op1, op2, target);
9622 tree oprnd0 = treeop0;
9623 tree oprnd1 = treeop1;
9624 tree oprnd2 = treeop2;
9627 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9628 op2 = expand_normal (oprnd2);
9629 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9636 tree oprnd0 = treeop0;
9637 tree oprnd1 = treeop1;
9638 tree oprnd2 = treeop2;
9641 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9642 op2 = expand_normal (oprnd2);
9643 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9648 case REALIGN_LOAD_EXPR:
9650 tree oprnd0 = treeop0;
9651 tree oprnd1 = treeop1;
9652 tree oprnd2 = treeop2;
9655 this_optab = optab_for_tree_code (code, type, optab_default);
9656 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9657 op2 = expand_normal (oprnd2);
9658 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9666 /* A COND_EXPR with its type being VOID_TYPE represents a
9667 conditional jump and is handled in
9668 expand_gimple_cond_expr. */
9669 gcc_assert (!VOID_TYPE_P (type));
9671 /* Note that COND_EXPRs whose type is a structure or union
9672 are required to be constructed to contain assignments of
9673 a temporary variable, so that we can evaluate them here
9674 for side effect only. If type is void, we must do likewise. */
9676 gcc_assert (!TREE_ADDRESSABLE (type)
9678 && TREE_TYPE (treeop1) != void_type_node
9679 && TREE_TYPE (treeop2) != void_type_node);
9681 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9685 /* If we are not to produce a result, we have no target. Otherwise,
9686 if a target was specified use it; it will not be used as an
intermediate target unless it is safe.  If no target, use a
temporary.  */
9690 if (modifier != EXPAND_STACK_PARM
9692 && safe_from_p (original_target, treeop0, 1)
9693 && GET_MODE (original_target) == mode
9694 && !MEM_P (original_target))
9695 temp = original_target;
9697 temp = assign_temp (type, 0, 1);
9699 do_pending_stack_adjust ();
9701 rtx_code_label *lab0 = gen_label_rtx ();
9702 rtx_code_label *lab1 = gen_label_rtx ();
9703 jumpifnot (treeop0, lab0,
9704 profile_probability::uninitialized ());
9705 store_expr (treeop1, temp,
9706 modifier == EXPAND_STACK_PARM,
9709 emit_jump_insn (targetm.gen_jump (lab1));
9712 store_expr (treeop2, temp,
9713 modifier == EXPAND_STACK_PARM,
9722 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9725 case VEC_DUPLICATE_EXPR:
9726 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
9727 target = expand_vector_broadcast (mode, op0);
9728 gcc_assert (target);
9731 case VEC_SERIES_EXPR:
9732 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, modifier);
9733 return expand_vec_series_expr (mode, op0, op1, target);
9735 case BIT_INSERT_EXPR:
9737 unsigned bitpos = tree_to_uhwi (treeop2);
9739 if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
9740 bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
9742 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
9743 rtx op0 = expand_normal (treeop0);
9744 rtx op1 = expand_normal (treeop1);
9745 rtx dst = gen_reg_rtx (mode);
9746 emit_move_insn (dst, op0);
9747 store_bit_field (dst, bitsize, bitpos, 0, 0,
9748 TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
9756 /* Here to do an ordinary binary operator. */
9758 expand_operands (treeop0, treeop1,
9759 subtarget, &op0, &op1, EXPAND_NORMAL);
9761 this_optab = optab_for_tree_code (code, type, optab_default);
9763 if (modifier == EXPAND_STACK_PARM)
9765 temp = expand_binop (mode, this_optab, op0, op1, target,
9766 unsignedp, OPTAB_LIB_WIDEN);
/* Bitwise operations do not need bitfield reduction as we expect their
   operands to be properly truncated.  */
9770 if (code == BIT_XOR_EXPR
9771 || code == BIT_AND_EXPR
9772 || code == BIT_IOR_EXPR)
9774 return REDUCE_BIT_FIELD (temp);
9776 #undef REDUCE_BIT_FIELD
/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because they never
   lead to constant expressions.  */
9784 stmt_is_replaceable_p (gimple *stmt)
9786 if (ssa_is_replaceable_p (stmt))
9788 /* Don't move around loads. */
9789 if (!gimple_assign_single_p (stmt)
9790 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9797 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9798 enum expand_modifier modifier, rtx *alt_rtl,
9799 bool inner_reference_p)
9801 rtx op0, op1, temp, decl_rtl;
9804 machine_mode mode, dmode;
9805 enum tree_code code = TREE_CODE (exp);
9806 rtx subtarget, original_target;
9809 bool reduce_bit_field;
9810 location_t loc = EXPR_LOCATION (exp);
9811 struct separate_ops ops;
9812 tree treeop0, treeop1, treeop2;
9813 tree ssa_name = NULL_TREE;
9816 type = TREE_TYPE (exp);
9817 mode = TYPE_MODE (type);
9818 unsignedp = TYPE_UNSIGNED (type);
9820 treeop0 = treeop1 = treeop2 = NULL_TREE;
9821 if (!VL_EXP_CLASS_P (exp))
9822 switch (TREE_CODE_LENGTH (code))
9825 case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
9826 case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
9827 case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
9837 ignore = (target == const0_rtx
9838 || ((CONVERT_EXPR_CODE_P (code)
9839 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9840 && TREE_CODE (type) == VOID_TYPE));
9842 /* An operation in what may be a bit-field type needs the
9843 result to be reduced to the precision of the bit-field type,
9844 which is narrower than that of the type's mode. */
9845 reduce_bit_field = (!ignore
9846 && INTEGRAL_TYPE_P (type)
9847 && !type_has_mode_precision_p (type));
9849 /* If we are going to ignore this result, we need only do something
9850 if there is a side-effect somewhere in the expression. If there
9851 is, short-circuit the most common cases here. Note that we must
9852 not call expand_expr with anything but const0_rtx in case this
9853 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9857 if (! TREE_SIDE_EFFECTS (exp))
9860 /* Ensure we reference a volatile object even if value is ignored, but
9861 don't do this if all we are doing is taking its address. */
9862 if (TREE_THIS_VOLATILE (exp)
9863 && TREE_CODE (exp) != FUNCTION_DECL
9864 && mode != VOIDmode && mode != BLKmode
9865 && modifier != EXPAND_CONST_ADDRESS)
9867 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9873 if (TREE_CODE_CLASS (code) == tcc_unary
9874 || code == BIT_FIELD_REF
9875 || code == COMPONENT_REF
9876 || code == INDIRECT_REF)
9877 return expand_expr (treeop0, const0_rtx, VOIDmode,
9880 else if (TREE_CODE_CLASS (code) == tcc_binary
9881 || TREE_CODE_CLASS (code) == tcc_comparison
9882 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9884 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9885 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9892 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9895 /* Use subtarget as the target for operand 0 of a binary operation. */
9896 subtarget = get_subtarget (target);
9897 original_target = target;
9903 tree function = decl_function_context (exp);
9905 temp = label_rtx (exp);
9906 temp = gen_rtx_LABEL_REF (Pmode, temp);
9908 if (function != current_function_decl
9910 LABEL_REF_NONLOCAL_P (temp) = 1;
9912 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
/* ??? ivopts calls the expander without any preparation from
   out-of-ssa.  So fake instructions as if this were an access to the
   base variable.  This unnecessarily allocates a pseudo; see how we can
   reuse it, if partition base vars have it set already.  */
9921 if (!currently_expanding_to_rtl)
9923 tree var = SSA_NAME_VAR (exp);
9924 if (var && DECL_RTL_SET_P (var))
9925 return DECL_RTL (var);
9926 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9927 LAST_VIRTUAL_REGISTER + 1);
9930 g = get_gimple_for_ssa_name (exp);
9931 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9933 && modifier == EXPAND_INITIALIZER
9934 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9935 && (optimize || !SSA_NAME_VAR (exp)
9936 || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9937 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9938 g = SSA_NAME_DEF_STMT (exp);
9942 location_t saved_loc = curr_insn_location ();
9943 location_t loc = gimple_location (g);
9944 if (loc != UNKNOWN_LOCATION)
9945 set_curr_insn_location (loc);
9946 ops.code = gimple_assign_rhs_code (g);
9947 switch (get_gimple_rhs_class (ops.code))
9949 case GIMPLE_TERNARY_RHS:
9950 ops.op2 = gimple_assign_rhs3 (g);
9952 case GIMPLE_BINARY_RHS:
9953 ops.op1 = gimple_assign_rhs2 (g);
/* Try to expand a conditional compare.  */
9956 if (targetm.gen_ccmp_first)
9958 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9959 r = expand_ccmp_expr (g, mode);
9964 case GIMPLE_UNARY_RHS:
9965 ops.op0 = gimple_assign_rhs1 (g);
9966 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9968 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9970 case GIMPLE_SINGLE_RHS:
9972 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9973 tmode, modifier, alt_rtl,
9980 set_curr_insn_location (saved_loc);
9981 if (REG_P (r) && !REG_EXPR (r))
9982 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9987 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9988 exp = SSA_NAME_VAR (ssa_name);
9989 goto expand_decl_rtl;
9993 /* If a static var's type was incomplete when the decl was written,
9994 but the type is complete now, lay out the decl now. */
9995 if (DECL_SIZE (exp) == 0
9996 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9997 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9998 layout_decl (exp, 0);
10002 case FUNCTION_DECL:
10004 decl_rtl = DECL_RTL (exp);
10006 gcc_assert (decl_rtl);
/* DECL_MODE might change when TYPE_MODE depends on attribute target
   settings for VECTOR_TYPE_P, which might switch for the function.  */
10010 if (currently_expanding_to_rtl
10011 && code == VAR_DECL && MEM_P (decl_rtl)
10012 && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
10013 decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
10015 decl_rtl = copy_rtx (decl_rtl);
10017 /* Record writes to register variables. */
10018 if (modifier == EXPAND_WRITE
10019 && REG_P (decl_rtl)
10020 && HARD_REGISTER_P (decl_rtl))
10021 add_to_hard_reg_set (&crtl->asm_clobbers,
10022 GET_MODE (decl_rtl), REGNO (decl_rtl));
/* Ensure the variable is marked as used even if it doesn't go through
   a parser.  If it hasn't been used yet, write out an external
   definition.  */
10028 TREE_USED (exp) = 1;
10030 /* Show we haven't gotten RTL for this yet. */
10033 /* Variables inherited from containing functions should have
10034 been lowered by this point. */
10036 context = decl_function_context (exp);
10038 || SCOPE_FILE_SCOPE_P (context)
10039 || context == current_function_decl
10040 || TREE_STATIC (exp)
10041 || DECL_EXTERNAL (exp)
10042 /* ??? C++ creates functions that are not TREE_STATIC. */
10043 || TREE_CODE (exp) == FUNCTION_DECL);
10045 /* This is the case of an array whose size is to be determined
10046 from its initializer, while the initializer is still being parsed.
10047 ??? We aren't parsing while expanding anymore. */
10049 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
10050 temp = validize_mem (decl_rtl);
/* If DECL_RTL is memory, we are in the normal case; if the
   address is not valid, get the address into a register.  */
10055 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
10058 *alt_rtl = decl_rtl;
10059 decl_rtl = use_anchored_address (decl_rtl);
10060 if (modifier != EXPAND_CONST_ADDRESS
10061 && modifier != EXPAND_SUM
10062 && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
10063 : GET_MODE (decl_rtl),
10064 XEXP (decl_rtl, 0),
10065 MEM_ADDR_SPACE (decl_rtl)))
10066 temp = replace_equiv_address (decl_rtl,
10067 copy_rtx (XEXP (decl_rtl, 0)));
10070 /* If we got something, return it. But first, set the alignment
10071 if the address is a register. */
10074 if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
10075 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
10081 dmode = DECL_MODE (exp);
10083 dmode = TYPE_MODE (TREE_TYPE (ssa_name));
10085 /* If the mode of DECL_RTL does not match that of the decl,
10086 there are two cases: we are dealing with a BLKmode value
10087 that is returned in a register, or we are dealing with
10088 a promoted value. In the latter case, return a SUBREG
10089 of the wanted mode, but mark it so that we know that it
10090 was already extended. */
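/* For example (hypothetical target): if PROMOTE_MODE widens HImode
   variables to SImode registers, a short decl has DECL_MODE HImode but
   lives in an SImode reg; we then return (subreg:HI (reg:SI ...) 0)
   with SUBREG_PROMOTED_VAR_P set so later code knows the upper bits
   are already extended.  */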
10091 if (REG_P (decl_rtl)
10092 && dmode != BLKmode
10093 && GET_MODE (decl_rtl) != dmode)
10095 machine_mode pmode;
10097 /* Get the signedness to be used for this variable. Ensure we get
10098 the same mode we got when the variable was declared. */
10099 if (code != SSA_NAME)
10100 pmode = promote_decl_mode (exp, &unsignedp);
10101 else if ((g = SSA_NAME_DEF_STMT (ssa_name))
10102 && gimple_code (g) == GIMPLE_CALL
10103 && !gimple_call_internal_p (g))
10104 pmode = promote_function_mode (type, mode, &unsignedp,
10105 gimple_call_fntype (g),
10108 pmode = promote_ssa_mode (ssa_name, &unsignedp);
10109 gcc_assert (GET_MODE (decl_rtl) == pmode);
10111 temp = gen_lowpart_SUBREG (mode, decl_rtl);
10112 SUBREG_PROMOTED_VAR_P (temp) = 1;
10113 SUBREG_PROMOTED_SET (temp, unsignedp);
10121 /* Given that TYPE_PRECISION (type) is not always equal to
10122 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
the former to the latter according to the signedness of the
type.  */
10125 scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
10126 temp = immed_wide_int_const
10127 (wi::to_wide (exp, GET_MODE_PRECISION (mode)), mode);
10133 tree tmp = NULL_TREE;
10134 if (VECTOR_MODE_P (mode))
10135 return const_vector_from_tree (exp);
10136 scalar_int_mode int_mode;
10137 if (is_int_mode (mode, &int_mode))
10139 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
10140 return const_scalar_mask_from_tree (int_mode, exp);
10144 = lang_hooks.types.type_for_mode (int_mode, 1);
10146 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
10147 type_for_mode, exp);
10152 vec<constructor_elt, va_gc> *v;
10153 /* Constructors need to be fixed-length. FIXME. */
10154 unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
10155 vec_alloc (v, nunits);
10156 for (unsigned int i = 0; i < nunits; ++i)
10157 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
10158 tmp = build_constructor (type, v);
10160 return expand_expr (tmp, ignore ? const0_rtx : target,
10165 if (modifier == EXPAND_WRITE)
/* Writing into CONST_DECL is always invalid, but handle it
   gracefully.  */
10169 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
10170 scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
10171 op0 = expand_expr_addr_expr_1 (exp, NULL_RTX, address_mode,
10172 EXPAND_NORMAL, as);
10173 op0 = memory_address_addr_space (mode, op0, as);
10174 temp = gen_rtx_MEM (mode, op0);
10175 set_mem_addr_space (temp, as);
10178 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
10181 /* If optimized, generate immediate CONST_DOUBLE
10182 which will be turned into memory by reload if necessary.
10184 We used to force a register so that loop.c could see it. But
10185 this does not allow gen_* patterns to perform optimizations with
10186 the constants. It also produces two insns in cases like "x = 1.0;".
10187 On most machines, floating-point constants are not permitted in
10188 many insns, so we'd end up copying it to a register in any case.
10190 Now, we do the copying in expand_binop, if appropriate. */
10191 return const_double_from_real_value (TREE_REAL_CST (exp),
10192 TYPE_MODE (TREE_TYPE (exp)));
10195 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
10196 TYPE_MODE (TREE_TYPE (exp)));
10199 /* Handle evaluating a complex constant in a CONCAT target. */
10200 if (original_target && GET_CODE (original_target) == CONCAT)
10202 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
10205 rtarg = XEXP (original_target, 0);
10206 itarg = XEXP (original_target, 1);
10208 /* Move the real and imaginary parts separately. */
10209 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
10210 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
10213 emit_move_insn (rtarg, op0);
10215 emit_move_insn (itarg, op1);
10217 return original_target;
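/* Illustrative example (editorial sketch): expanding a '_Complex
   double' value into (concat:DC (reg:DF 100) (reg:DF 101)) expands
   the real part directly into (reg:DF 100) and the imaginary part
   into (reg:DF 101); no whole-complex temporary is needed.  */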
10223 temp = expand_expr_constant (exp, 1, modifier);
10225 /* temp contains a constant address.
10226 On RISC machines where a constant address isn't valid,
10227 make some insns to get that address into a register. */
10228 if (modifier != EXPAND_CONST_ADDRESS
10229 && modifier != EXPAND_INITIALIZER
10230 && modifier != EXPAND_SUM
10231 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
10232 MEM_ADDR_SPACE (temp)))
10233 return replace_equiv_address (temp,
10234 copy_rtx (XEXP (temp, 0)));
10238 return immed_wide_int_const (poly_int_cst_value (exp), mode);
10242 tree val = treeop0;
10243 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
10244 inner_reference_p);
10246 if (!SAVE_EXPR_RESOLVED_P (exp))
10248 /* We can indeed still hit this case, typically via builtin
10249 expanders calling save_expr immediately before expanding
10250 something. Assume this means that we only have to deal
10251 with non-BLKmode values. */
10252 gcc_assert (GET_MODE (ret) != BLKmode);
10254 val = build_decl (curr_insn_location (),
10255 VAR_DECL, NULL, TREE_TYPE (exp));
10256 DECL_ARTIFICIAL (val) = 1;
10257 DECL_IGNORED_P (val) = 1;
10259 TREE_OPERAND (exp, 0) = treeop0;
10260 SAVE_EXPR_RESOLVED_P (exp) = 1;
10262 if (!CONSTANT_P (ret))
10263 ret = copy_to_reg (ret);
10264 SET_DECL_RTL (val, ret);
10272 /* If we don't need the result, just ensure we evaluate any
10273 subexpressions.  */
10274 if (ignore)
10275 {
10276 unsigned HOST_WIDE_INT idx;
10277 tree value;
10279 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
10280 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
10282 return const0_rtx;
10283 }
10285 return expand_constructor (exp, target, modifier, false);
10287 case TARGET_MEM_REF:
10288 {
10289 addr_space_t as
10290 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10291 enum insn_code icode;
10292 unsigned int align;
10294 op0 = addr_for_mem_ref (exp, as, true);
10295 op0 = memory_address_addr_space (mode, op0, as);
10296 temp = gen_rtx_MEM (mode, op0);
10297 set_mem_attributes (temp, exp, 0);
10298 set_mem_addr_space (temp, as);
10299 align = get_object_alignment (exp);
10300 if (modifier != EXPAND_WRITE
10301 && modifier != EXPAND_MEMORY
10303 && align < GET_MODE_ALIGNMENT (mode)
10304 /* If the target does not have special handling for unaligned
10305 loads of mode then it can use regular moves for them. */
10306 && ((icode = optab_handler (movmisalign_optab, mode))
10307 != CODE_FOR_nothing))
10309 struct expand_operand ops[2];
10311 /* We've already validated the memory, and we're creating a
10312 new pseudo destination. The predicates really can't fail,
10313 nor can the generator. */
10314 create_output_operand (&ops[0], NULL_RTX, mode);
10315 create_fixed_operand (&ops[1], temp);
10316 expand_insn (icode, 2, ops);
10317 temp = ops[0].value;
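/* Illustrative example (editorial sketch): on a strict-alignment
   target that provides a movmisalign pattern for, say, a 16-byte
   vector mode, a TARGET_MEM_REF load from an address known to be
   only byte-aligned is routed through that pattern here instead of
   a regular move, which could otherwise trap.  */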
10324 const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
10325 addr_space_t as
10326 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10327 machine_mode address_mode;
10328 tree base = TREE_OPERAND (exp, 0);
10330 enum insn_code icode;
10332 /* Handle expansion of non-aliased memory with non-BLKmode. That
10333 might end up in a register. */
10334 if (mem_ref_refers_to_non_mem_p (exp))
10336 poly_int64 offset = mem_ref_offset (exp).force_shwi ();
10337 base = TREE_OPERAND (base, 0);
10338 if (known_eq (offset, 0)
10340 && tree_fits_uhwi_p (TYPE_SIZE (type))
10341 && known_eq (GET_MODE_BITSIZE (DECL_MODE (base)),
10342 tree_to_uhwi (TYPE_SIZE (type))))
10343 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
10344 target, tmode, modifier);
10345 if (TYPE_MODE (type) == BLKmode)
10347 temp = assign_stack_temp (DECL_MODE (base),
10348 GET_MODE_SIZE (DECL_MODE (base)));
10349 store_expr (base, temp, 0, false, false);
10350 temp = adjust_address (temp, BLKmode, offset);
10351 set_mem_size (temp, int_size_in_bytes (type));
10354 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
10355 bitsize_int (offset * BITS_PER_UNIT));
10356 REF_REVERSE_STORAGE_ORDER (exp) = reverse;
10357 return expand_expr (exp, target, tmode, modifier);
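/* Illustrative example (editorial sketch): given 'float f; ...
   int i = *(int *) &f;' where F is otherwise never addressable, F may
   live in a register, so the MEM_REF cannot be expanded as a memory
   access; the code above rewrites it as a VIEW_CONVERT_EXPR (equal
   sizes, zero offset) or as a BIT_FIELD_REF and expands that.  */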
10359 address_mode = targetm.addr_space.address_mode (as);
10360 base = TREE_OPERAND (exp, 0);
10361 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
10363 tree mask = gimple_assign_rhs2 (def_stmt);
10364 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
10365 gimple_assign_rhs1 (def_stmt), mask);
10366 TREE_OPERAND (exp, 0) = base;
10368 align = get_object_alignment (exp);
10369 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
10370 op0 = memory_address_addr_space (mode, op0, as);
10371 if (!integer_zerop (TREE_OPERAND (exp, 1)))
10373 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
10374 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
10375 op0 = memory_address_addr_space (mode, op0, as);
10377 temp = gen_rtx_MEM (mode, op0);
10378 set_mem_attributes (temp, exp, 0);
10379 set_mem_addr_space (temp, as);
10380 if (TREE_THIS_VOLATILE (exp))
10381 MEM_VOLATILE_P (temp) = 1;
10382 if (modifier != EXPAND_WRITE
10383 && modifier != EXPAND_MEMORY
10384 && !inner_reference_p
10386 && align < GET_MODE_ALIGNMENT (mode))
10388 if ((icode = optab_handler (movmisalign_optab, mode))
10389 != CODE_FOR_nothing)
10391 struct expand_operand ops[2];
10393 /* We've already validated the memory, and we're creating a
10394 new pseudo destination. The predicates really can't fail,
10395 nor can the generator. */
10396 create_output_operand (&ops[0], NULL_RTX, mode);
10397 create_fixed_operand (&ops[1], temp);
10398 expand_insn (icode, 2, ops);
10399 temp = ops[0].value;
10401 else if (targetm.slow_unaligned_access (mode, align))
10402 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
10403 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
10404 (modifier == EXPAND_STACK_PARM
10405 ? NULL_RTX : target),
10406 mode, mode, false, alt_rtl);
10409 && modifier != EXPAND_MEMORY
10410 && modifier != EXPAND_WRITE)
10411 temp = flip_storage_order (mode, temp);
10418 tree array = treeop0;
10419 tree index = treeop1;
10422 /* Fold an expression like: "foo"[2].
10423 This is not done in fold so it won't happen inside &.
10424 Don't fold if this is for wide characters since it's too
10425 difficult to do correctly and this is a very rare case. */
10427 if (modifier != EXPAND_CONST_ADDRESS
10428 && modifier != EXPAND_INITIALIZER
10429 && modifier != EXPAND_MEMORY)
10431 tree t = fold_read_from_constant_string (exp);
10434 return expand_expr (t, target, tmode, modifier);
10437 /* If this is a constant index into a constant array,
10438 just get the value from the array. Handle both the cases when
10439 we have an explicit constructor and when our operand is a variable
10440 that was declared const. */
10442 if (modifier != EXPAND_CONST_ADDRESS
10443 && modifier != EXPAND_INITIALIZER
10444 && modifier != EXPAND_MEMORY
10445 && TREE_CODE (array) == CONSTRUCTOR
10446 && ! TREE_SIDE_EFFECTS (array)
10447 && TREE_CODE (index) == INTEGER_CST)
10449 unsigned HOST_WIDE_INT ix;
10452 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10454 if (tree_int_cst_equal (field, index))
10456 if (!TREE_SIDE_EFFECTS (value))
10457 return expand_expr (fold (value), target, tmode, modifier);
10462 else if (optimize >= 1
10463 && modifier != EXPAND_CONST_ADDRESS
10464 && modifier != EXPAND_INITIALIZER
10465 && modifier != EXPAND_MEMORY
10466 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10467 && TREE_CODE (index) == INTEGER_CST
10468 && (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
10469 && (init = ctor_for_folding (array)) != error_mark_node)
10471 if (init == NULL_TREE)
10473 tree value = build_zero_cst (type);
10474 if (TREE_CODE (value) == CONSTRUCTOR)
10476 /* If VALUE is a CONSTRUCTOR, this optimization is only
10477 useful if this doesn't store the CONSTRUCTOR into
10478 memory. If it does, it is more efficient to just
10479 load the data from the array directly. */
10480 rtx ret = expand_constructor (value, target,
10482 if (ret == NULL_RTX)
10487 return expand_expr (value, target, tmode, modifier);
10489 else if (TREE_CODE (init) == CONSTRUCTOR)
10491 unsigned HOST_WIDE_INT ix;
10494 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10496 if (tree_int_cst_equal (field, index))
10498 if (TREE_SIDE_EFFECTS (value))
10501 if (TREE_CODE (value) == CONSTRUCTOR)
10503 /* If VALUE is a CONSTRUCTOR, this
10504 optimization is only useful if
10505 this doesn't store the CONSTRUCTOR
10506 into memory. If it does, it is more
10507 efficient to just load the data from
10508 the array directly. */
10509 rtx ret = expand_constructor (value, target,
10511 if (ret == NULL_RTX)
10516 expand_expr (fold (value), target, tmode, modifier);
10519 else if (TREE_CODE (init) == STRING_CST)
10521 tree low_bound = array_ref_low_bound (exp);
10522 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10524 /* Optimize the special case of a zero lower bound.
10526 We convert the lower bound to sizetype to avoid problems
10527 with constant folding. E.g. suppose the lower bound is
10528 1 and its mode is QI. Without the conversion
10529 (ARRAY + (INDEX - (unsigned char)1))
10530 becomes
10531 (ARRAY + (-(unsigned char)1) + INDEX)
10532 which becomes
10533 (ARRAY + 255 + INDEX). Oops! */
10534 if (!integer_zerop (low_bound))
10535 index1 = size_diffop_loc (loc, index1,
10536 fold_convert_loc (loc, sizetype,
10539 if (tree_fits_uhwi_p (index1)
10540 && compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10542 tree type = TREE_TYPE (TREE_TYPE (init));
10543 scalar_int_mode mode;
10545 if (is_int_mode (TYPE_MODE (type), &mode)
10546 && GET_MODE_SIZE (mode) == 1)
10547 return gen_int_mode (TREE_STRING_POINTER (init)
10548 [TREE_INT_CST_LOW (index1)],
10554 goto normal_inner_ref;
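/* Illustrative example (editorial sketch): given 'static const char
   arr[] = "abc";', a read of arr[1] reaches the STRING_CST path above
   and expands directly to the character constant 'b'; the plain
   literal form "foo"[2] is caught earlier by
   fold_read_from_constant_string.  */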
10556 case COMPONENT_REF:
10557 /* If the operand is a CONSTRUCTOR, we can just extract the
10558 appropriate field if it is present. */
10559 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10561 unsigned HOST_WIDE_INT idx;
10563 scalar_int_mode field_mode;
10565 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10567 if (field == treeop1
10568 /* We can normally use the value of the field in the
10569 CONSTRUCTOR. However, if this is a bitfield in
10570 an integral mode that we can fit in a HOST_WIDE_INT,
10571 we must mask only the number of bits in the bitfield,
10572 since this is done implicitly by the constructor. If
10573 the bitfield does not meet either of those conditions,
10574 we can't do this optimization. */
10575 && (! DECL_BIT_FIELD (field)
10576 || (is_int_mode (DECL_MODE (field), &field_mode)
10577 && (GET_MODE_PRECISION (field_mode)
10578 <= HOST_BITS_PER_WIDE_INT))))
10580 if (DECL_BIT_FIELD (field)
10581 && modifier == EXPAND_STACK_PARM)
10583 op0 = expand_expr (value, target, tmode, modifier);
10584 if (DECL_BIT_FIELD (field))
10586 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10587 scalar_int_mode imode
10588 = SCALAR_INT_TYPE_MODE (TREE_TYPE (field));
10590 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10592 op1 = gen_int_mode ((HOST_WIDE_INT_1 << bitsize) - 1,
10594 op0 = expand_and (imode, op0, op1, target);
10598 int count = GET_MODE_PRECISION (imode) - bitsize;
10600 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10602 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10610 goto normal_inner_ref;
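/* Illustrative example (editorial sketch): extracting a signed 3-bit
   bitfield whose CONSTRUCTOR value was expanded in QImode uses the
   shift pair above with count = 8 - 3 = 5: shift left to move the
   field's top bit into the sign position, then shift right
   arithmetically to sign-extend; the unsigned branch instead masks
   with (1 << 3) - 1 = 7.  */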
10612 case BIT_FIELD_REF:
10613 case ARRAY_RANGE_REF:
10616 machine_mode mode1, mode2;
10617 poly_int64 bitsize, bitpos, bytepos;
10619 int reversep, volatilep = 0, must_force_mem;
10620 tree tem
10621 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10622 &unsignedp, &reversep, &volatilep);
10623 rtx orig_op0, memloc;
10624 bool clear_mem_expr = false;
10626 /* If we got back the original object, something is wrong. Perhaps
10627 we are evaluating an expression too early. In any event, don't
10628 infinitely recurse. */
10629 gcc_assert (tem != exp);
10631 /* If TEM's type is a union of variable size, pass TARGET to the inner
10632 computation, since it will need a temporary and TARGET is known
10633 to be adequate. This occurs in unchecked conversion in Ada. */
10634 orig_op0 = op0
10635 = expand_expr_real (tem,
10636 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10637 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10638 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10640 && modifier != EXPAND_STACK_PARM
10641 ? target : NULL_RTX),
10643 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10646 /* If the field has a mode, we want to access it in the
10647 field's mode, not the computed mode.
10648 If a MEM has VOIDmode (external with incomplete type),
10649 use BLKmode for it instead. */
10652 if (mode1 != VOIDmode)
10653 op0 = adjust_address (op0, mode1, 0);
10654 else if (GET_MODE (op0) == VOIDmode)
10655 op0 = adjust_address (op0, BLKmode, 0);
10656 }
10658 mode2
10659 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10661 /* If we have either an offset, a BLKmode result, or a reference
10662 outside the underlying object, we must force it to memory.
10663 Such a case can occur in Ada if we have unchecked conversion
10664 of an expression from a scalar type to an aggregate type or
10665 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10666 passed a partially uninitialized object or a view-conversion
10667 to a larger size. */
10668 must_force_mem = (offset
10669 || mode1 == BLKmode
10670 || maybe_gt (bitpos + bitsize,
10671 GET_MODE_BITSIZE (mode2)));
10673 /* Handle CONCAT first. */
10674 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10676 if (known_eq (bitpos, 0)
10677 && known_eq (bitsize, GET_MODE_BITSIZE (GET_MODE (op0)))
10678 && COMPLEX_MODE_P (mode1)
10679 && COMPLEX_MODE_P (GET_MODE (op0))
10680 && (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
10681 == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
10684 op0 = flip_storage_order (GET_MODE (op0), op0);
10685 if (mode1 != GET_MODE (op0))
10688 for (int i = 0; i < 2; i++)
10690 rtx op = read_complex_part (op0, i != 0);
10691 if (GET_CODE (op) == SUBREG)
10692 op = force_reg (GET_MODE (op), op);
10693 rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
10699 if (!REG_P (op) && !MEM_P (op))
10700 op = force_reg (GET_MODE (op), op);
10701 op = gen_lowpart (GET_MODE_INNER (mode1), op);
10705 op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
10709 if (known_eq (bitpos, 0)
10710 && known_eq (bitsize,
10711 GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
10712 && maybe_ne (bitsize, 0))
10714 op0 = XEXP (op0, 0);
10715 mode2 = GET_MODE (op0);
10717 else if (known_eq (bitpos,
10718 GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
10719 && known_eq (bitsize,
10720 GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1))))
10721 && maybe_ne (bitpos, 0)
10722 && maybe_ne (bitsize, 0))
10724 op0 = XEXP (op0, 1);
10726 mode2 = GET_MODE (op0);
10729 /* Otherwise force into memory. */
10730 must_force_mem = 1;
10733 /* If this is a constant, put it in a register if it is a legitimate
10734 constant and we don't need a memory reference. */
10735 if (CONSTANT_P (op0)
10736 && mode2 != BLKmode
10737 && targetm.legitimate_constant_p (mode2, op0)
10738 && !must_force_mem)
10739 op0 = force_reg (mode2, op0);
10741 /* Otherwise, if this is a constant, try to force it to the constant
10742 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10743 is a legitimate constant. */
10744 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10745 op0 = validize_mem (memloc);
10747 /* Otherwise, if this is a constant or the object is not in memory
10748 and need be, put it there. */
10749 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10751 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10752 emit_move_insn (memloc, op0);
10754 clear_mem_expr = true;
10759 machine_mode address_mode;
10760 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10763 gcc_assert (MEM_P (op0));
10765 address_mode = get_address_mode (op0);
10766 if (GET_MODE (offset_rtx) != address_mode)
10768 /* We cannot be sure that the RTL in offset_rtx is valid outside
10769 of a memory address context, so force it into a register
10770 before attempting to convert it to the desired mode. */
10771 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10772 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10775 /* See the comment in expand_assignment for the rationale. */
10776 if (mode1 != VOIDmode
10777 && maybe_ne (bitpos, 0)
10778 && maybe_gt (bitsize, 0)
10779 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
10780 && multiple_p (bitpos, bitsize)
10781 && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
10782 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10784 op0 = adjust_address (op0, mode1, bytepos);
10788 op0 = offset_address (op0, offset_rtx,
10789 highest_pow2_factor (offset));
10792 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10793 record its alignment as BIGGEST_ALIGNMENT. */
10795 && known_eq (bitpos, 0)
10797 && is_aligning_offset (offset, tem))
10798 set_mem_align (op0, BIGGEST_ALIGNMENT);
10800 /* Don't forget about volatility even if this is a bitfield. */
10801 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10803 if (op0 == orig_op0)
10804 op0 = copy_rtx (op0);
10806 MEM_VOLATILE_P (op0) = 1;
10809 /* In cases where an aligned union has an unaligned object
10810 as a field, we might be extracting a BLKmode value from
10811 an integer-mode (e.g., SImode) object. Handle this case
10812 by doing the extract into an object as wide as the field
10813 (which we know to be the width of a basic mode), then
10814 storing into memory, and changing the mode to BLKmode. */
10815 if (mode1 == VOIDmode
10816 || REG_P (op0) || GET_CODE (op0) == SUBREG
10817 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10818 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10819 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10820 && modifier != EXPAND_CONST_ADDRESS
10821 && modifier != EXPAND_INITIALIZER
10822 && modifier != EXPAND_MEMORY)
10823 /* If the bitfield is volatile and the bitsize
10824 is narrower than the access size of the bitfield,
10825 we need to extract bitfields from the access. */
10826 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10827 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10828 && mode1 != BLKmode
10829 && maybe_lt (bitsize, GET_MODE_SIZE (mode1) * BITS_PER_UNIT))
10830 /* If the field isn't aligned enough to fetch as a memref,
10831 fetch it as a bit field. */
10832 || (mode1 != BLKmode
10834 ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10835 || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode1))
10836 : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10837 || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
10838 && modifier != EXPAND_MEMORY
10839 && ((modifier == EXPAND_CONST_ADDRESS
10840 || modifier == EXPAND_INITIALIZER)
10842 : targetm.slow_unaligned_access (mode1,
10844 || !multiple_p (bitpos, BITS_PER_UNIT)))
10845 /* If the type and the field are a constant size and the
10846 size of the type isn't the same size as the bitfield,
10847 we must use bitfield operations. */
10848 || (known_size_p (bitsize)
10849 && TYPE_SIZE (TREE_TYPE (exp))
10850 && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
10851 && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
10854 machine_mode ext_mode = mode;
10856 if (ext_mode == BLKmode
10857 && ! (target != 0 && MEM_P (op0)
10859 && multiple_p (bitpos, BITS_PER_UNIT)))
10860 ext_mode = int_mode_for_size (bitsize, 1).else_blk ();
10862 if (ext_mode == BLKmode)
10865 target = assign_temp (type, 1, 1);
10867 /* ??? Unlike the similar test a few lines below, this one is
10868 very likely obsolete. */
10869 if (known_eq (bitsize, 0))
10872 /* In this case, BITPOS must start at a byte boundary and
10873 TARGET, if specified, must be a MEM. */
10874 gcc_assert (MEM_P (op0)
10875 && (!target || MEM_P (target)));
10877 bytepos = exact_div (bitpos, BITS_PER_UNIT);
10878 poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
10879 emit_block_move (target,
10880 adjust_address (op0, VOIDmode, bytepos),
10881 gen_int_mode (bytesize, Pmode),
10882 (modifier == EXPAND_STACK_PARM
10883 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10888 /* If we have nothing to extract, the result will be 0 for targets
10889 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10890 return 0 for the sake of consistency, as reading a zero-sized
10891 bitfield is valid in Ada and the value is fully specified. */
10892 if (known_eq (bitsize, 0))
10895 op0 = validize_mem (op0);
10897 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10898 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10900 /* If the result has a record type and the extraction is done in
10901 an integral mode, then the field may be not aligned on a byte
10902 boundary; in this case, if it has reverse storage order, it
10903 needs to be extracted as a scalar field with reverse storage
10904 order and put back into memory order afterwards. */
10905 if (TREE_CODE (type) == RECORD_TYPE
10906 && GET_MODE_CLASS (ext_mode) == MODE_INT)
10907 reversep = TYPE_REVERSE_STORAGE_ORDER (type);
10909 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10910 (modifier == EXPAND_STACK_PARM
10911 ? NULL_RTX : target),
10912 ext_mode, ext_mode, reversep, alt_rtl);
10914 /* If the result has a record type and the mode of OP0 is an
10915 integral mode then, if BITSIZE is narrower than this mode
10916 and this is for big-endian data, we must put the field
10917 into the high-order bits. And we must also put it back
10918 into memory order if it has been previously reversed. */
10919 scalar_int_mode op0_mode;
10920 if (TREE_CODE (type) == RECORD_TYPE
10921 && is_int_mode (GET_MODE (op0), &op0_mode))
10923 HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode);
10925 gcc_checking_assert (known_le (bitsize, size));
10926 if (maybe_lt (bitsize, size)
10927 && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
10928 op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0,
10929 size - bitsize, op0, 1);
10932 op0 = flip_storage_order (op0_mode, op0);
10935 /* If the result type is BLKmode, store the data into a temporary
10936 of the appropriate type, but with the mode corresponding to the
10937 mode for the data we have (op0's mode). */
10938 if (mode == BLKmode)
10939 {
10940 rtx new_rtx
10941 = assign_stack_temp_for_type (ext_mode,
10942 GET_MODE_BITSIZE (ext_mode),
10944 emit_move_insn (new_rtx, op0);
10945 op0 = copy_rtx (new_rtx);
10946 PUT_MODE (op0, BLKmode);
10952 /* If the result is BLKmode, use that to access the object
10954 if (mode == BLKmode)
10957 /* Get a reference to just this component. */
10958 bytepos = bits_to_bytes_round_down (bitpos);
10959 if (modifier == EXPAND_CONST_ADDRESS
10960 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10961 op0 = adjust_address_nv (op0, mode1, bytepos);
10963 op0 = adjust_address (op0, mode1, bytepos);
10965 if (op0 == orig_op0)
10966 op0 = copy_rtx (op0);
10968 /* Don't set memory attributes if the base expression is
10969 SSA_NAME that got expanded as a MEM. In that case, we should
10970 just honor its original memory attributes. */
10971 if (TREE_CODE (tem) != SSA_NAME || !MEM_P (orig_op0))
10972 set_mem_attributes (op0, exp, 0);
10974 if (REG_P (XEXP (op0, 0)))
10975 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10977 /* If op0 is a temporary because the original expression was forced
10978 to memory, clear MEM_EXPR so that the original expression cannot
10979 be marked as addressable through MEM_EXPR of the temporary. */
10980 if (clear_mem_expr)
10981 set_mem_expr (op0, NULL_TREE);
10983 MEM_VOLATILE_P (op0) |= volatilep;
10986 && modifier != EXPAND_MEMORY
10987 && modifier != EXPAND_WRITE)
10988 op0 = flip_storage_order (mode1, op0);
10990 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10991 || modifier == EXPAND_CONST_ADDRESS
10992 || modifier == EXPAND_INITIALIZER)
10996 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10998 convert_move (target, op0, unsignedp);
11003 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
11006 /* All valid uses of __builtin_va_arg_pack () are removed during
11007 inlining.  */
11008 if (CALL_EXPR_VA_ARG_PACK (exp))
11009 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
11011 tree fndecl = get_callee_fndecl (exp), attr;
11014 /* Don't diagnose the error attribute in thunks, those are
11015 artificially created. */
11016 && !CALL_FROM_THUNK_P (exp)
11017 && (attr = lookup_attribute ("error",
11018 DECL_ATTRIBUTES (fndecl))) != NULL)
11020 const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
11021 error ("%Kcall to %qs declared with attribute error: %s", exp,
11022 identifier_to_locale (ident),
11023 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
11026 /* Don't diagnose the warning attribute in thunks, those are
11027 artificially created. */
11028 && !CALL_FROM_THUNK_P (exp)
11029 && (attr = lookup_attribute ("warning",
11030 DECL_ATTRIBUTES (fndecl))) != NULL)
11032 const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
11033 warning_at (tree_nonartificial_location (exp), 0,
11034 "%Kcall to %qs declared with attribute warning: %s",
11035 exp, identifier_to_locale (ident),
11036 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
11039 /* Check for a built-in function. */
11040 if (fndecl && DECL_BUILT_IN (fndecl))
11042 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
11043 if (CALL_WITH_BOUNDS_P (exp))
11044 return expand_builtin_with_bounds (exp, target, subtarget,
11047 return expand_builtin (exp, target, subtarget, tmode, ignore);
11050 return expand_call (exp, target, ignore);
11052 case VIEW_CONVERT_EXPR:
11055 /* If we are converting to BLKmode, try to avoid an intermediate
11056 temporary by fetching an inner memory reference. */
11057 if (mode == BLKmode
11058 && poly_int_tree_p (TYPE_SIZE (type))
11059 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
11060 && handled_component_p (treeop0))
11062 machine_mode mode1;
11063 poly_int64 bitsize, bitpos, bytepos;
11065 int unsignedp, reversep, volatilep = 0;
11066 tree tem
11067 = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
11068 &unsignedp, &reversep, &volatilep);
11071 /* ??? We should work harder and deal with non-zero offsets. */
11073 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
11075 && known_size_p (bitsize)
11076 && known_eq (wi::to_poly_offset (TYPE_SIZE (type)), bitsize))
11078 /* See the normal_inner_ref case for the rationale. */
11079 rtx orig_op0
11080 = expand_expr_real (tem,
11081 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
11082 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
11084 && modifier != EXPAND_STACK_PARM
11085 ? target : NULL_RTX),
11087 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
11090 if (MEM_P (orig_op0))
11094 /* Get a reference to just this component. */
11095 if (modifier == EXPAND_CONST_ADDRESS
11096 || modifier == EXPAND_SUM
11097 || modifier == EXPAND_INITIALIZER)
11098 op0 = adjust_address_nv (op0, mode, bytepos);
11100 op0 = adjust_address (op0, mode, bytepos);
11102 if (op0 == orig_op0)
11103 op0 = copy_rtx (op0);
11105 set_mem_attributes (op0, treeop0, 0);
11106 if (REG_P (XEXP (op0, 0)))
11107 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
11109 MEM_VOLATILE_P (op0) |= volatilep;
11115 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
11116 NULL, inner_reference_p);
11118 /* If the input and output modes are both the same, we are done. */
11119 if (mode == GET_MODE (op0))
11121 /* If neither mode is BLKmode, and both modes are the same size
11122 then we can use gen_lowpart. */
11123 else if (mode != BLKmode
11124 && GET_MODE (op0) != BLKmode
11125 && known_eq (GET_MODE_PRECISION (mode),
11126 GET_MODE_PRECISION (GET_MODE (op0)))
11127 && !COMPLEX_MODE_P (GET_MODE (op0)))
11129 if (GET_CODE (op0) == SUBREG)
11130 op0 = force_reg (GET_MODE (op0), op0);
11131 temp = gen_lowpart_common (mode, op0);
11136 if (!REG_P (op0) && !MEM_P (op0))
11137 op0 = force_reg (GET_MODE (op0), op0);
11138 op0 = gen_lowpart (mode, op0);
11141 /* If both types are integral, convert from one mode to the other. */
11142 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
11143 op0 = convert_modes (mode, GET_MODE (op0), op0,
11144 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
11145 /* If the output type is a bit-field type, do an extraction. */
11146 else if (reduce_bit_field)
11147 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
11148 TYPE_UNSIGNED (type), NULL_RTX,
11149 mode, mode, false, NULL);
11150 /* As a last resort, spill op0 to memory, and reload it in a
11151 different mode.  */
11152 else if (!MEM_P (op0))
11154 /* If the operand is not a MEM, force it into memory. Since we
11155 are going to be changing the mode of the MEM, don't call
11156 force_const_mem for constants because we don't allow pool
11157 constants to change mode. */
11158 tree inner_type = TREE_TYPE (treeop0);
11160 gcc_assert (!TREE_ADDRESSABLE (exp));
11162 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
11163 target
11164 = assign_stack_temp_for_type
11165 (TYPE_MODE (inner_type),
11166 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
11168 emit_move_insn (target, op0);
11172 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
11173 output type is such that the operand is known to be aligned, indicate
11174 that it is. Otherwise, we need only be concerned about alignment for
11175 non-BLKmode results. */
11178 enum insn_code icode;
11180 if (modifier != EXPAND_WRITE
11181 && modifier != EXPAND_MEMORY
11182 && !inner_reference_p
11184 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
11186 /* If the target does have special handling for unaligned
11187 loads of mode then use them. */
11188 if ((icode = optab_handler (movmisalign_optab, mode))
11189 != CODE_FOR_nothing)
11193 op0 = adjust_address (op0, mode, 0);
11194 /* We've already validated the memory, and we're creating a
11195 new pseudo destination. The predicates really can't
11196 fail.  */
11197 reg = gen_reg_rtx (mode);
11199 /* Nor can the insn generator. */
11200 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
11204 else if (STRICT_ALIGNMENT)
11206 poly_uint64 mode_size = GET_MODE_SIZE (mode);
11207 poly_uint64 temp_size = mode_size;
11208 if (GET_MODE (op0) != BLKmode)
11209 temp_size = upper_bound (temp_size,
11210 GET_MODE_SIZE (GET_MODE (op0)));
11211 rtx new_rtx
11212 = assign_stack_temp_for_type (mode, temp_size, type);
11213 rtx new_with_op0_mode
11214 = adjust_address (new_rtx, GET_MODE (op0), 0);
11216 gcc_assert (!TREE_ADDRESSABLE (exp));
11218 if (GET_MODE (op0) == BLKmode)
11220 rtx size_rtx = gen_int_mode (mode_size, Pmode);
11221 emit_block_move (new_with_op0_mode, op0, size_rtx,
11222 (modifier == EXPAND_STACK_PARM
11223 ? BLOCK_OP_CALL_PARM
11224 : BLOCK_OP_NORMAL));
11227 emit_move_insn (new_with_op0_mode, op0);
11233 op0 = adjust_address (op0, mode, 0);
11240 tree lhs = treeop0;
11241 tree rhs = treeop1;
11242 gcc_assert (ignore);
11244 /* Check for |= or &= of a bitfield of size one into another bitfield
11245 of size one. In this case, (unless we need the result of the
11246 assignment) we can do this more efficiently with a
11247 test followed by an assignment, if necessary.
11249 ??? At this point, we can't get a BIT_FIELD_REF here. But if
11250 things change so we do, this code should be enhanced to
11251 support it.  */
11252 if (TREE_CODE (lhs) == COMPONENT_REF
11253 && (TREE_CODE (rhs) == BIT_IOR_EXPR
11254 || TREE_CODE (rhs) == BIT_AND_EXPR)
11255 && TREE_OPERAND (rhs, 0) == lhs
11256 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
11257 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
11258 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
11260 rtx_code_label *label = gen_label_rtx ();
11261 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
11262 do_jump (TREE_OPERAND (rhs, 1),
11265 profile_probability::uninitialized ());
11266 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
11268 do_pending_stack_adjust ();
11269 emit_label (label);
11273 expand_assignment (lhs, rhs, false);
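/* Illustrative example (editorial sketch): for one-bit bitfields,
   's.a |= s.b;' is expanded by the special case above roughly as
   'if (!s.b) goto skip; s.a = 1; skip:;' -- a test plus a
   conditional store of a constant, avoiding a read-modify-write of
   the destination field (for &= the stored constant is 0).  */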
11278 return expand_expr_addr_expr (exp, target, tmode, modifier);
11280 case REALPART_EXPR:
11281 op0 = expand_normal (treeop0);
11282 return read_complex_part (op0, false);
11284 case IMAGPART_EXPR:
11285 op0 = expand_normal (treeop0);
11286 return read_complex_part (op0, true);
11293 /* Expanded in cfgexpand.c. */
11294 gcc_unreachable ();
11296 case TRY_CATCH_EXPR:
11298 case EH_FILTER_EXPR:
11299 case TRY_FINALLY_EXPR:
11300 /* Lowered by tree-eh.c. */
11301 gcc_unreachable ();
11303 case WITH_CLEANUP_EXPR:
11304 case CLEANUP_POINT_EXPR:
11306 case CASE_LABEL_EXPR:
11311 case COMPOUND_EXPR:
11312 case PREINCREMENT_EXPR:
11313 case PREDECREMENT_EXPR:
11314 case POSTINCREMENT_EXPR:
11315 case POSTDECREMENT_EXPR:
11318 case COMPOUND_LITERAL_EXPR:
11319 /* Lowered by gimplify.c. */
11320 gcc_unreachable ();
11323 /* Function descriptors are not valid except as
11324 initialization constants, and should not be expanded.  */
11325 gcc_unreachable ();
11327 case WITH_SIZE_EXPR:
11328 /* WITH_SIZE_EXPR expands to its first argument. The caller should
11329 have pulled out the size to use in whatever context it needed. */
11330 return expand_expr_real (treeop0, original_target, tmode,
11331 modifier, alt_rtl, inner_reference_p);
11334 return expand_expr_real_2 (&ops, target, tmode, modifier);
11338 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
11339 signedness of TYPE), possibly returning the result in TARGET.
11340 TYPE is known to be a partial integer type. */
11341 static rtx
11342 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
11344 HOST_WIDE_INT prec = TYPE_PRECISION (type);
11345 if (target && GET_MODE (target) != GET_MODE (exp))
11347 /* For constant values, reduce using build_int_cst_type. */
11348 if (CONST_INT_P (exp))
11350 HOST_WIDE_INT value = INTVAL (exp);
11351 tree t = build_int_cst_type (type, value);
11352 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
11354 else if (TYPE_UNSIGNED (type))
11356 scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
11357 rtx mask = immed_wide_int_const
11358 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
11359 return expand_and (mode, exp, mask, target);
11363 scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
11364 int count = GET_MODE_PRECISION (mode) - prec;
11365 exp = expand_shift (LSHIFT_EXPR, mode, exp, count, target, 0);
11366 return expand_shift (RSHIFT_EXPR, mode, exp, count, target, 0);
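/* Illustrative example (editorial sketch): reducing an SImode value
   to a 5-bit unsigned partial-int type ANDs it with the mask 0x1f;
   the signed branch shifts left by 32 - 5 = 27 and then
   arithmetically right by 27, leaving a correctly sign-extended
   value in the wider mode.  */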
11370 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
11371 when applied to the address of EXP produces an address known to be
11372 aligned more than BIGGEST_ALIGNMENT. */
11374 static int
11375 is_aligning_offset (const_tree offset, const_tree exp)
11377 /* Strip off any conversions. */
11378 while (CONVERT_EXPR_P (offset))
11379 offset = TREE_OPERAND (offset, 0);
11381 /* We must now have a BIT_AND_EXPR with a constant that is one less than
11382 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
11383 if (TREE_CODE (offset) != BIT_AND_EXPR
11384 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
11385 || compare_tree_int (TREE_OPERAND (offset, 1),
11386 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
11387 || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
11390 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
11391 It must be NEGATE_EXPR. Then strip any more conversions. */
11392 offset = TREE_OPERAND (offset, 0);
11393 while (CONVERT_EXPR_P (offset))
11394 offset = TREE_OPERAND (offset, 0);
11396 if (TREE_CODE (offset) != NEGATE_EXPR)
11399 offset = TREE_OPERAND (offset, 0);
11400 while (CONVERT_EXPR_P (offset))
11401 offset = TREE_OPERAND (offset, 0);
11403 /* This must now be the address of EXP. */
11404 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
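/* Illustrative example (editorial sketch): an offset of the form
   (- (sizetype) &EXP) & (N - 1), where N is a power of 2 whose N - 1
   exceeds BIGGEST_ALIGNMENT in bytes, rounds &EXP + OFFSET up to the
   next N-byte boundary, so the resulting address can be recorded as
   BIGGEST_ALIGNMENT-aligned by the caller above.  */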
11407 /* Return the tree node if ARG corresponds to a string constant, or zero
11408 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
11409 in bytes within the string that ARG is accessing. The type of the
11410 offset will be `sizetype'. */
11412 tree
11413 string_constant (tree arg, tree *ptr_offset)
11415 tree array, offset, lower_bound;
11418 if (TREE_CODE (arg) == ADDR_EXPR)
11420 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
11422 *ptr_offset = size_zero_node;
11423 return TREE_OPERAND (arg, 0);
11425 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
11427 array = TREE_OPERAND (arg, 0);
11428 offset = size_zero_node;
11430 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
11432 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
11433 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11434 if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
11437 /* Check if the array has a nonzero lower bound. */
11438 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
11439 if (!integer_zerop (lower_bound))
11441 /* If the offset and base aren't both constants, return 0. */
11442 if (TREE_CODE (lower_bound) != INTEGER_CST)
11444 if (TREE_CODE (offset) != INTEGER_CST)
11446 /* Adjust offset by the lower bound. */
11447 offset = size_diffop (fold_convert (sizetype, offset),
11448 fold_convert (sizetype, lower_bound));
11451 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
11453 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
11454 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11455 if (TREE_CODE (array) != ADDR_EXPR)
11457 array = TREE_OPERAND (array, 0);
11458 if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
11464 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
11466 tree arg0 = TREE_OPERAND (arg, 0);
11467 tree arg1 = TREE_OPERAND (arg, 1);
11472 if (TREE_CODE (arg0) == ADDR_EXPR
11473 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
11474 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
11476 array = TREE_OPERAND (arg0, 0);
11479 else if (TREE_CODE (arg1) == ADDR_EXPR
11480 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
11481 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
11483 array = TREE_OPERAND (arg1, 0);
11492 if (TREE_CODE (array) == STRING_CST)
11494 *ptr_offset = fold_convert (sizetype, offset);
11497 else if (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
11500 tree init = ctor_for_folding (array);
11502 /* Variables initialized to string literals can be handled too. */
11503 if (init == error_mark_node
11505 || TREE_CODE (init) != STRING_CST)
11508 /* Avoid const char foo[4] = "abcde"; */
11509 if (DECL_SIZE_UNIT (array) == NULL_TREE
11510 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
11511 || (length = TREE_STRING_LENGTH (init)) <= 0
11512 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
11515 /* If variable is bigger than the string literal, OFFSET must be constant
11516 and inside of the bounds of the string literal. */
11517 offset = fold_convert (sizetype, offset);
11518 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
11519 && (! tree_fits_uhwi_p (offset)
11520 || compare_tree_int (offset, length) >= 0))
11523 *ptr_offset = offset;
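/* Illustrative example (editorial sketch): for 'static const char
   arr[8] = "abc";' and ARG '&arr[2]', the STRING_CST "abc" is
   returned with *PTR_OFFSET set to 2; an over-long initializer such
   as 'char foo[4] = "abcde"' fails the size check above and yields
   0.  */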
11530 /* Generate code to calculate OPS, an exploded expression
11531 reflecting a comparison, using a store-flag instruction,
11532 and return an rtx for the result.
11534 If TARGET is nonzero, store the result there if convenient.
11536 Return zero if there is no suitable set-flag instruction
11537 available on this machine.
11539 Once expand_expr has been called on the arguments of the comparison,
11540 we are committed to doing the store flag, since it is not safe to
11541 re-evaluate the expression. We emit the store-flag insn by calling
11542 emit_store_flag, but only expand the arguments if we have a reason
11543 to believe that emit_store_flag will be successful. If we think that
11544 it will, but it isn't, we have to simulate the store-flag with a
11545 set/jump/set sequence. */
11547 static rtx
11548 do_store_flag (sepops ops, rtx target, machine_mode mode)
11550 enum rtx_code code;
11551 tree arg0, arg1, type;
11552 machine_mode operand_mode;
11553 int unsignedp;
11554 rtx op0, op1;
11555 rtx subtarget = target;
11556 location_t loc = ops->location;
11558 arg0 = ops->op0;
11559 arg1 = ops->op1;
11561 /* Don't crash if the comparison was erroneous. */
11562 if (arg0 == error_mark_node || arg1 == error_mark_node)
11565 type = TREE_TYPE (arg0);
11566 operand_mode = TYPE_MODE (type);
11567 unsignedp = TYPE_UNSIGNED (type);
11569 /* We won't bother with BLKmode store-flag operations because it would mean
11570 passing a lot of information to emit_store_flag. */
11571 if (operand_mode == BLKmode)
11574 /* We won't bother with store-flag operations involving function pointers
11575 when function pointers must be canonicalized before comparisons. */
11576 if (targetm.have_canonicalize_funcptr_for_compare ()
11577 && ((POINTER_TYPE_P (TREE_TYPE (arg0))
11578 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg0))))
11579 || (POINTER_TYPE_P (TREE_TYPE (arg1))
11580 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg1))))))
11586 /* For vector typed comparisons emit code to generate the desired
11587 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11588 expander for this. */
11589 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11591 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11592 if (VECTOR_BOOLEAN_TYPE_P (ops->type)
11593 && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
11594 return expand_vec_cmp_expr (ops->type, ifexp, target);
11597 tree if_true = constant_boolean_node (true, ops->type);
11598 tree if_false = constant_boolean_node (false, ops->type);
11599 return expand_vec_cond_expr (ops->type, ifexp, if_true,
11604 /* Get the rtx comparison code to use. We know that EXP is a comparison
11605 operation of some type. Some comparisons against 1 and -1 can be
11606 converted to comparisons with zero. Do so here so that the tests
11607 below will be aware that we have a comparison with zero. These
11608 tests will not catch constants in the first operand, but constants
11609 are rarely passed as the first operand. */
11620 if (integer_onep (arg1))
11621 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11623 code = unsignedp ? LTU : LT;
11626 if (! unsignedp && integer_all_onesp (arg1))
11627 arg1 = integer_zero_node, code = LT;
11629 code = unsignedp ? LEU : LE;
11632 if (! unsignedp && integer_all_onesp (arg1))
11633 arg1 = integer_zero_node, code = GE;
11635 code = unsignedp ? GTU : GT;
11638 if (integer_onep (arg1))
11639 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11641 code = unsignedp ? GEU : GE;
11644 case UNORDERED_EXPR:
11670 gcc_unreachable ();
11673 /* Put a constant second. */
11674 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11675 || TREE_CODE (arg0) == FIXED_CST)
11677 std::swap (arg0, arg1);
11678 code = swap_condition (code);
11681 /* If this is an equality or inequality test of a single bit, we can
11682 do this by shifting the bit being tested to the low-order bit and
11683 masking the result with the constant 1. If the condition was EQ,
11684 we xor it with 1. This does not require an scc insn and is faster
11685 than an scc insn even if we have it.
11687 The code to make this transformation was moved into fold_single_bit_test,
11688 so we just call into the folder and expand its result. */
11690 if ((code == NE || code == EQ)
11691 && integer_zerop (arg1)
11692 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11693 {
11694 gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11695 if (srcstmt
11696 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11698 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11699 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11700 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11701 gimple_assign_rhs1 (srcstmt),
11702 gimple_assign_rhs2 (srcstmt));
11703 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11705 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
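/* Illustrative example (editorial sketch): '(x & 4) != 0' matches the
   pattern above; fold_single_bit_test rewrites it as '(x >> 2) & 1',
   and the EQ form additionally XORs the result with 1, which is
   typically cheaper than a store-flag instruction even where one
   exists.  */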
11709 if (! get_subtarget (target)
11710 || GET_MODE (subtarget) != operand_mode)
11711 subtarget = 0;
11713 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11715 if (target == 0)
11716 target = gen_reg_rtx (mode);
11718 /* Try a cstore if possible. */
11719 return emit_store_flag_force (target, code, op0, op1,
11720 operand_mode, unsignedp,
11721 (TYPE_PRECISION (ops->type) == 1
11722 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11725 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11726 0 otherwise (i.e. if there is no casesi instruction).
11728 DEFAULT_PROBABILITY is the probability of jumping to the default
11729 label.  */
11730 int
11731 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11732 rtx table_label, rtx default_label, rtx fallback_label,
11733 profile_probability default_probability)
11735 struct expand_operand ops[5];
11736 scalar_int_mode index_mode = SImode;
11737 rtx op1, op2, index;
11739 if (! targetm.have_casesi ())
11740 return 0;
11742 /* The index must be some form of integer. Convert it to SImode. */
11743 scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
11744 if (GET_MODE_BITSIZE (omode) > GET_MODE_BITSIZE (index_mode))
11746 rtx rangertx = expand_normal (range);
11748 /* We must handle the endpoints in the original mode. */
11749 index_expr = build2 (MINUS_EXPR, index_type,
11750 index_expr, minval);
11751 minval = integer_zero_node;
11752 index = expand_normal (index_expr);
11754 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11755 omode, 1, default_label,
11756 default_probability);
11757 /* Now we can safely truncate. */
11758 index = convert_to_mode (index_mode, index, 0);
11762 if (omode != index_mode)
11764 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11765 index_expr = fold_convert (index_type, index_expr);
11768 index = expand_normal (index_expr);
11771 do_pending_stack_adjust ();
11773 op1 = expand_normal (minval);
11774 op2 = expand_normal (range);
11776 create_input_operand (&ops[0], index, index_mode);
11777 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11778 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11779 create_fixed_operand (&ops[3], table_label);
11780 create_fixed_operand (&ops[4], (default_label
11782 : fallback_label));
11783 expand_jump_insn (targetm.code_for_casesi, 5, ops);
11784 return 1;
11785 }
11787 /* Attempt to generate a tablejump instruction; same concept. */
11788 /* Subroutine of the next function.
11790 INDEX is the value being switched on, with the lowest value
11791 in the table already subtracted.
11792 MODE is its expected mode (needed if INDEX is constant).
11793 RANGE is the length of the jump table.
11794 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11796 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11797 index value is out of range.
11798 DEFAULT_PROBABILITY is the probability of jumping to
11799 the default label. */
11801 static void
11802 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11803 rtx default_label, profile_probability default_probability)
11804 {
11805 rtx temp, vector;
11807 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11808 cfun->cfg->max_jumptable_ents = INTVAL (range);
11810 /* Do an unsigned comparison (in the proper mode) between the index
11811 expression and the value which represents the length of the range.
11812 Since we just finished subtracting the lower bound of the range
11813 from the index expression, this comparison allows us to simultaneously
11814 check that the original index expression value is both greater than
11815 or equal to the minimum value of the range and less than or equal to
11816 the maximum value of the range. */
11819 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11820 default_label, default_probability);
11823 /* If index is in range, it must fit in Pmode.
11824 Convert to Pmode so we can index with it. */
11825 if (mode != Pmode)
11826 index = convert_to_mode (Pmode, index, 1);
11828 /* Don't let a MEM slip through, because then INDEX that comes
11829 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11830 and break_out_memory_refs will go to work on it and mess it up. */
11831 #ifdef PIC_CASE_VECTOR_ADDRESS
11832 if (flag_pic && !REG_P (index))
11833 index = copy_to_mode_reg (Pmode, index);
11834 #endif
11836 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11837 GET_MODE_SIZE, because this indicates how large insns are. The other
11838 uses should all be Pmode, because they are addresses. This code
11839 could fail if addresses and insns are not the same size. */
11840 index = simplify_gen_binary (MULT, Pmode, index,
11841 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11843 index = simplify_gen_binary (PLUS, Pmode, index,
11844 gen_rtx_LABEL_REF (Pmode, table_label));
11846 #ifdef PIC_CASE_VECTOR_ADDRESS
11847 if (flag_pic)
11848 index = PIC_CASE_VECTOR_ADDRESS (index);
11849 else
11850 #endif
11851 index = memory_address (CASE_VECTOR_MODE, index);
11852 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11853 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11854 convert_move (temp, vector, 0);
11856 emit_jump_insn (targetm.gen_tablejump (temp, table_label));
11858 /* If we are generating PIC code or if the table is PC-relative, the
11859 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11860 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11861 emit_barrier ();
11862 }
11864 int
11865 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11866 rtx table_label, rtx default_label,
11867 profile_probability default_probability)
11868 {
11869 rtx index;
11871 if (! targetm.have_tablejump ())
11872 return 0;
11874 index_expr = fold_build2 (MINUS_EXPR, index_type,
11875 fold_convert (index_type, index_expr),
11876 fold_convert (index_type, minval));
11877 index = expand_normal (index_expr);
11878 do_pending_stack_adjust ();
11880 do_tablejump (index, TYPE_MODE (index_type),
11881 convert_modes (TYPE_MODE (index_type),
11882 TYPE_MODE (TREE_TYPE (range)),
11883 expand_normal (range),
11884 TYPE_UNSIGNED (TREE_TYPE (range))),
11885 table_label, default_label, default_probability);
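/* Illustrative example (editorial sketch): for a dense switch over
   cases 3..10, the sequence emitted by do_tablejump is roughly:
   subtract 3 from the index (done above), branch to the default
   label if the result is greater than 7 unsigned (one comparison
   checks both bounds), scale by the jump-table entry size, add the
   table label, load the target address from the table and jump
   through it.  */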
11889 /* Return a CONST_VECTOR rtx representing a vector mask for
11890 a VECTOR_CST of booleans. */
11891 static rtx
11892 const_vector_mask_from_tree (tree exp)
11894 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
11895 machine_mode inner = GET_MODE_INNER (mode);
11897 rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
11898 VECTOR_CST_NELTS_PER_PATTERN (exp));
11899 unsigned int count = builder.encoded_nelts ();
11900 for (unsigned int i = 0; i < count; ++i)
11902 tree elt = VECTOR_CST_ELT (exp, i);
11903 gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11904 if (integer_zerop (elt))
11905 builder.quick_push (CONST0_RTX (inner));
11906 else if (integer_onep (elt)
11907 || integer_minus_onep (elt))
11908 builder.quick_push (CONSTM1_RTX (inner));
11910 gcc_unreachable ();
11912 return builder.build ();
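/* Illustrative example (editorial sketch): a four-element boolean
   VECTOR_CST {0, 1, 0, 1} becomes the CONST_VECTOR {0, -1, 0, -1};
   every true lane is the all-ones value of the element mode, matching
   the masks that vector comparisons produce.  */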
11915 /* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones.
11916 Return a constant scalar rtx of mode MODE in which bit X is set if element
11917 X of EXP is nonzero. */
11918 static rtx
11919 const_scalar_mask_from_tree (scalar_int_mode mode, tree exp)
11921 wide_int res = wi::zero (GET_MODE_PRECISION (mode));
11924 /* The result has a fixed number of bits so the input must too. */
11925 unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
11926 for (unsigned int i = 0; i < nunits; ++i)
11928 elt = VECTOR_CST_ELT (exp, i);
11929 gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11930 if (integer_all_onesp (elt))
11931 res = wi::set_bit (res, i);
11933 gcc_assert (integer_zerop (elt));
11936 return immed_wide_int_const (res, mode);
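/* Illustrative example (editorial sketch): for elements {-1, 0, 0, -1}
   the result is the integer 9 (binary 1001) in MODE -- the
   bit-per-lane form used by targets whose vector masks live in scalar
   mask registers, in the style of AVX-512.  */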
11939 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11940 static rtx
11941 const_vector_from_tree (tree exp)
11943 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
11945 if (initializer_zerop (exp))
11946 return CONST0_RTX (mode);
11948 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
11949 return const_vector_mask_from_tree (exp);
11951 machine_mode inner = GET_MODE_INNER (mode);
11953 rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
11954 VECTOR_CST_NELTS_PER_PATTERN (exp));
11955 unsigned int count = builder.encoded_nelts ();
11956 for (unsigned int i = 0; i < count; ++i)
11958 tree elt = VECTOR_CST_ELT (exp, i);
11959 if (TREE_CODE (elt) == REAL_CST)
11960 builder.quick_push (const_double_from_real_value (TREE_REAL_CST (elt),
11962 else if (TREE_CODE (elt) == FIXED_CST)
11963 builder.quick_push (CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11966 builder.quick_push (immed_wide_int_const (wi::to_poly_wide (elt),
11969 return builder.build ();
11972 /* Build a decl for a personality function given a language prefix. */
11974 tree
11975 build_personality_function (const char *lang)
11977 const char *unwind_and_version;
11981 switch (targetm_common.except_unwind_info (&global_options))
11986 unwind_and_version = "_sj0";
11990 unwind_and_version = "_v0";
11993 unwind_and_version = "_seh0";
11996 gcc_unreachable ();
11999 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
12001 type = build_function_type_list (integer_type_node, integer_type_node,
12002 long_long_unsigned_type_node,
12003 ptr_type_node, ptr_type_node, NULL_TREE);
12004 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
12005 get_identifier (name), type);
12006 DECL_ARTIFICIAL (decl) = 1;
12007 DECL_EXTERNAL (decl) = 1;
12008 TREE_PUBLIC (decl) = 1;
12010 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
12011 are the flags assigned by targetm.encode_section_info. */
12012 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
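/* Illustrative example (editorial sketch): with DWARF2 unwind info,
   build_personality_function ("gxx") yields a decl named
   "__gxx_personality_v0", the C++ personality routine; the SJLJ and
   SEH variants instead receive the "_sj0" and "_seh0" suffixes
   selected above.  */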
12017 /* Extracts the personality function of DECL and returns the corresponding
12018 libfunc.  */
12020 rtx
12021 get_personality_function (tree decl)
12023 tree personality = DECL_FUNCTION_PERSONALITY (decl);
12024 enum eh_personality_kind pk;
12026 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
12027 if (pk == eh_personality_none)
12031 && pk == eh_personality_any)
12032 personality = lang_hooks.eh_personality ();
12034 if (pk == eh_personality_lang)
12035 gcc_assert (personality != NULL_TREE);
12037 return XEXP (DECL_RTL (personality), 0);
12040 /* Returns a tree for the size of EXP in bytes. */
12042 static tree
12043 tree_expr_size (const_tree exp)
12044 {
12045 if (DECL_P (exp)
12046 && DECL_SIZE_UNIT (exp) != 0)
12047 return DECL_SIZE_UNIT (exp);
12048 else
12049 return size_in_bytes (TREE_TYPE (exp));
12052 /* Return an rtx for the size in bytes of the value of EXP. */
12054 rtx
12055 expr_size (tree exp)
12059 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
12060 size = TREE_OPERAND (exp, 1);
12063 size = tree_expr_size (exp);
12065 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
12068 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
12071 /* Return a wide integer for the size in bytes of the value of EXP, or -1
12072 if the size can vary or is larger than an integer. */
12074 static HOST_WIDE_INT
12075 int_expr_size (tree exp)
12079 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
12080 size = TREE_OPERAND (exp, 1);
12083 size = tree_expr_size (exp);
12087 if (size == 0 || !tree_fits_shwi_p (size))
12090 return tree_to_shwi (size);