1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
62 /* Define the names of the builtin function types and codes. */
63 const char *const built_in_class_names[4]
64 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
67 const char * built_in_names[(int) END_BUILTINS] =
69 #include "builtins.def"
73 /* Setup an array of _DECL trees, make sure each element is
74 initialized to NULL_TREE. */
75 tree built_in_decls[(int) END_BUILTINS];
76 /* Declarations used when constructing the builtin implicitly in the compiler.
77 It may be NULL_TREE when this is invalid (for instance runtime is not
78 required to implement the function call in all cases). */
79 tree implicit_built_in_decls[(int) END_BUILTINS];
81 static const char *c_getstr (tree);
82 static rtx c_readstr (const char *, enum machine_mode);
83 static int target_char_cast (tree, char *);
84 static rtx get_memory_rtx (tree, tree);
85 static int apply_args_size (void);
86 static int apply_result_size (void);
87 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
88 static rtx result_vector (int, rtx);
90 static void expand_builtin_update_setjmp_buf (rtx);
91 static void expand_builtin_prefetch (tree);
92 static rtx expand_builtin_apply_args (void);
93 static rtx expand_builtin_apply_args_1 (void);
94 static rtx expand_builtin_apply (rtx, rtx, rtx);
95 static void expand_builtin_return (rtx);
96 static enum type_class type_to_class (tree);
97 static rtx expand_builtin_classify_type (tree);
98 static void expand_errno_check (tree, rtx);
99 static rtx expand_builtin_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_args_info (tree);
108 static rtx expand_builtin_next_arg (void);
109 static rtx expand_builtin_va_start (tree);
110 static rtx expand_builtin_va_end (tree);
111 static rtx expand_builtin_va_copy (tree);
112 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
124 enum machine_mode, int);
125 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
126 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_bcopy (tree, int);
129 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
131 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
133 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
134 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_alloca (tree, rtx);
143 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
144 static rtx expand_builtin_frame_address (tree, tree);
145 static rtx expand_builtin_fputs (tree, rtx, bool);
146 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
147 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
148 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
149 static tree stabilize_va_list (tree, int);
150 static rtx expand_builtin_expect (tree, rtx);
151 static tree fold_builtin_constant_p (tree);
152 static tree fold_builtin_expect (tree, tree);
153 static tree fold_builtin_classify_type (tree);
154 static tree fold_builtin_strlen (tree, tree);
155 static tree fold_builtin_inf (tree, int);
156 static tree fold_builtin_nan (tree, tree, int);
157 static tree rewrite_call_expr (tree, int, tree, int, ...);
158 static bool validate_arg (const_tree, enum tree_code code);
159 static bool integer_valued_real_p (tree);
160 static tree fold_trunc_transparent_mathfn (tree, tree);
161 static bool readonly_data_expr (tree);
162 static rtx expand_builtin_fabs (tree, rtx, rtx);
163 static rtx expand_builtin_signbit (tree, rtx);
164 static tree fold_builtin_sqrt (tree, tree);
165 static tree fold_builtin_cbrt (tree, tree);
166 static tree fold_builtin_pow (tree, tree, tree, tree);
167 static tree fold_builtin_powi (tree, tree, tree, tree);
168 static tree fold_builtin_cos (tree, tree, tree);
169 static tree fold_builtin_cosh (tree, tree, tree);
170 static tree fold_builtin_tan (tree, tree);
171 static tree fold_builtin_trunc (tree, tree);
172 static tree fold_builtin_floor (tree, tree);
173 static tree fold_builtin_ceil (tree, tree);
174 static tree fold_builtin_round (tree, tree);
175 static tree fold_builtin_int_roundingfn (tree, tree);
176 static tree fold_builtin_bitop (tree, tree);
177 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
178 static tree fold_builtin_strchr (tree, tree, tree);
179 static tree fold_builtin_memchr (tree, tree, tree, tree);
180 static tree fold_builtin_memcmp (tree, tree, tree);
181 static tree fold_builtin_strcmp (tree, tree);
182 static tree fold_builtin_strncmp (tree, tree, tree);
183 static tree fold_builtin_signbit (tree, tree);
184 static tree fold_builtin_copysign (tree, tree, tree, tree);
185 static tree fold_builtin_isascii (tree);
186 static tree fold_builtin_toascii (tree);
187 static tree fold_builtin_isdigit (tree);
188 static tree fold_builtin_fabs (tree, tree);
189 static tree fold_builtin_abs (tree, tree);
190 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
192 static tree fold_builtin_n (tree, tree *, int, bool);
193 static tree fold_builtin_0 (tree, bool);
194 static tree fold_builtin_1 (tree, tree, bool);
195 static tree fold_builtin_2 (tree, tree, tree, bool);
196 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
197 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
198 static tree fold_builtin_varargs (tree, tree, bool);
200 static tree fold_builtin_strpbrk (tree, tree, tree);
201 static tree fold_builtin_strstr (tree, tree, tree);
202 static tree fold_builtin_strrchr (tree, tree, tree);
203 static tree fold_builtin_strcat (tree, tree);
204 static tree fold_builtin_strncat (tree, tree, tree);
205 static tree fold_builtin_strspn (tree, tree);
206 static tree fold_builtin_strcspn (tree, tree);
207 static tree fold_builtin_sprintf (tree, tree, tree, int);
209 static rtx expand_builtin_object_size (tree);
210 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
211 enum built_in_function);
212 static void maybe_emit_chk_warning (tree, enum built_in_function);
213 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
214 static void maybe_emit_free_warning (tree);
215 static tree fold_builtin_object_size (tree, tree);
216 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
217 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
218 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
219 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
220 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
221 enum built_in_function);
222 static bool init_target_chars (void);
224 static unsigned HOST_WIDE_INT target_newline;
225 static unsigned HOST_WIDE_INT target_percent;
226 static unsigned HOST_WIDE_INT target_c;
227 static unsigned HOST_WIDE_INT target_s;
228 static char target_percent_c[3];
229 static char target_percent_s[3];
230 static char target_percent_s_newline[4];
231 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
232 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
233 static tree do_mpfr_arg2 (tree, tree, tree,
234 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
235 static tree do_mpfr_arg3 (tree, tree, tree, tree,
236 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
237 static tree do_mpfr_sincos (tree, tree, tree);
238 static tree do_mpfr_bessel_n (tree, tree, tree,
239 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
240 const REAL_VALUE_TYPE *, bool);
241 static tree do_mpfr_remquo (tree, tree, tree);
242 static tree do_mpfr_lgamma_r (tree, tree, tree);
244 /* Return true if NODE should be considered for inline expansion regardless
245 of the optimization level. This means whenever a function is invoked with
246 its "internal" name, which normally contains the prefix "__builtin". */
248 static bool called_as_built_in (tree node)
250 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
251 if (strncmp (name, "__builtin_", 10) == 0)
253 if (strncmp (name, "__sync_", 7) == 0)
258 /* Return the alignment in bits of EXP, an object.
259 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
260 guessed alignment e.g. from type alignment. */
263 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
268 if (handled_component_p (exp))
270 HOST_WIDE_INT bitsize, bitpos;
272 enum machine_mode mode;
273 int unsignedp, volatilep;
275 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
276 &mode, &unsignedp, &volatilep, true);
278 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
283 if (TREE_CODE (offset) == PLUS_EXPR)
285 next_offset = TREE_OPERAND (offset, 0);
286 offset = TREE_OPERAND (offset, 1);
290 if (host_integerp (offset, 1))
292 /* Any overflow in calculating offset_bits won't change
295 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
298 inner = MIN (inner, (offset_bits & -offset_bits));
300 else if (TREE_CODE (offset) == MULT_EXPR
301 && host_integerp (TREE_OPERAND (offset, 1), 1))
303 /* Any overflow in calculating offset_factor won't change
305 unsigned offset_factor
306 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
310 inner = MIN (inner, (offset_factor & -offset_factor));
314 inner = MIN (inner, BITS_PER_UNIT);
317 offset = next_offset;
321 align = MIN (inner, DECL_ALIGN (exp));
322 #ifdef CONSTANT_ALIGNMENT
323 else if (CONSTANT_CLASS_P (exp))
324 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
326 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
327 || TREE_CODE (exp) == INDIRECT_REF)
328 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
330 align = MIN (align, inner);
331 return MIN (align, max_align);
334 /* Return the alignment in bits of EXP, a pointer valued expression.
335 But don't return more than MAX_ALIGN no matter what.
336 The alignment returned is, by default, the alignment of the thing that
337 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
339 Otherwise, look at the expression to see if we can do better, i.e., if the
340 expression is actually pointing at an object whose alignment is tighter. */
343 get_pointer_alignment (tree exp, unsigned int max_align)
345 unsigned int align, inner;
347 /* We rely on TER to compute accurate alignment information. */
348 if (!(optimize && flag_tree_ter))
351 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
354 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
355 align = MIN (align, max_align);
359 switch (TREE_CODE (exp))
362 exp = TREE_OPERAND (exp, 0);
363 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
366 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
367 align = MIN (inner, max_align);
370 case POINTER_PLUS_EXPR:
371 /* If sum of pointer + int, restrict our maximum alignment to that
372 imposed by the integer. If not, we can't do any better than
374 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
377 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
378 & (max_align / BITS_PER_UNIT - 1))
382 exp = TREE_OPERAND (exp, 0);
386 /* See what we are pointing at and look at its alignment. */
387 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
395 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
396 way, because it could contain a zero byte in the middle.
397 TREE_STRING_LENGTH is the size of the character array, not the string.
399 ONLY_VALUE should be nonzero if the result is not going to be emitted
400 into the instruction stream and zero if it is going to be expanded.
401 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
402 is returned, otherwise NULL, since
403 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
404 evaluate the side-effects.
406 The value returned is of type `ssizetype'.
408 Unfortunately, string_constant can't access the values of const char
409 arrays with initializers, so neither can we do so here. */
412 c_strlen (tree src, int only_value)
415 HOST_WIDE_INT offset;
420 if (TREE_CODE (src) == COND_EXPR
421 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
425 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
426 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
427 if (tree_int_cst_equal (len1, len2))
431 if (TREE_CODE (src) == COMPOUND_EXPR
432 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
433 return c_strlen (TREE_OPERAND (src, 1), only_value);
435 src = string_constant (src, &offset_node);
439 max = TREE_STRING_LENGTH (src) - 1;
440 ptr = TREE_STRING_POINTER (src);
442 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
444 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
445 compute the offset to the following null if we don't know where to
446 start searching for it. */
449 for (i = 0; i < max; i++)
453 /* We don't know the starting offset, but we do know that the string
454 has no internal zero bytes. We can assume that the offset falls
455 within the bounds of the string; otherwise, the programmer deserves
456 what he gets. Subtract the offset from the length of the string,
457 and return that. This would perhaps not be valid if we were dealing
458 with named arrays in addition to literal string constants. */
460 return size_diffop (size_int (max), offset_node);
463 /* We have a known offset into the string. Start searching there for
464 a null character if we can represent it as a single HOST_WIDE_INT. */
465 if (offset_node == 0)
467 else if (! host_integerp (offset_node, 0))
470 offset = tree_low_cst (offset_node, 0);
472 /* If the offset is known to be out of bounds, warn, and call strlen at
474 if (offset < 0 || offset > max)
476 /* Suppress multiple warnings for propagated constant strings. */
477 if (! TREE_NO_WARNING (src))
479 warning (0, "offset outside bounds of constant string");
480 TREE_NO_WARNING (src) = 1;
485 /* Use strlen to search for the first zero byte. Since any strings
486 constructed with build_string will have nulls appended, we win even
487 if we get handed something like (char[4])"abcd".
489 Since OFFSET is our starting index into the string, no further
490 calculation is needed. */
491 return ssize_int (strlen (ptr + offset));
494 /* Return a char pointer for a C string if it is a string constant
495 or sum of string constant and integer constant. */
502 src = string_constant (src, &offset_node);
506 if (offset_node == 0)
507 return TREE_STRING_POINTER (src);
508 else if (!host_integerp (offset_node, 1)
509 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
512 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
515 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
516 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
519 c_readstr (const char *str, enum machine_mode mode)
525 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
530 for (i = 0; i < GET_MODE_SIZE (mode); i++)
533 if (WORDS_BIG_ENDIAN)
534 j = GET_MODE_SIZE (mode) - i - 1;
535 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
536 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
537 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
539 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
542 ch = (unsigned char) str[i];
543 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
545 return immed_double_const (c[0], c[1], mode);
548 /* Cast a target constant CST to target CHAR and if that value fits into
549 host char type, return zero and put that value into variable pointed to by
553 target_char_cast (tree cst, char *p)
555 unsigned HOST_WIDE_INT val, hostval;
557 if (!host_integerp (cst, 1)
558 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
561 val = tree_low_cst (cst, 1);
562 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
563 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
566 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
567 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
576 /* Similar to save_expr, but assumes that arbitrary code is not executed
577 in between the multiple evaluations. In particular, we assume that a
578 non-addressable local variable will not be modified. */
581 builtin_save_expr (tree exp)
583 if (TREE_ADDRESSABLE (exp) == 0
584 && (TREE_CODE (exp) == PARM_DECL
585 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
588 return save_expr (exp);
591 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
592 times to get the address of either a higher stack frame, or a return
593 address located within it (depending on FNDECL_CODE). */
596 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
600 #ifdef INITIAL_FRAME_ADDRESS_RTX
601 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
605 /* For a zero count with __builtin_return_address, we don't care what
606 frame address we return, because target-specific definitions will
607 override us. Therefore frame pointer elimination is OK, and using
608 the soft frame pointer is OK.
610 For a nonzero count, or a zero count with __builtin_frame_address,
611 we require a stable offset from the current frame pointer to the
612 previous one, so we must use the hard frame pointer, and
613 we must disable frame pointer elimination. */
614 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
615 tem = frame_pointer_rtx;
618 tem = hard_frame_pointer_rtx;
620 /* Tell reload not to eliminate the frame pointer. */
621 crtl->accesses_prior_frames = 1;
625 /* Some machines need special handling before we can access
626 arbitrary frames. For example, on the SPARC, we must first flush
627 all register windows to the stack. */
628 #ifdef SETUP_FRAME_ADDRESSES
630 SETUP_FRAME_ADDRESSES ();
633 /* On the SPARC, the return address is not in the frame, it is in a
634 register. There is no way to access it off of the current frame
635 pointer, but it can be accessed off the previous frame pointer by
636 reading the value from the register window save area. */
637 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
638 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
642 /* Scan back COUNT frames to the specified frame. */
643 for (i = 0; i < count; i++)
645 /* Assume the dynamic chain pointer is in the word that the
646 frame address points to, unless otherwise specified. */
647 #ifdef DYNAMIC_CHAIN_ADDRESS
648 tem = DYNAMIC_CHAIN_ADDRESS (tem);
650 tem = memory_address (Pmode, tem);
651 tem = gen_frame_mem (Pmode, tem);
652 tem = copy_to_reg (tem);
655 /* For __builtin_frame_address, return what we've got. But, on
656 the SPARC for example, we may have to add a bias. */
657 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
658 #ifdef FRAME_ADDR_RTX
659 return FRAME_ADDR_RTX (tem);
664 /* For __builtin_return_address, get the return address from that frame. */
665 #ifdef RETURN_ADDR_RTX
666 tem = RETURN_ADDR_RTX (count, tem);
668 tem = memory_address (Pmode,
669 plus_constant (tem, GET_MODE_SIZE (Pmode)));
670 tem = gen_frame_mem (Pmode, tem);
675 /* Alias set used for setjmp buffer. */
676 static alias_set_type setjmp_alias_set = -1;
678 /* Construct the leading half of a __builtin_setjmp call. Control will
679 return to RECEIVER_LABEL. This is also called directly by the SJLJ
680 exception handling code. */
683 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
685 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
689 if (setjmp_alias_set == -1)
690 setjmp_alias_set = new_alias_set ();
692 buf_addr = convert_memory_address (Pmode, buf_addr);
694 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
696 /* We store the frame pointer and the address of receiver_label in
697 the buffer and use the rest of it for the stack save area, which
698 is machine-dependent. */
700 mem = gen_rtx_MEM (Pmode, buf_addr);
701 set_mem_alias_set (mem, setjmp_alias_set);
702 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
704 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
705 set_mem_alias_set (mem, setjmp_alias_set);
707 emit_move_insn (validize_mem (mem),
708 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
710 stack_save = gen_rtx_MEM (sa_mode,
711 plus_constant (buf_addr,
712 2 * GET_MODE_SIZE (Pmode)));
713 set_mem_alias_set (stack_save, setjmp_alias_set);
714 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
716 /* If there is further processing to do, do it. */
717 #ifdef HAVE_builtin_setjmp_setup
718 if (HAVE_builtin_setjmp_setup)
719 emit_insn (gen_builtin_setjmp_setup (buf_addr));
722 /* Tell optimize_save_area_alloca that extra work is going to
723 need to go on during alloca. */
724 cfun->calls_setjmp = 1;
726 /* We have a nonlocal label. */
727 cfun->has_nonlocal_label = 1;
730 /* Construct the trailing part of a __builtin_setjmp call. This is
731 also called directly by the SJLJ exception handling code. */
734 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
736 /* Clobber the FP when we get here, so we have to make sure it's
737 marked as used by this function. */
738 emit_use (hard_frame_pointer_rtx);
740 /* Mark the static chain as clobbered here so life information
741 doesn't get messed up for it. */
742 emit_clobber (static_chain_rtx);
744 /* Now put in the code to restore the frame pointer, and argument
745 pointer, if needed. */
746 #ifdef HAVE_nonlocal_goto
747 if (! HAVE_nonlocal_goto)
750 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
751 /* This might change the hard frame pointer in ways that aren't
752 apparent to early optimization passes, so force a clobber. */
753 emit_clobber (hard_frame_pointer_rtx);
756 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
757 if (fixed_regs[ARG_POINTER_REGNUM])
759 #ifdef ELIMINABLE_REGS
761 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
763 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
764 if (elim_regs[i].from == ARG_POINTER_REGNUM
765 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
768 if (i == ARRAY_SIZE (elim_regs))
771 /* Now restore our arg pointer from the address at which it
772 was saved in our stack frame. */
773 emit_move_insn (crtl->args.internal_arg_pointer,
774 copy_to_reg (get_arg_pointer_save_area ()));
779 #ifdef HAVE_builtin_setjmp_receiver
780 if (HAVE_builtin_setjmp_receiver)
781 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
784 #ifdef HAVE_nonlocal_goto_receiver
785 if (HAVE_nonlocal_goto_receiver)
786 emit_insn (gen_nonlocal_goto_receiver ());
791 /* We must not allow the code we just generated to be reordered by
792 scheduling. Specifically, the update of the frame pointer must
793 happen immediately, not later. */
794 emit_insn (gen_blockage ());
797 /* __builtin_longjmp is passed a pointer to an array of five words (not
798 all will be used on all machines). It operates similarly to the C
799 library function of the same name, but is more efficient. Much of
800 the code below is copied from the handling of non-local gotos. */
803 expand_builtin_longjmp (rtx buf_addr, rtx value)
805 rtx fp, lab, stack, insn, last;
806 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
808 /* DRAP is needed for stack realign if longjmp is expanded to current
810 if (SUPPORTS_STACK_ALIGNMENT)
811 crtl->need_drap = true;
813 if (setjmp_alias_set == -1)
814 setjmp_alias_set = new_alias_set ();
816 buf_addr = convert_memory_address (Pmode, buf_addr);
818 buf_addr = force_reg (Pmode, buf_addr);
820 /* We used to store value in static_chain_rtx, but that fails if pointers
821 are smaller than integers. We instead require that the user must pass
822 a second argument of 1, because that is what builtin_setjmp will
823 return. This also makes EH slightly more efficient, since we are no
824 longer copying around a value that we don't care about. */
825 gcc_assert (value == const1_rtx);
827 last = get_last_insn ();
828 #ifdef HAVE_builtin_longjmp
829 if (HAVE_builtin_longjmp)
830 emit_insn (gen_builtin_longjmp (buf_addr));
834 fp = gen_rtx_MEM (Pmode, buf_addr);
835 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
836 GET_MODE_SIZE (Pmode)));
838 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
839 2 * GET_MODE_SIZE (Pmode)));
840 set_mem_alias_set (fp, setjmp_alias_set);
841 set_mem_alias_set (lab, setjmp_alias_set);
842 set_mem_alias_set (stack, setjmp_alias_set);
844 /* Pick up FP, label, and SP from the block and jump. This code is
845 from expand_goto in stmt.c; see there for detailed comments. */
846 #ifdef HAVE_nonlocal_goto
847 if (HAVE_nonlocal_goto)
848 /* We have to pass a value to the nonlocal_goto pattern that will
849 get copied into the static_chain pointer, but it does not matter
850 what that value is, because builtin_setjmp does not use it. */
851 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
855 lab = copy_to_reg (lab);
857 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
858 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
860 emit_move_insn (hard_frame_pointer_rtx, fp);
861 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
863 emit_use (hard_frame_pointer_rtx);
864 emit_use (stack_pointer_rtx);
865 emit_indirect_jump (lab);
869 /* Search backwards and mark the jump insn as a non-local goto.
870 Note that this precludes the use of __builtin_longjmp to a
871 __builtin_setjmp target in the same function. However, we've
872 already cautioned the user that these functions are for
873 internal exception handling use only. */
874 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
876 gcc_assert (insn != last);
880 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
883 else if (CALL_P (insn))
888 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
889 and the address of the save area. */
892 expand_builtin_nonlocal_goto (tree exp)
894 tree t_label, t_save_area;
895 rtx r_label, r_save_area, r_fp, r_sp, insn;
897 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
900 t_label = CALL_EXPR_ARG (exp, 0);
901 t_save_area = CALL_EXPR_ARG (exp, 1);
903 r_label = expand_normal (t_label);
904 r_label = convert_memory_address (Pmode, r_label);
905 r_save_area = expand_normal (t_save_area);
906 r_save_area = convert_memory_address (Pmode, r_save_area);
907 /* Copy the address of the save location to a register just in case it was based
908 on the frame pointer. */
909 r_save_area = copy_to_reg (r_save_area);
910 r_fp = gen_rtx_MEM (Pmode, r_save_area);
911 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
912 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
914 crtl->has_nonlocal_goto = 1;
916 #ifdef HAVE_nonlocal_goto
917 /* ??? We no longer need to pass the static chain value, afaik. */
918 if (HAVE_nonlocal_goto)
919 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
923 r_label = copy_to_reg (r_label);
925 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
926 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
928 /* Restore frame pointer for containing function.
929 This sets the actual hard register used for the frame pointer
930 to the location of the function's incoming static chain info.
931 The non-local goto handler will then adjust it to contain the
932 proper value and reload the argument pointer, if needed. */
933 emit_move_insn (hard_frame_pointer_rtx, r_fp);
934 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
936 /* USE of hard_frame_pointer_rtx added for consistency;
937 not clear if really needed. */
938 emit_use (hard_frame_pointer_rtx);
939 emit_use (stack_pointer_rtx);
941 /* If the architecture is using a GP register, we must
942 conservatively assume that the target function makes use of it.
943 The prologue of functions with nonlocal gotos must therefore
944 initialize the GP register to the appropriate value, and we
945 must then make sure that this value is live at the point
946 of the jump. (Note that this doesn't necessarily apply
947 to targets with a nonlocal_goto pattern; they are free
948 to implement it in their own way. Note also that this is
949 a no-op if the GP register is a global invariant.) */
950 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
951 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
952 emit_use (pic_offset_table_rtx);
954 emit_indirect_jump (r_label);
957 /* Search backwards to the jump insn and mark it as a
959 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
963 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
966 else if (CALL_P (insn))
973 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
974 (not all will be used on all machines) that was passed to __builtin_setjmp.
975 It updates the stack pointer in that block to correspond to the current
979 expand_builtin_update_setjmp_buf (rtx buf_addr)
981 enum machine_mode sa_mode = Pmode;
985 #ifdef HAVE_save_stack_nonlocal
986 if (HAVE_save_stack_nonlocal)
987 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
989 #ifdef STACK_SAVEAREA_MODE
990 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
994 = gen_rtx_MEM (sa_mode,
997 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1001 emit_insn (gen_setjmp ());
1004 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1007 /* Expand a call to __builtin_prefetch. For a target that does not support
1008 data prefetch, evaluate the memory address argument in case it has side
1012 expand_builtin_prefetch (tree exp)
1014 tree arg0, arg1, arg2;
1018 if (!validate_arglist (exp, POINTER_TYPE, 0))
1021 arg0 = CALL_EXPR_ARG (exp, 0);
1023 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1024 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1026 nargs = call_expr_nargs (exp);
1028 arg1 = CALL_EXPR_ARG (exp, 1);
1030 arg1 = integer_zero_node;
1032 arg2 = CALL_EXPR_ARG (exp, 2);
1034 arg2 = build_int_cst (NULL_TREE, 3);
1036 /* Argument 0 is an address. */
1037 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1039 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1040 if (TREE_CODE (arg1) != INTEGER_CST)
1042 error ("second argument to %<__builtin_prefetch%> must be a constant");
1043 arg1 = integer_zero_node;
1045 op1 = expand_normal (arg1);
1046 /* Argument 1 must be either zero or one. */
1047 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1049 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1054 /* Argument 2 (locality) must be a compile-time constant int. */
1055 if (TREE_CODE (arg2) != INTEGER_CST)
1057 error ("third argument to %<__builtin_prefetch%> must be a constant");
1058 arg2 = integer_zero_node;
1060 op2 = expand_normal (arg2);
1061 /* Argument 2 must be 0, 1, 2, or 3. */
1062 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1064 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1068 #ifdef HAVE_prefetch
1071 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1073 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1074 || (GET_MODE (op0) != Pmode))
1076 op0 = convert_memory_address (Pmode, op0);
1077 op0 = force_reg (Pmode, op0);
1079 emit_insn (gen_prefetch (op0, op1, op2));
1083 /* Don't do anything with direct references to volatile memory, but
1084 generate code to handle other side effects. */
1085 if (!MEM_P (op0) && side_effects_p (op0))
1089 /* Get a MEM rtx for expression EXP which is the address of an operand
1090 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1091 the maximum length of the block of memory that might be accessed or
1095 get_memory_rtx (tree exp, tree len)
1097 tree orig_exp = exp;
1101 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1102 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1103 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1104 exp = TREE_OPERAND (exp, 0);
1106 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1107 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1109 /* Get an expression we can use to find the attributes to assign to MEM.
1110 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1111 we can. First remove any nops. */
1112 while (CONVERT_EXPR_P (exp)
1113 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1114 exp = TREE_OPERAND (exp, 0);
1117 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1118 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1119 && host_integerp (TREE_OPERAND (exp, 1), 0)
1120 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1121 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1122 else if (TREE_CODE (exp) == ADDR_EXPR)
1123 exp = TREE_OPERAND (exp, 0);
1124 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1125 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1129 /* Honor attributes derived from exp, except for the alias set
1130 (as builtin stringops may alias with anything) and the size
1131 (as stringops may access multiple array elements). */
1134 set_mem_attributes (mem, exp, 0);
1137 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1139 /* Allow the string and memory builtins to overflow from one
1140 field into another, see http://gcc.gnu.org/PR23561.
1141 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1142 memory accessed by the string or memory builtin will fit
1143 within the field. */
1144 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1146 tree mem_expr = MEM_EXPR (mem);
1147 HOST_WIDE_INT offset = -1, length = -1;
1150 while (TREE_CODE (inner) == ARRAY_REF
1151 || CONVERT_EXPR_P (inner)
1152 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1153 || TREE_CODE (inner) == SAVE_EXPR)
1154 inner = TREE_OPERAND (inner, 0);
1156 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1158 if (MEM_OFFSET (mem)
1159 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1160 offset = INTVAL (MEM_OFFSET (mem));
1162 if (offset >= 0 && len && host_integerp (len, 0))
1163 length = tree_low_cst (len, 0);
1165 while (TREE_CODE (inner) == COMPONENT_REF)
1167 tree field = TREE_OPERAND (inner, 1);
1168 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1169 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1171 /* Bitfields are generally not byte-addressable. */
1172 gcc_assert (!DECL_BIT_FIELD (field)
1173 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1174 % BITS_PER_UNIT) == 0
1175 && host_integerp (DECL_SIZE (field), 0)
1176 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1177 % BITS_PER_UNIT) == 0));
1179 /* If we can prove that the memory starting at XEXP (mem, 0) and
1180 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1181 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1182 fields without DECL_SIZE_UNIT like flexible array members. */
1184 && DECL_SIZE_UNIT (field)
1185 && host_integerp (DECL_SIZE_UNIT (field), 0))
1188 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1191 && offset + length <= size)
1196 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1197 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1198 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1206 mem_expr = TREE_OPERAND (mem_expr, 0);
1207 inner = TREE_OPERAND (inner, 0);
1210 if (mem_expr == NULL)
1212 if (mem_expr != MEM_EXPR (mem))
1214 set_mem_expr (mem, mem_expr);
1215 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1218 set_mem_alias_set (mem, 0);
1219 set_mem_size (mem, NULL_RTX);
1225 /* Built-in functions to perform an untyped call and return. */
1227 /* For each register that may be used for calling a function, this
1228 gives a mode used to copy the register's value. VOIDmode indicates
1229 the register is not used for calling a function. If the machine
1230 has register windows, this gives only the outbound registers.
1231 INCOMING_REGNO gives the corresponding inbound register. */
1232 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1234 /* For each register that may be used for returning values, this gives
1235 a mode used to copy the register's value. VOIDmode indicates the
1236 register is not used for returning values. If the machine has
1237 register windows, this gives only the outbound registers.
1238 INCOMING_REGNO gives the corresponding inbound register. */
1239 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1241 /* For each register that may be used for calling a function, this
1242 gives the offset of that register into the block returned by
1243 __builtin_apply_args. 0 indicates that the register is not
1244 used for calling a function. */
1245 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1247 /* Return the size required for the block returned by __builtin_apply_args,
1248 and initialize apply_args_mode. */
1251 apply_args_size (void)
1253 static int size = -1;
1256 enum machine_mode mode;
1258 /* The values computed by this function never change. */
1261 /* The first value is the incoming arg-pointer. */
1262 size = GET_MODE_SIZE (Pmode);
1264 /* The second value is the structure value address unless this is
1265 passed as an "invisible" first argument. */
1266 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1267 size += GET_MODE_SIZE (Pmode);
1269 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1270 if (FUNCTION_ARG_REGNO_P (regno))
1272 mode = reg_raw_mode[regno];
1274 gcc_assert (mode != VOIDmode);
1276 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1277 if (size % align != 0)
1278 size = CEIL (size, align) * align;
1279 apply_args_reg_offset[regno] = size;
1280 size += GET_MODE_SIZE (mode);
1281 apply_args_mode[regno] = mode;
1285 apply_args_mode[regno] = VOIDmode;
1286 apply_args_reg_offset[regno] = 0;
1292 /* Return the size required for the block returned by __builtin_apply,
1293 and initialize apply_result_mode. */
1296 apply_result_size (void)
1298 static int size = -1;
1300 enum machine_mode mode;
1302 /* The values computed by this function never change. */
1307 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1308 if (FUNCTION_VALUE_REGNO_P (regno))
1310 mode = reg_raw_mode[regno];
1312 gcc_assert (mode != VOIDmode);
1314 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1315 if (size % align != 0)
1316 size = CEIL (size, align) * align;
1317 size += GET_MODE_SIZE (mode);
1318 apply_result_mode[regno] = mode;
1321 apply_result_mode[regno] = VOIDmode;
1323 /* Allow targets that use untyped_call and untyped_return to override
1324 the size so that machine-specific information can be stored here. */
1325 #ifdef APPLY_RESULT_SIZE
1326 size = APPLY_RESULT_SIZE;
1332 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1333 /* Create a vector describing the result block RESULT. If SAVEP is true,
1334 the result block is used to save the values; otherwise it is used to
1335 restore the values. */
1338 result_vector (int savep, rtx result)
1340 int regno, size, align, nelts;
1341 enum machine_mode mode;
1343 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1346 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1347 if ((mode = apply_result_mode[regno]) != VOIDmode)
1349 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1350 if (size % align != 0)
1351 size = CEIL (size, align) * align;
1352 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1353 mem = adjust_address (result, mode, size);
1354 savevec[nelts++] = (savep
1355 ? gen_rtx_SET (VOIDmode, mem, reg)
1356 : gen_rtx_SET (VOIDmode, reg, mem));
1357 size += GET_MODE_SIZE (mode);
1359 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1361 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1363 /* Save the state required to perform an untyped call with the same
1364 arguments as were passed to the current function. */
1367 expand_builtin_apply_args_1 (void)
1370 int size, align, regno;
1371 enum machine_mode mode;
1372 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1374 /* Create a block where the arg-pointer, structure value address,
1375 and argument registers can be saved. */
1376 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1378 /* Walk past the arg-pointer and structure value address. */
1379 size = GET_MODE_SIZE (Pmode);
1380 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1381 size += GET_MODE_SIZE (Pmode);
1383 /* Save each register used in calling a function to the block. */
1384 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1385 if ((mode = apply_args_mode[regno]) != VOIDmode)
1387 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1388 if (size % align != 0)
1389 size = CEIL (size, align) * align;
1391 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1393 emit_move_insn (adjust_address (registers, mode, size), tem);
1394 size += GET_MODE_SIZE (mode);
1397 /* Save the arg pointer to the block. */
1398 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1399 #ifdef STACK_GROWS_DOWNWARD
1400 /* We need the pointer as the caller actually passed them to us, not
1401 as we might have pretended they were passed. Make sure it's a valid
1402 operand, as emit_move_insn isn't expected to handle a PLUS. */
1404 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1407 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1409 size = GET_MODE_SIZE (Pmode);
1411 /* Save the structure value address unless this is passed as an
1412 "invisible" first argument. */
1413 if (struct_incoming_value)
1415 emit_move_insn (adjust_address (registers, Pmode, size),
1416 copy_to_reg (struct_incoming_value));
1417 size += GET_MODE_SIZE (Pmode);
1420 /* Return the address of the block. */
1421 return copy_addr_to_reg (XEXP (registers, 0));
1424 /* __builtin_apply_args returns block of memory allocated on
1425 the stack into which is stored the arg pointer, structure
1426 value address, static chain, and all the registers that might
1427 possibly be used in performing a function call. The code is
1428 moved to the start of the function so the incoming values are
1432 expand_builtin_apply_args (void)
1434 /* Don't do __builtin_apply_args more than once in a function.
1435 Save the result of the first call and reuse it. */
1436 if (apply_args_value != 0)
1437 return apply_args_value;
1439 /* When this function is called, it means that registers must be
1440 saved on entry to this function. So we migrate the
1441 call to the first insn of this function. */
1446 temp = expand_builtin_apply_args_1 ();
1450 apply_args_value = temp;
1452 /* Put the insns after the NOTE that starts the function.
1453 If this is inside a start_sequence, make the outer-level insn
1454 chain current, so the code is placed at the start of the
1455 function. If internal_arg_pointer is a non-virtual pseudo,
1456 it needs to be placed after the function that initializes
1458 push_topmost_sequence ();
1459 if (REG_P (crtl->args.internal_arg_pointer)
1460 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1461 emit_insn_before (seq, parm_birth_insn);
1463 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1464 pop_topmost_sequence ();
1469 /* Perform an untyped call and save the state required to perform an
1470 untyped return of whatever value was returned by the given function. */
1473 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1475 int size, align, regno;
1476 enum machine_mode mode;
1477 rtx incoming_args, result, reg, dest, src, call_insn;
1478 rtx old_stack_level = 0;
1479 rtx call_fusage = 0;
1480 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1482 arguments = convert_memory_address (Pmode, arguments);
1484 /* Create a block where the return registers can be saved. */
1485 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1487 /* Fetch the arg pointer from the ARGUMENTS block. */
1488 incoming_args = gen_reg_rtx (Pmode);
1489 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1490 #ifndef STACK_GROWS_DOWNWARD
1491 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1492 incoming_args, 0, OPTAB_LIB_WIDEN);
1495 /* Push a new argument block and copy the arguments. Do not allow
1496 the (potential) memcpy call below to interfere with our stack
1498 do_pending_stack_adjust ();
1501 /* Save the stack with nonlocal if available. */
1502 #ifdef HAVE_save_stack_nonlocal
1503 if (HAVE_save_stack_nonlocal)
1504 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1507 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1509 /* Allocate a block of memory onto the stack and copy the memory
1510 arguments to the outgoing arguments address. */
1511 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1513 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1514 may have already set current_function_calls_alloca to true.
1515 current_function_calls_alloca won't be set if argsize is zero,
1516 so we have to guarantee need_drap is true here. */
1517 if (SUPPORTS_STACK_ALIGNMENT)
1518 crtl->need_drap = true;
1520 dest = virtual_outgoing_args_rtx;
1521 #ifndef STACK_GROWS_DOWNWARD
1522 if (GET_CODE (argsize) == CONST_INT)
1523 dest = plus_constant (dest, -INTVAL (argsize));
1525 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1527 dest = gen_rtx_MEM (BLKmode, dest);
1528 set_mem_align (dest, PARM_BOUNDARY);
1529 src = gen_rtx_MEM (BLKmode, incoming_args);
1530 set_mem_align (src, PARM_BOUNDARY);
1531 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1533 /* Refer to the argument block. */
1535 arguments = gen_rtx_MEM (BLKmode, arguments);
1536 set_mem_align (arguments, PARM_BOUNDARY);
1538 /* Walk past the arg-pointer and structure value address. */
1539 size = GET_MODE_SIZE (Pmode);
1541 size += GET_MODE_SIZE (Pmode);
1543 /* Restore each of the registers previously saved. Make USE insns
1544 for each of these registers for use in making the call. */
1545 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1546 if ((mode = apply_args_mode[regno]) != VOIDmode)
1548 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1549 if (size % align != 0)
1550 size = CEIL (size, align) * align;
1551 reg = gen_rtx_REG (mode, regno);
1552 emit_move_insn (reg, adjust_address (arguments, mode, size));
1553 use_reg (&call_fusage, reg);
1554 size += GET_MODE_SIZE (mode);
1557 /* Restore the structure value address unless this is passed as an
1558 "invisible" first argument. */
1559 size = GET_MODE_SIZE (Pmode);
1562 rtx value = gen_reg_rtx (Pmode);
1563 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1564 emit_move_insn (struct_value, value);
1565 if (REG_P (struct_value))
1566 use_reg (&call_fusage, struct_value);
1567 size += GET_MODE_SIZE (Pmode);
1570 /* All arguments and registers used for the call are set up by now! */
1571 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1573 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1574 and we don't want to load it into a register as an optimization,
1575 because prepare_call_address already did it if it should be done. */
1576 if (GET_CODE (function) != SYMBOL_REF)
1577 function = memory_address (FUNCTION_MODE, function);
1579 /* Generate the actual call instruction and save the return value. */
1580 #ifdef HAVE_untyped_call
1581 if (HAVE_untyped_call)
1582 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1583 result, result_vector (1, result)));
1586 #ifdef HAVE_call_value
1587 if (HAVE_call_value)
1591 /* Locate the unique return register. It is not possible to
1592 express a call that sets more than one return register using
1593 call_value; use untyped_call for that. In fact, untyped_call
1594 only needs to save the return registers in the given block. */
1595 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1596 if ((mode = apply_result_mode[regno]) != VOIDmode)
1598 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1600 valreg = gen_rtx_REG (mode, regno);
1603 emit_call_insn (GEN_CALL_VALUE (valreg,
1604 gen_rtx_MEM (FUNCTION_MODE, function),
1605 const0_rtx, NULL_RTX, const0_rtx));
1607 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1613 /* Find the CALL insn we just emitted, and attach the register usage
1615 call_insn = last_call_insn ();
1616 add_function_usage_to (call_insn, call_fusage);
1618 /* Restore the stack. */
1619 #ifdef HAVE_save_stack_nonlocal
1620 if (HAVE_save_stack_nonlocal)
1621 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1624 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1628 /* Return the address of the result block. */
1629 result = copy_addr_to_reg (XEXP (result, 0));
1630 return convert_memory_address (ptr_mode, result);
1633 /* Perform an untyped return. */
1636 expand_builtin_return (rtx result)
1638 int size, align, regno;
1639 enum machine_mode mode;
1641 rtx call_fusage = 0;
1643 result = convert_memory_address (Pmode, result);
1645 apply_result_size ();
1646 result = gen_rtx_MEM (BLKmode, result);
1648 #ifdef HAVE_untyped_return
1649 if (HAVE_untyped_return)
1651 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1657 /* Restore the return value and note that each value is used. */
1659 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1660 if ((mode = apply_result_mode[regno]) != VOIDmode)
1662 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1663 if (size % align != 0)
1664 size = CEIL (size, align) * align;
1665 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1666 emit_move_insn (reg, adjust_address (result, mode, size));
1668 push_to_sequence (call_fusage);
1670 call_fusage = get_insns ();
1672 size += GET_MODE_SIZE (mode);
1675 /* Put the USE insns before the return. */
1676 emit_insn (call_fusage);
1678 /* Return whatever values was restored by jumping directly to the end
1680 expand_naked_return ();
1683 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1685 static enum type_class
1686 type_to_class (tree type)
1688 switch (TREE_CODE (type))
1690 case VOID_TYPE: return void_type_class;
1691 case INTEGER_TYPE: return integer_type_class;
1692 case ENUMERAL_TYPE: return enumeral_type_class;
1693 case BOOLEAN_TYPE: return boolean_type_class;
1694 case POINTER_TYPE: return pointer_type_class;
1695 case REFERENCE_TYPE: return reference_type_class;
1696 case OFFSET_TYPE: return offset_type_class;
1697 case REAL_TYPE: return real_type_class;
1698 case COMPLEX_TYPE: return complex_type_class;
1699 case FUNCTION_TYPE: return function_type_class;
1700 case METHOD_TYPE: return method_type_class;
1701 case RECORD_TYPE: return record_type_class;
1703 case QUAL_UNION_TYPE: return union_type_class;
1704 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1705 ? string_type_class : array_type_class);
1706 case LANG_TYPE: return lang_type_class;
1707 default: return no_type_class;
1711 /* Expand a call EXP to __builtin_classify_type. */
1714 expand_builtin_classify_type (tree exp)
1716 if (call_expr_nargs (exp))
1717 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1718 return GEN_INT (no_type_class);
1721 /* This helper macro, meant to be used in mathfn_built_in below,
1722 determines which among a set of three builtin math functions is
1723 appropriate for a given type mode. The `F' and `L' cases are
1724 automatically generated from the `double' case. */
1725 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1726 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1727 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1728 fcodel = BUILT_IN_MATHFN##L ; break;
1729 /* Similar to above, but appends _R after any F/L suffix. */
1730 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1731 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1732 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1733 fcodel = BUILT_IN_MATHFN##L_R ; break;
1735 /* Return mathematic function equivalent to FN but operating directly
1736 on TYPE, if available. If IMPLICIT is true find the function in
1737 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1738 can't do the conversion, return zero. */
1741 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1743 tree const *const fn_arr
1744 = implicit ? implicit_built_in_decls : built_in_decls;
1745 enum built_in_function fcode, fcodef, fcodel;
1749 CASE_MATHFN (BUILT_IN_ACOS)
1750 CASE_MATHFN (BUILT_IN_ACOSH)
1751 CASE_MATHFN (BUILT_IN_ASIN)
1752 CASE_MATHFN (BUILT_IN_ASINH)
1753 CASE_MATHFN (BUILT_IN_ATAN)
1754 CASE_MATHFN (BUILT_IN_ATAN2)
1755 CASE_MATHFN (BUILT_IN_ATANH)
1756 CASE_MATHFN (BUILT_IN_CBRT)
1757 CASE_MATHFN (BUILT_IN_CEIL)
1758 CASE_MATHFN (BUILT_IN_CEXPI)
1759 CASE_MATHFN (BUILT_IN_COPYSIGN)
1760 CASE_MATHFN (BUILT_IN_COS)
1761 CASE_MATHFN (BUILT_IN_COSH)
1762 CASE_MATHFN (BUILT_IN_DREM)
1763 CASE_MATHFN (BUILT_IN_ERF)
1764 CASE_MATHFN (BUILT_IN_ERFC)
1765 CASE_MATHFN (BUILT_IN_EXP)
1766 CASE_MATHFN (BUILT_IN_EXP10)
1767 CASE_MATHFN (BUILT_IN_EXP2)
1768 CASE_MATHFN (BUILT_IN_EXPM1)
1769 CASE_MATHFN (BUILT_IN_FABS)
1770 CASE_MATHFN (BUILT_IN_FDIM)
1771 CASE_MATHFN (BUILT_IN_FLOOR)
1772 CASE_MATHFN (BUILT_IN_FMA)
1773 CASE_MATHFN (BUILT_IN_FMAX)
1774 CASE_MATHFN (BUILT_IN_FMIN)
1775 CASE_MATHFN (BUILT_IN_FMOD)
1776 CASE_MATHFN (BUILT_IN_FREXP)
1777 CASE_MATHFN (BUILT_IN_GAMMA)
1778 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1779 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1780 CASE_MATHFN (BUILT_IN_HYPOT)
1781 CASE_MATHFN (BUILT_IN_ILOGB)
1782 CASE_MATHFN (BUILT_IN_INF)
1783 CASE_MATHFN (BUILT_IN_ISINF)
1784 CASE_MATHFN (BUILT_IN_J0)
1785 CASE_MATHFN (BUILT_IN_J1)
1786 CASE_MATHFN (BUILT_IN_JN)
1787 CASE_MATHFN (BUILT_IN_LCEIL)
1788 CASE_MATHFN (BUILT_IN_LDEXP)
1789 CASE_MATHFN (BUILT_IN_LFLOOR)
1790 CASE_MATHFN (BUILT_IN_LGAMMA)
1791 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1792 CASE_MATHFN (BUILT_IN_LLCEIL)
1793 CASE_MATHFN (BUILT_IN_LLFLOOR)
1794 CASE_MATHFN (BUILT_IN_LLRINT)
1795 CASE_MATHFN (BUILT_IN_LLROUND)
1796 CASE_MATHFN (BUILT_IN_LOG)
1797 CASE_MATHFN (BUILT_IN_LOG10)
1798 CASE_MATHFN (BUILT_IN_LOG1P)
1799 CASE_MATHFN (BUILT_IN_LOG2)
1800 CASE_MATHFN (BUILT_IN_LOGB)
1801 CASE_MATHFN (BUILT_IN_LRINT)
1802 CASE_MATHFN (BUILT_IN_LROUND)
1803 CASE_MATHFN (BUILT_IN_MODF)
1804 CASE_MATHFN (BUILT_IN_NAN)
1805 CASE_MATHFN (BUILT_IN_NANS)
1806 CASE_MATHFN (BUILT_IN_NEARBYINT)
1807 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1808 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1809 CASE_MATHFN (BUILT_IN_POW)
1810 CASE_MATHFN (BUILT_IN_POWI)
1811 CASE_MATHFN (BUILT_IN_POW10)
1812 CASE_MATHFN (BUILT_IN_REMAINDER)
1813 CASE_MATHFN (BUILT_IN_REMQUO)
1814 CASE_MATHFN (BUILT_IN_RINT)
1815 CASE_MATHFN (BUILT_IN_ROUND)
1816 CASE_MATHFN (BUILT_IN_SCALB)
1817 CASE_MATHFN (BUILT_IN_SCALBLN)
1818 CASE_MATHFN (BUILT_IN_SCALBN)
1819 CASE_MATHFN (BUILT_IN_SIGNBIT)
1820 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1821 CASE_MATHFN (BUILT_IN_SIN)
1822 CASE_MATHFN (BUILT_IN_SINCOS)
1823 CASE_MATHFN (BUILT_IN_SINH)
1824 CASE_MATHFN (BUILT_IN_SQRT)
1825 CASE_MATHFN (BUILT_IN_TAN)
1826 CASE_MATHFN (BUILT_IN_TANH)
1827 CASE_MATHFN (BUILT_IN_TGAMMA)
1828 CASE_MATHFN (BUILT_IN_TRUNC)
1829 CASE_MATHFN (BUILT_IN_Y0)
1830 CASE_MATHFN (BUILT_IN_Y1)
1831 CASE_MATHFN (BUILT_IN_YN)
1837 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1838 return fn_arr[fcode];
1839 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1840 return fn_arr[fcodef];
1841 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1842 return fn_arr[fcodel];
1847 /* Like mathfn_built_in_1(), but always use the implicit array. */
1850 mathfn_built_in (tree type, enum built_in_function fn)
1852 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1855 /* If errno must be maintained, expand the RTL to check if the result,
1856 TARGET, of a built-in function call, EXP, is NaN, and if so set
1860 expand_errno_check (tree exp, rtx target)
1862 rtx lab = gen_label_rtx ();
1864 /* Test the result; if it is NaN, set errno=EDOM because
1865 the argument was not in the domain. */
1866 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1870 /* If this built-in doesn't throw an exception, set errno directly. */
1871 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1873 #ifdef GEN_ERRNO_RTX
1874 rtx errno_rtx = GEN_ERRNO_RTX;
1877 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1879 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1885 /* Make sure the library call isn't expanded as a tail call. */
1886 CALL_EXPR_TAILCALL (exp) = 0;
1888 /* We can't set errno=EDOM directly; let the library call do it.
1889 Pop the arguments right away in case the call gets deleted. */
1891 expand_call (exp, target, 0);
1896 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1897 Return NULL_RTX if a normal call should be emitted rather than expanding
1898 the function in-line. EXP is the expression that is a call to the builtin
1899 function; if convenient, the result should be placed in TARGET.
1900 SUBTARGET may be used as the target for computing one of EXP's operands. */
1903 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1905 optab builtin_optab;
1906 rtx op0, insns, before_call;
1907 tree fndecl = get_callee_fndecl (exp);
1908 enum machine_mode mode;
1909 bool errno_set = false;
1912 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1915 arg = CALL_EXPR_ARG (exp, 0);
1917 switch (DECL_FUNCTION_CODE (fndecl))
1919 CASE_FLT_FN (BUILT_IN_SQRT):
1920 errno_set = ! tree_expr_nonnegative_p (arg);
1921 builtin_optab = sqrt_optab;
1923 CASE_FLT_FN (BUILT_IN_EXP):
1924 errno_set = true; builtin_optab = exp_optab; break;
1925 CASE_FLT_FN (BUILT_IN_EXP10):
1926 CASE_FLT_FN (BUILT_IN_POW10):
1927 errno_set = true; builtin_optab = exp10_optab; break;
1928 CASE_FLT_FN (BUILT_IN_EXP2):
1929 errno_set = true; builtin_optab = exp2_optab; break;
1930 CASE_FLT_FN (BUILT_IN_EXPM1):
1931 errno_set = true; builtin_optab = expm1_optab; break;
1932 CASE_FLT_FN (BUILT_IN_LOGB):
1933 errno_set = true; builtin_optab = logb_optab; break;
1934 CASE_FLT_FN (BUILT_IN_LOG):
1935 errno_set = true; builtin_optab = log_optab; break;
1936 CASE_FLT_FN (BUILT_IN_LOG10):
1937 errno_set = true; builtin_optab = log10_optab; break;
1938 CASE_FLT_FN (BUILT_IN_LOG2):
1939 errno_set = true; builtin_optab = log2_optab; break;
1940 CASE_FLT_FN (BUILT_IN_LOG1P):
1941 errno_set = true; builtin_optab = log1p_optab; break;
1942 CASE_FLT_FN (BUILT_IN_ASIN):
1943 builtin_optab = asin_optab; break;
1944 CASE_FLT_FN (BUILT_IN_ACOS):
1945 builtin_optab = acos_optab; break;
1946 CASE_FLT_FN (BUILT_IN_TAN):
1947 builtin_optab = tan_optab; break;
1948 CASE_FLT_FN (BUILT_IN_ATAN):
1949 builtin_optab = atan_optab; break;
1950 CASE_FLT_FN (BUILT_IN_FLOOR):
1951 builtin_optab = floor_optab; break;
1952 CASE_FLT_FN (BUILT_IN_CEIL):
1953 builtin_optab = ceil_optab; break;
1954 CASE_FLT_FN (BUILT_IN_TRUNC):
1955 builtin_optab = btrunc_optab; break;
1956 CASE_FLT_FN (BUILT_IN_ROUND):
1957 builtin_optab = round_optab; break;
1958 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1959 builtin_optab = nearbyint_optab;
1960 if (flag_trapping_math)
1962 /* Else fallthrough and expand as rint. */
1963 CASE_FLT_FN (BUILT_IN_RINT):
1964 builtin_optab = rint_optab; break;
1969 /* Make a suitable register to place result in. */
1970 mode = TYPE_MODE (TREE_TYPE (exp));
1972 if (! flag_errno_math || ! HONOR_NANS (mode))
1975 /* Before working hard, check whether the instruction is available. */
1976 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1978 target = gen_reg_rtx (mode);
1980 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1981 need to expand the argument again. This way, we will not perform
1982 side-effects more the once. */
1983 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1985 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1989 /* Compute into TARGET.
1990 Set TARGET to wherever the result comes back. */
1991 target = expand_unop (mode, builtin_optab, op0, target, 0);
1996 expand_errno_check (exp, target);
1998 /* Output the entire sequence. */
1999 insns = get_insns ();
2005 /* If we were unable to expand via the builtin, stop the sequence
2006 (without outputting the insns) and call to the library function
2007 with the stabilized argument list. */
2011 before_call = get_last_insn ();
2013 return expand_call (exp, target, target == const0_rtx);
2016 /* Expand a call to the builtin binary math functions (pow and atan2).
2017 Return NULL_RTX if a normal call should be emitted rather than expanding the
2018 function in-line. EXP is the expression that is a call to the builtin
2019 function; if convenient, the result should be placed in TARGET.
2020 SUBTARGET may be used as the target for computing one of EXP's
2024 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2026 optab builtin_optab;
2027 rtx op0, op1, insns;
2028 int op1_type = REAL_TYPE;
2029 tree fndecl = get_callee_fndecl (exp);
2031 enum machine_mode mode;
2032 bool errno_set = true;
2034 switch (DECL_FUNCTION_CODE (fndecl))
2036 CASE_FLT_FN (BUILT_IN_SCALBN):
2037 CASE_FLT_FN (BUILT_IN_SCALBLN):
2038 CASE_FLT_FN (BUILT_IN_LDEXP):
2039 op1_type = INTEGER_TYPE;
2044 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2047 arg0 = CALL_EXPR_ARG (exp, 0);
2048 arg1 = CALL_EXPR_ARG (exp, 1);
2050 switch (DECL_FUNCTION_CODE (fndecl))
2052 CASE_FLT_FN (BUILT_IN_POW):
2053 builtin_optab = pow_optab; break;
2054 CASE_FLT_FN (BUILT_IN_ATAN2):
2055 builtin_optab = atan2_optab; break;
2056 CASE_FLT_FN (BUILT_IN_SCALB):
2057 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2059 builtin_optab = scalb_optab; break;
2060 CASE_FLT_FN (BUILT_IN_SCALBN):
2061 CASE_FLT_FN (BUILT_IN_SCALBLN):
2062 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2064 /* Fall through... */
2065 CASE_FLT_FN (BUILT_IN_LDEXP):
2066 builtin_optab = ldexp_optab; break;
2067 CASE_FLT_FN (BUILT_IN_FMOD):
2068 builtin_optab = fmod_optab; break;
2069 CASE_FLT_FN (BUILT_IN_REMAINDER):
2070 CASE_FLT_FN (BUILT_IN_DREM):
2071 builtin_optab = remainder_optab; break;
2076 /* Make a suitable register to place result in. */
2077 mode = TYPE_MODE (TREE_TYPE (exp));
2079 /* Before working hard, check whether the instruction is available. */
2080 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2083 target = gen_reg_rtx (mode);
2085 if (! flag_errno_math || ! HONOR_NANS (mode))
2088 /* Always stabilize the argument list. */
2089 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2090 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2092 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2093 op1 = expand_normal (arg1);
2097 /* Compute into TARGET.
2098 Set TARGET to wherever the result comes back. */
2099 target = expand_binop (mode, builtin_optab, op0, op1,
2100 target, 0, OPTAB_DIRECT);
2102 /* If we were unable to expand via the builtin, stop the sequence
2103 (without outputting the insns) and call to the library function
2104 with the stabilized argument list. */
2108 return expand_call (exp, target, target == const0_rtx);
2112 expand_errno_check (exp, target);
2114 /* Output the entire sequence. */
2115 insns = get_insns ();
2122 /* Expand a call to the builtin sin and cos math functions.
2123 Return NULL_RTX if a normal call should be emitted rather than expanding the
2124 function in-line. EXP is the expression that is a call to the builtin
2125 function; if convenient, the result should be placed in TARGET.
2126 SUBTARGET may be used as the target for computing one of EXP's
2130 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2132 optab builtin_optab;
2134 tree fndecl = get_callee_fndecl (exp);
2135 enum machine_mode mode;
2138 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2141 arg = CALL_EXPR_ARG (exp, 0);
2143 switch (DECL_FUNCTION_CODE (fndecl))
2145 CASE_FLT_FN (BUILT_IN_SIN):
2146 CASE_FLT_FN (BUILT_IN_COS):
2147 builtin_optab = sincos_optab; break;
2152 /* Make a suitable register to place result in. */
2153 mode = TYPE_MODE (TREE_TYPE (exp));
2155 /* Check if sincos insn is available, otherwise fallback
2156 to sin or cos insn. */
2157 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2158 switch (DECL_FUNCTION_CODE (fndecl))
2160 CASE_FLT_FN (BUILT_IN_SIN):
2161 builtin_optab = sin_optab; break;
2162 CASE_FLT_FN (BUILT_IN_COS):
2163 builtin_optab = cos_optab; break;
2168 /* Before working hard, check whether the instruction is available. */
2169 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2171 target = gen_reg_rtx (mode);
2173 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2174 need to expand the argument again. This way, we will not perform
2175 side-effects more the once. */
2176 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2178 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2182 /* Compute into TARGET.
2183 Set TARGET to wherever the result comes back. */
2184 if (builtin_optab == sincos_optab)
2188 switch (DECL_FUNCTION_CODE (fndecl))
2190 CASE_FLT_FN (BUILT_IN_SIN):
2191 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2193 CASE_FLT_FN (BUILT_IN_COS):
2194 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2199 gcc_assert (result);
2203 target = expand_unop (mode, builtin_optab, op0, target, 0);
2208 /* Output the entire sequence. */
2209 insns = get_insns ();
2215 /* If we were unable to expand via the builtin, stop the sequence
2216 (without outputting the insns) and call to the library function
2217 with the stabilized argument list. */
2221 target = expand_call (exp, target, target == const0_rtx);
2226 /* Expand a call to one of the builtin math functions that operate on
2227 floating point argument and output an integer result (ilogb, isinf,
2229 Return 0 if a normal call should be emitted rather than expanding the
2230 function in-line. EXP is the expression that is a call to the builtin
2231 function; if convenient, the result should be placed in TARGET.
2232 SUBTARGET may be used as the target for computing one of EXP's operands. */
2235 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2237 optab builtin_optab = 0;
2238 enum insn_code icode = CODE_FOR_nothing;
2240 tree fndecl = get_callee_fndecl (exp);
2241 enum machine_mode mode;
2242 bool errno_set = false;
2245 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2248 arg = CALL_EXPR_ARG (exp, 0);
2250 switch (DECL_FUNCTION_CODE (fndecl))
2252 CASE_FLT_FN (BUILT_IN_ILOGB):
2253 errno_set = true; builtin_optab = ilogb_optab; break;
2254 CASE_FLT_FN (BUILT_IN_ISINF):
2255 builtin_optab = isinf_optab; break;
2256 case BUILT_IN_ISNORMAL:
2257 case BUILT_IN_ISFINITE:
2258 CASE_FLT_FN (BUILT_IN_FINITE):
2259 /* These builtins have no optabs (yet). */
2265 /* There's no easy way to detect the case we need to set EDOM. */
2266 if (flag_errno_math && errno_set)
2269 /* Optab mode depends on the mode of the input argument. */
2270 mode = TYPE_MODE (TREE_TYPE (arg));
2273 icode = optab_handler (builtin_optab, mode)->insn_code;
2275 /* Before working hard, check whether the instruction is available. */
2276 if (icode != CODE_FOR_nothing)
2278 rtx last = get_last_insn ();
2279 tree orig_arg = arg;
2280 /* Make a suitable register to place result in. */
2282 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2283 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2285 gcc_assert (insn_data[icode].operand[0].predicate
2286 (target, GET_MODE (target)));
2288 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2289 need to expand the argument again. This way, we will not perform
2290 side-effects more the once. */
2291 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2293 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2295 if (mode != GET_MODE (op0))
2296 op0 = convert_to_mode (mode, op0, 0);
2298 /* Compute into TARGET.
2299 Set TARGET to wherever the result comes back. */
2300 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2302 delete_insns_since (last);
2303 CALL_EXPR_ARG (exp, 0) = orig_arg;
2306 /* If there is no optab, try generic code. */
2307 switch (DECL_FUNCTION_CODE (fndecl))
2311 CASE_FLT_FN (BUILT_IN_ISINF):
2313 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2314 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2315 tree const type = TREE_TYPE (arg);
2319 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2320 real_from_string (&r, buf);
2321 result = build_call_expr (isgr_fn, 2,
2322 fold_build1 (ABS_EXPR, type, arg),
2323 build_real (type, r));
2324 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2326 CASE_FLT_FN (BUILT_IN_FINITE):
2327 case BUILT_IN_ISFINITE:
2329 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2330 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2331 tree const type = TREE_TYPE (arg);
2335 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2336 real_from_string (&r, buf);
2337 result = build_call_expr (isle_fn, 2,
2338 fold_build1 (ABS_EXPR, type, arg),
2339 build_real (type, r));
2340 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2342 case BUILT_IN_ISNORMAL:
2344 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2345 islessequal(fabs(x),DBL_MAX). */
2346 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2347 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2348 tree const type = TREE_TYPE (arg);
2349 REAL_VALUE_TYPE rmax, rmin;
2352 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2353 real_from_string (&rmax, buf);
2354 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2355 real_from_string (&rmin, buf);
2356 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2357 result = build_call_expr (isle_fn, 2, arg,
2358 build_real (type, rmax));
2359 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2360 build_call_expr (isge_fn, 2, arg,
2361 build_real (type, rmin)));
2362 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2368 target = expand_call (exp, target, target == const0_rtx);
2373 /* Expand a call to the builtin sincos math function.
2374 Return NULL_RTX if a normal call should be emitted rather than expanding the
2375 function in-line. EXP is the expression that is a call to the builtin
2379 expand_builtin_sincos (tree exp)
2381 rtx op0, op1, op2, target1, target2;
2382 enum machine_mode mode;
2383 tree arg, sinp, cosp;
2386 if (!validate_arglist (exp, REAL_TYPE,
2387 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2390 arg = CALL_EXPR_ARG (exp, 0);
2391 sinp = CALL_EXPR_ARG (exp, 1);
2392 cosp = CALL_EXPR_ARG (exp, 2);
2394 /* Make a suitable register to place result in. */
2395 mode = TYPE_MODE (TREE_TYPE (arg));
2397 /* Check if sincos insn is available, otherwise emit the call. */
2398 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2401 target1 = gen_reg_rtx (mode);
2402 target2 = gen_reg_rtx (mode);
2404 op0 = expand_normal (arg);
2405 op1 = expand_normal (build_fold_indirect_ref (sinp));
2406 op2 = expand_normal (build_fold_indirect_ref (cosp));
2408 /* Compute into target1 and target2.
2409 Set TARGET to wherever the result comes back. */
2410 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2411 gcc_assert (result);
2413 /* Move target1 and target2 to the memory locations indicated
2415 emit_move_insn (op1, target1);
2416 emit_move_insn (op2, target2);
2421 /* Expand a call to the internal cexpi builtin to the sincos math function.
2422 EXP is the expression that is a call to the builtin function; if convenient,
2423 the result should be placed in TARGET. SUBTARGET may be used as the target
2424 for computing one of EXP's operands. */
2427 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2429 tree fndecl = get_callee_fndecl (exp);
2431 enum machine_mode mode;
2434 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2437 arg = CALL_EXPR_ARG (exp, 0);
2438 type = TREE_TYPE (arg);
2439 mode = TYPE_MODE (TREE_TYPE (arg));
2441 /* Try expanding via a sincos optab, fall back to emitting a libcall
2442 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2443 is only generated from sincos, cexp or if we have either of them. */
2444 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2446 op1 = gen_reg_rtx (mode);
2447 op2 = gen_reg_rtx (mode);
2449 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2451 /* Compute into op1 and op2. */
2452 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2454 else if (TARGET_HAS_SINCOS)
2456 tree call, fn = NULL_TREE;
2460 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2461 fn = built_in_decls[BUILT_IN_SINCOSF];
2462 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2463 fn = built_in_decls[BUILT_IN_SINCOS];
2464 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2465 fn = built_in_decls[BUILT_IN_SINCOSL];
2469 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2470 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2471 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2472 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2473 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2474 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2476 /* Make sure not to fold the sincos call again. */
2477 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2478 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2479 call, 3, arg, top1, top2));
2483 tree call, fn = NULL_TREE, narg;
2484 tree ctype = build_complex_type (type);
2486 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2487 fn = built_in_decls[BUILT_IN_CEXPF];
2488 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2489 fn = built_in_decls[BUILT_IN_CEXP];
2490 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2491 fn = built_in_decls[BUILT_IN_CEXPL];
2495 /* If we don't have a decl for cexp create one. This is the
2496 friendliest fallback if the user calls __builtin_cexpi
2497 without full target C99 function support. */
2498 if (fn == NULL_TREE)
2501 const char *name = NULL;
2503 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2505 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2507 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2510 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2511 fn = build_fn_decl (name, fntype);
2514 narg = fold_build2 (COMPLEX_EXPR, ctype,
2515 build_real (type, dconst0), arg);
2517 /* Make sure not to fold the cexp call again. */
2518 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2519 return expand_expr (build_call_nary (ctype, call, 1, narg),
2520 target, VOIDmode, EXPAND_NORMAL);
2523 /* Now build the proper return type. */
2524 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2525 make_tree (TREE_TYPE (arg), op2),
2526 make_tree (TREE_TYPE (arg), op1)),
2527 target, VOIDmode, EXPAND_NORMAL);
2530 /* Expand a call to one of the builtin rounding functions gcc defines
2531 as an extension (lfloor and lceil). As these are gcc extensions we
2532 do not need to worry about setting errno to EDOM.
2533 If expanding via optab fails, lower expression to (int)(floor(x)).
2534 EXP is the expression that is a call to the builtin function;
2535 if convenient, the result should be placed in TARGET. */
2538 expand_builtin_int_roundingfn (tree exp, rtx target)
2540 convert_optab builtin_optab;
2541 rtx op0, insns, tmp;
2542 tree fndecl = get_callee_fndecl (exp);
2543 enum built_in_function fallback_fn;
2544 tree fallback_fndecl;
2545 enum machine_mode mode;
2548 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2551 arg = CALL_EXPR_ARG (exp, 0);
2553 switch (DECL_FUNCTION_CODE (fndecl))
2555 CASE_FLT_FN (BUILT_IN_LCEIL):
2556 CASE_FLT_FN (BUILT_IN_LLCEIL):
2557 builtin_optab = lceil_optab;
2558 fallback_fn = BUILT_IN_CEIL;
2561 CASE_FLT_FN (BUILT_IN_LFLOOR):
2562 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2563 builtin_optab = lfloor_optab;
2564 fallback_fn = BUILT_IN_FLOOR;
2571 /* Make a suitable register to place result in. */
2572 mode = TYPE_MODE (TREE_TYPE (exp));
2574 target = gen_reg_rtx (mode);
2576 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2577 need to expand the argument again. This way, we will not perform
2578 side-effects more the once. */
2579 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2581 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2585 /* Compute into TARGET. */
2586 if (expand_sfix_optab (target, op0, builtin_optab))
2588 /* Output the entire sequence. */
2589 insns = get_insns ();
2595 /* If we were unable to expand via the builtin, stop the sequence
2596 (without outputting the insns). */
2599 /* Fall back to floating point rounding optab. */
2600 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2602 /* For non-C99 targets we may end up without a fallback fndecl here
2603 if the user called __builtin_lfloor directly. In this case emit
2604 a call to the floor/ceil variants nevertheless. This should result
2605 in the best user experience for not full C99 targets. */
2606 if (fallback_fndecl == NULL_TREE)
2609 const char *name = NULL;
2611 switch (DECL_FUNCTION_CODE (fndecl))
2613 case BUILT_IN_LCEIL:
2614 case BUILT_IN_LLCEIL:
2617 case BUILT_IN_LCEILF:
2618 case BUILT_IN_LLCEILF:
2621 case BUILT_IN_LCEILL:
2622 case BUILT_IN_LLCEILL:
2625 case BUILT_IN_LFLOOR:
2626 case BUILT_IN_LLFLOOR:
2629 case BUILT_IN_LFLOORF:
2630 case BUILT_IN_LLFLOORF:
2633 case BUILT_IN_LFLOORL:
2634 case BUILT_IN_LLFLOORL:
2641 fntype = build_function_type_list (TREE_TYPE (arg),
2642 TREE_TYPE (arg), NULL_TREE);
2643 fallback_fndecl = build_fn_decl (name, fntype);
2646 exp = build_call_expr (fallback_fndecl, 1, arg);
2648 tmp = expand_normal (exp);
2650 /* Truncate the result of floating point optab to integer
2651 via expand_fix (). */
2652 target = gen_reg_rtx (mode);
2653 expand_fix (target, tmp, 0);
2658 /* Expand a call to one of the builtin math functions doing integer
2660 Return 0 if a normal call should be emitted rather than expanding the
2661 function in-line. EXP is the expression that is a call to the builtin
2662 function; if convenient, the result should be placed in TARGET. */
2665 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2667 convert_optab builtin_optab;
2669 tree fndecl = get_callee_fndecl (exp);
2671 enum machine_mode mode;
2673 /* There's no easy way to detect the case we need to set EDOM. */
2674 if (flag_errno_math)
2677 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2680 arg = CALL_EXPR_ARG (exp, 0);
2682 switch (DECL_FUNCTION_CODE (fndecl))
2684 CASE_FLT_FN (BUILT_IN_LRINT):
2685 CASE_FLT_FN (BUILT_IN_LLRINT):
2686 builtin_optab = lrint_optab; break;
2687 CASE_FLT_FN (BUILT_IN_LROUND):
2688 CASE_FLT_FN (BUILT_IN_LLROUND):
2689 builtin_optab = lround_optab; break;
2694 /* Make a suitable register to place result in. */
2695 mode = TYPE_MODE (TREE_TYPE (exp));
2697 target = gen_reg_rtx (mode);
2699 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2700 need to expand the argument again. This way, we will not perform
2701 side-effects more the once. */
2702 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2704 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2708 if (expand_sfix_optab (target, op0, builtin_optab))
2710 /* Output the entire sequence. */
2711 insns = get_insns ();
2717 /* If we were unable to expand via the builtin, stop the sequence
2718 (without outputting the insns) and call to the library function
2719 with the stabilized argument list. */
2722 target = expand_call (exp, target, target == const0_rtx);
2727 /* To evaluate powi(x,n), the floating point value x raised to the
2728 constant integer exponent n, we use a hybrid algorithm that
2729 combines the "window method" with look-up tables. For an
2730 introduction to exponentiation algorithms and "addition chains",
2731 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2732 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2733 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2734 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2736 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2737 multiplications to inline before calling the system library's pow
2738 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2739 so this default never requires calling pow, powf or powl. */
2741 #ifndef POWI_MAX_MULTS
2742 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2745 /* The size of the "optimal power tree" lookup table. All
2746 exponents less than this value are simply looked up in the
2747 powi_table below. This threshold is also used to size the
2748 cache of pseudo registers that hold intermediate results. */
2749 #define POWI_TABLE_SIZE 256
2751 /* The size, in bits of the window, used in the "window method"
2752 exponentiation algorithm. This is equivalent to a radix of
2753 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2754 #define POWI_WINDOW_SIZE 3
2756 /* The following table is an efficient representation of an
2757 "optimal power tree". For each value, i, the corresponding
2758 value, j, in the table states than an optimal evaluation
2759 sequence for calculating pow(x,i) can be found by evaluating
2760 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2761 100 integers is given in Knuth's "Seminumerical algorithms". */
2763 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2765 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2766 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2767 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2768 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2769 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2770 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2771 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2772 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2773 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2774 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2775 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2776 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2777 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2778 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2779 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2780 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2781 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2782 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2783 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2784 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2785 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2786 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2787 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2788 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2789 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2790 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2791 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2792 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2793 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2794 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2795 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2796 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2800 /* Return the number of multiplications required to calculate
2801 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2802 subroutine of powi_cost. CACHE is an array indicating
2803 which exponents have already been calculated. */
2806 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2808 /* If we've already calculated this exponent, then this evaluation
2809 doesn't require any additional multiplications. */
2814 return powi_lookup_cost (n - powi_table[n], cache)
2815 + powi_lookup_cost (powi_table[n], cache) + 1;
2818 /* Return the number of multiplications required to calculate
2819 powi(x,n) for an arbitrary x, given the exponent N. This
2820 function needs to be kept in sync with expand_powi below. */
2823 powi_cost (HOST_WIDE_INT n)
2825 bool cache[POWI_TABLE_SIZE];
2826 unsigned HOST_WIDE_INT digit;
2827 unsigned HOST_WIDE_INT val;
2833 /* Ignore the reciprocal when calculating the cost. */
2834 val = (n < 0) ? -n : n;
2836 /* Initialize the exponent cache. */
2837 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2842 while (val >= POWI_TABLE_SIZE)
2846 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2847 result += powi_lookup_cost (digit, cache)
2848 + POWI_WINDOW_SIZE + 1;
2849 val >>= POWI_WINDOW_SIZE;
2858 return result + powi_lookup_cost (val, cache);
2861 /* Recursive subroutine of expand_powi. This function takes the array,
2862 CACHE, of already calculated exponents and an exponent N and returns
2863 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2866 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2868 unsigned HOST_WIDE_INT digit;
2872 if (n < POWI_TABLE_SIZE)
2877 target = gen_reg_rtx (mode);
2880 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2881 op1 = expand_powi_1 (mode, powi_table[n], cache);
2885 target = gen_reg_rtx (mode);
2886 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2887 op0 = expand_powi_1 (mode, n - digit, cache);
2888 op1 = expand_powi_1 (mode, digit, cache);
2892 target = gen_reg_rtx (mode);
2893 op0 = expand_powi_1 (mode, n >> 1, cache);
2897 result = expand_mult (mode, op0, op1, target, 0);
2898 if (result != target)
2899 emit_move_insn (target, result);
2903 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2904 floating point operand in mode MODE, and N is the exponent. This
2905 function needs to be kept in sync with powi_cost above. */
2908 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2910 unsigned HOST_WIDE_INT val;
2911 rtx cache[POWI_TABLE_SIZE];
2915 return CONST1_RTX (mode);
2917 val = (n < 0) ? -n : n;
2919 memset (cache, 0, sizeof (cache));
2922 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2924 /* If the original exponent was negative, reciprocate the result. */
2926 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2927 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2932 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2933 a normal call should be emitted rather than expanding the function
2934 in-line. EXP is the expression that is a call to the builtin
2935 function; if convenient, the result should be placed in TARGET. */
2938 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2942 tree type = TREE_TYPE (exp);
2943 REAL_VALUE_TYPE cint, c, c2;
2946 enum machine_mode mode = TYPE_MODE (type);
2948 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2951 arg0 = CALL_EXPR_ARG (exp, 0);
2952 arg1 = CALL_EXPR_ARG (exp, 1);
2954 if (TREE_CODE (arg1) != REAL_CST
2955 || TREE_OVERFLOW (arg1))
2956 return expand_builtin_mathfn_2 (exp, target, subtarget);
2958 /* Handle constant exponents. */
2960 /* For integer valued exponents we can expand to an optimal multiplication
2961 sequence using expand_powi. */
2962 c = TREE_REAL_CST (arg1);
2963 n = real_to_integer (&c);
2964 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2965 if (real_identical (&c, &cint)
2966 && ((n >= -1 && n <= 2)
2967 || (flag_unsafe_math_optimizations
2968 && optimize_insn_for_speed_p ()
2969 && powi_cost (n) <= POWI_MAX_MULTS)))
2971 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2974 op = force_reg (mode, op);
2975 op = expand_powi (op, mode, n);
2980 narg0 = builtin_save_expr (arg0);
2982 /* If the exponent is not integer valued, check if it is half of an integer.
2983 In this case we can expand to sqrt (x) * x**(n/2). */
2984 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2985 if (fn != NULL_TREE)
2987 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2988 n = real_to_integer (&c2);
2989 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2990 if (real_identical (&c2, &cint)
2991 && ((flag_unsafe_math_optimizations
2992 && optimize_insn_for_speed_p ()
2993 && powi_cost (n/2) <= POWI_MAX_MULTS)
2994 /* Even the c==0.5 case cannot be done unconditionally
2995 when we need to preserve signed zeros, as
2996 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
2997 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)))
2999 tree call_expr = build_call_expr (fn, 1, narg0);
3000 /* Use expand_expr in case the newly built call expression
3001 was folded to a non-call. */
3002 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3005 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3006 op2 = force_reg (mode, op2);
3007 op2 = expand_powi (op2, mode, abs (n / 2));
3008 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3009 0, OPTAB_LIB_WIDEN);
3010 /* If the original exponent was negative, reciprocate the
3013 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3014 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3020 /* Try if the exponent is a third of an integer. In this case
3021 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3022 different from pow (x, 1./3.) due to rounding and behavior
3023 with negative x we need to constrain this transformation to
3024 unsafe math and positive x or finite math. */
3025 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3027 && flag_unsafe_math_optimizations
3028 && (tree_expr_nonnegative_p (arg0)
3029 || !HONOR_NANS (mode)))
3031 REAL_VALUE_TYPE dconst3;
3032 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3033 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3034 real_round (&c2, mode, &c2);
3035 n = real_to_integer (&c2);
3036 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3037 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3038 real_convert (&c2, mode, &c2);
3039 if (real_identical (&c2, &c)
3040 && ((optimize_insn_for_speed_p ()
3041 && powi_cost (n/3) <= POWI_MAX_MULTS)
3044 tree call_expr = build_call_expr (fn, 1,narg0);
3045 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3046 if (abs (n) % 3 == 2)
3047 op = expand_simple_binop (mode, MULT, op, op, op,
3048 0, OPTAB_LIB_WIDEN);
3051 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3052 op2 = force_reg (mode, op2);
3053 op2 = expand_powi (op2, mode, abs (n / 3));
3054 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3055 0, OPTAB_LIB_WIDEN);
3056 /* If the original exponent was negative, reciprocate the
3059 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3060 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3066 /* Fall back to optab expansion. */
3067 return expand_builtin_mathfn_2 (exp, target, subtarget);
3070 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3071 a normal call should be emitted rather than expanding the function
3072 in-line. EXP is the expression that is a call to the builtin
3073 function; if convenient, the result should be placed in TARGET. */
3076 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3080 enum machine_mode mode;
3081 enum machine_mode mode2;
3083 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3086 arg0 = CALL_EXPR_ARG (exp, 0);
3087 arg1 = CALL_EXPR_ARG (exp, 1);
3088 mode = TYPE_MODE (TREE_TYPE (exp));
3090 /* Handle constant power. */
3092 if (TREE_CODE (arg1) == INTEGER_CST
3093 && !TREE_OVERFLOW (arg1))
3095 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3097 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3098 Otherwise, check the number of multiplications required. */
3099 if ((TREE_INT_CST_HIGH (arg1) == 0
3100 || TREE_INT_CST_HIGH (arg1) == -1)
3101 && ((n >= -1 && n <= 2)
3102 || (optimize_insn_for_speed_p ()
3103 && powi_cost (n) <= POWI_MAX_MULTS)))
3105 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3106 op0 = force_reg (mode, op0);
3107 return expand_powi (op0, mode, n);
3111 /* Emit a libcall to libgcc. */
3113 /* Mode of the 2nd argument must match that of an int. */
3114 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3116 if (target == NULL_RTX)
3117 target = gen_reg_rtx (mode);
3119 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3120 if (GET_MODE (op0) != mode)
3121 op0 = convert_to_mode (mode, op0, 0);
3122 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3123 if (GET_MODE (op1) != mode2)
3124 op1 = convert_to_mode (mode2, op1, 0);
3126 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3127 target, LCT_CONST, mode, 2,
3128 op0, mode, op1, mode2);
3133 /* Expand expression EXP which is a call to the strlen builtin. Return
3134 NULL_RTX if we failed the caller should emit a normal call, otherwise
3135 try to get the result in TARGET, if convenient. */
3138 expand_builtin_strlen (tree exp, rtx target,
3139 enum machine_mode target_mode)
3141 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3147 tree src = CALL_EXPR_ARG (exp, 0);
3148 rtx result, src_reg, char_rtx, before_strlen;
3149 enum machine_mode insn_mode = target_mode, char_mode;
3150 enum insn_code icode = CODE_FOR_nothing;
3153 /* If the length can be computed at compile-time, return it. */
3154 len = c_strlen (src, 0);
3156 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3158 /* If the length can be computed at compile-time and is constant
3159 integer, but there are side-effects in src, evaluate
3160 src for side-effects, then return len.
3161 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3162 can be optimized into: i++; x = 3; */
3163 len = c_strlen (src, 1);
3164 if (len && TREE_CODE (len) == INTEGER_CST)
3166 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3167 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3170 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3172 /* If SRC is not a pointer type, don't do this operation inline. */
3176 /* Bail out if we can't compute strlen in the right mode. */
3177 while (insn_mode != VOIDmode)
3179 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3180 if (icode != CODE_FOR_nothing)
3183 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3185 if (insn_mode == VOIDmode)
3188 /* Make a place to write the result of the instruction. */
3192 && GET_MODE (result) == insn_mode
3193 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3194 result = gen_reg_rtx (insn_mode);
3196 /* Make a place to hold the source address. We will not expand
3197 the actual source until we are sure that the expansion will
3198 not fail -- there are trees that cannot be expanded twice. */
3199 src_reg = gen_reg_rtx (Pmode);
3201 /* Mark the beginning of the strlen sequence so we can emit the
3202 source operand later. */
3203 before_strlen = get_last_insn ();
3205 char_rtx = const0_rtx;
3206 char_mode = insn_data[(int) icode].operand[2].mode;
3207 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3209 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3211 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3212 char_rtx, GEN_INT (align));
3217 /* Now that we are assured of success, expand the source. */
3219 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3221 emit_move_insn (src_reg, pat);
3226 emit_insn_after (pat, before_strlen);
3228 emit_insn_before (pat, get_insns ());
3230 /* Return the value in the proper mode for this function. */
3231 if (GET_MODE (result) == target_mode)
3233 else if (target != 0)
3234 convert_move (target, result, 0);
3236 target = convert_to_mode (target_mode, result, 0);
3242 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3243 caller should emit a normal call, otherwise try to get the result
3244 in TARGET, if convenient (and in mode MODE if that's convenient). */
3247 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3249 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3251 tree type = TREE_TYPE (exp);
3252 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3253 CALL_EXPR_ARG (exp, 1), type);
3255 return expand_expr (result, target, mode, EXPAND_NORMAL);
3260 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3261 caller should emit a normal call, otherwise try to get the result
3262 in TARGET, if convenient (and in mode MODE if that's convenient). */
3265 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3267 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3269 tree type = TREE_TYPE (exp);
3270 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3271 CALL_EXPR_ARG (exp, 1), type);
3273 return expand_expr (result, target, mode, EXPAND_NORMAL);
3275 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3280 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3281 caller should emit a normal call, otherwise try to get the result
3282 in TARGET, if convenient (and in mode MODE if that's convenient). */
3285 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3287 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3289 tree type = TREE_TYPE (exp);
3290 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3291 CALL_EXPR_ARG (exp, 1), type);
3293 return expand_expr (result, target, mode, EXPAND_NORMAL);
3298 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3299 caller should emit a normal call, otherwise try to get the result
3300 in TARGET, if convenient (and in mode MODE if that's convenient). */
3303 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3305 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3307 tree type = TREE_TYPE (exp);
3308 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3309 CALL_EXPR_ARG (exp, 1), type);
3311 return expand_expr (result, target, mode, EXPAND_NORMAL);
3316 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3317 bytes from constant string DATA + OFFSET and return it as target
3321 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3322 enum machine_mode mode)
3324 const char *str = (const char *) data;
3326 gcc_assert (offset >= 0
3327 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3328 <= strlen (str) + 1));
3330 return c_readstr (str + offset, mode);
3333 /* Expand a call EXP to the memcpy builtin.
3334 Return NULL_RTX if we failed, the caller should emit a normal call,
3335 otherwise try to get the result in TARGET, if convenient (and in
3336 mode MODE if that's convenient). */
3339 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3341 tree fndecl = get_callee_fndecl (exp);
3343 if (!validate_arglist (exp,
3344 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3348 tree dest = CALL_EXPR_ARG (exp, 0);
3349 tree src = CALL_EXPR_ARG (exp, 1);
3350 tree len = CALL_EXPR_ARG (exp, 2);
3351 const char *src_str;
3352 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3353 unsigned int dest_align
3354 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3355 rtx dest_mem, src_mem, dest_addr, len_rtx;
3356 tree result = fold_builtin_memory_op (dest, src, len,
3357 TREE_TYPE (TREE_TYPE (fndecl)),
3359 HOST_WIDE_INT expected_size = -1;
3360 unsigned int expected_align = 0;
3361 tree_ann_common_t ann;
3365 while (TREE_CODE (result) == COMPOUND_EXPR)
3367 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3369 result = TREE_OPERAND (result, 1);
3371 return expand_expr (result, target, mode, EXPAND_NORMAL);
3374 /* If DEST is not a pointer type, call the normal function. */
3375 if (dest_align == 0)
3378 /* If either SRC is not a pointer type, don't do this
3379 operation in-line. */
3383 ann = tree_common_ann (exp);
3385 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3387 if (expected_align < dest_align)
3388 expected_align = dest_align;
3389 dest_mem = get_memory_rtx (dest, len);
3390 set_mem_align (dest_mem, dest_align);
3391 len_rtx = expand_normal (len);
3392 src_str = c_getstr (src);
3394 /* If SRC is a string constant and block move would be done
3395 by pieces, we can avoid loading the string from memory
3396 and only stored the computed constants. */
3398 && GET_CODE (len_rtx) == CONST_INT
3399 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3400 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3401 CONST_CAST (char *, src_str),
3404 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3405 builtin_memcpy_read_str,
3406 CONST_CAST (char *, src_str),
3407 dest_align, false, 0);
3408 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3409 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3413 src_mem = get_memory_rtx (src, len);
3414 set_mem_align (src_mem, src_align);
3416 /* Copy word part most expediently. */
3417 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3418 CALL_EXPR_TAILCALL (exp)
3419 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3420 expected_align, expected_size);
3424 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3425 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3431 /* Expand a call EXP to the mempcpy builtin.
3432 Return NULL_RTX if we failed; the caller should emit a normal call,
3433 otherwise try to get the result in TARGET, if convenient (and in
3434 mode MODE if that's convenient). If ENDP is 0 return the
3435 destination pointer, if ENDP is 1 return the end pointer ala
3436 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3440 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3442 if (!validate_arglist (exp,
3443 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3447 tree dest = CALL_EXPR_ARG (exp, 0);
3448 tree src = CALL_EXPR_ARG (exp, 1);
3449 tree len = CALL_EXPR_ARG (exp, 2);
3450 return expand_builtin_mempcpy_args (dest, src, len,
3452 target, mode, /*endp=*/ 1);
3456 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3457 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3458 so that this can also be called without constructing an actual CALL_EXPR.
3459 TYPE is the return type of the call. The other arguments and return value
3460 are the same as for expand_builtin_mempcpy. */
3463 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3464 rtx target, enum machine_mode mode, int endp)
3466 /* If return value is ignored, transform mempcpy into memcpy. */
3467 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3469 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3470 tree result = build_call_expr (fn, 3, dest, src, len);
3472 while (TREE_CODE (result) == COMPOUND_EXPR)
3474 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3476 result = TREE_OPERAND (result, 1);
3478 return expand_expr (result, target, mode, EXPAND_NORMAL);
3482 const char *src_str;
3483 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3484 unsigned int dest_align
3485 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3486 rtx dest_mem, src_mem, len_rtx;
3487 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3491 while (TREE_CODE (result) == COMPOUND_EXPR)
3493 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3495 result = TREE_OPERAND (result, 1);
3497 return expand_expr (result, target, mode, EXPAND_NORMAL);
3500 /* If either SRC or DEST is not a pointer type, don't do this
3501 operation in-line. */
3502 if (dest_align == 0 || src_align == 0)
3505 /* If LEN is not constant, call the normal function. */
3506 if (! host_integerp (len, 1))
3509 len_rtx = expand_normal (len);
3510 src_str = c_getstr (src);
3512 /* If SRC is a string constant and block move would be done
3513 by pieces, we can avoid loading the string from memory
3514 and only stored the computed constants. */
3516 && GET_CODE (len_rtx) == CONST_INT
3517 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3518 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3519 CONST_CAST (char *, src_str),
3522 dest_mem = get_memory_rtx (dest, len);
3523 set_mem_align (dest_mem, dest_align);
3524 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3525 builtin_memcpy_read_str,
3526 CONST_CAST (char *, src_str),
3527 dest_align, false, endp);
3528 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3529 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3533 if (GET_CODE (len_rtx) == CONST_INT
3534 && can_move_by_pieces (INTVAL (len_rtx),
3535 MIN (dest_align, src_align)))
3537 dest_mem = get_memory_rtx (dest, len);
3538 set_mem_align (dest_mem, dest_align);
3539 src_mem = get_memory_rtx (src, len);
3540 set_mem_align (src_mem, src_align);
3541 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3542 MIN (dest_align, src_align), endp);
3543 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3544 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3552 /* Expand expression EXP, which is a call to the memmove builtin. Return
3553 NULL_RTX if we failed; the caller should emit a normal call. */
3556 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3558 if (!validate_arglist (exp,
3559 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3563 tree dest = CALL_EXPR_ARG (exp, 0);
3564 tree src = CALL_EXPR_ARG (exp, 1);
3565 tree len = CALL_EXPR_ARG (exp, 2);
3566 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3567 target, mode, ignore);
3571 /* Helper function to do the actual work for expand_builtin_memmove. The
3572 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3573 so that this can also be called without constructing an actual CALL_EXPR.
3574 TYPE is the return type of the call. The other arguments and return value
3575 are the same as for expand_builtin_memmove. */
3578 expand_builtin_memmove_args (tree dest, tree src, tree len,
3579 tree type, rtx target, enum machine_mode mode,
3582 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3586 STRIP_TYPE_NOPS (result);
3587 while (TREE_CODE (result) == COMPOUND_EXPR)
3589 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3591 result = TREE_OPERAND (result, 1);
3593 return expand_expr (result, target, mode, EXPAND_NORMAL);
3596 /* Otherwise, call the normal function. */
3600 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3601 NULL_RTX if we failed the caller should emit a normal call. */
3604 expand_builtin_bcopy (tree exp, int ignore)
3606 tree type = TREE_TYPE (exp);
3607 tree src, dest, size;
3609 if (!validate_arglist (exp,
3610 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3613 src = CALL_EXPR_ARG (exp, 0);
3614 dest = CALL_EXPR_ARG (exp, 1);
3615 size = CALL_EXPR_ARG (exp, 2);
3617 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3618 This is done this way so that if it isn't expanded inline, we fall
3619 back to calling bcopy instead of memmove. */
3620 return expand_builtin_memmove_args (dest, src,
3621 fold_convert (sizetype, size),
3622 type, const0_rtx, VOIDmode,
3627 # define HAVE_movstr 0
3628 # define CODE_FOR_movstr CODE_FOR_nothing
3631 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3632 we failed, the caller should emit a normal call, otherwise try to
3633 get the result in TARGET, if convenient. If ENDP is 0 return the
3634 destination pointer, if ENDP is 1 return the end pointer ala
3635 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3639 expand_movstr (tree dest, tree src, rtx target, int endp)
3645 const struct insn_data * data;
3650 dest_mem = get_memory_rtx (dest, NULL);
3651 src_mem = get_memory_rtx (src, NULL);
3654 target = force_reg (Pmode, XEXP (dest_mem, 0));
3655 dest_mem = replace_equiv_address (dest_mem, target);
3656 end = gen_reg_rtx (Pmode);
3660 if (target == 0 || target == const0_rtx)
3662 end = gen_reg_rtx (Pmode);
3670 data = insn_data + CODE_FOR_movstr;
3672 if (data->operand[0].mode != VOIDmode)
3673 end = gen_lowpart (data->operand[0].mode, end);
3675 insn = data->genfun (end, dest_mem, src_mem);
3681 /* movstr is supposed to set end to the address of the NUL
3682 terminator. If the caller requested a mempcpy-like return value,
3684 if (endp == 1 && target != const0_rtx)
3686 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3687 emit_move_insn (target, force_operand (tem, NULL_RTX));
3693 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3694 NULL_RTX if we failed the caller should emit a normal call, otherwise
3695 try to get the result in TARGET, if convenient (and in mode MODE if that's
3699 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3701 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3703 tree dest = CALL_EXPR_ARG (exp, 0);
3704 tree src = CALL_EXPR_ARG (exp, 1);
3705 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3710 /* Helper function to do the actual work for expand_builtin_strcpy. The
3711 arguments to the builtin_strcpy call DEST and SRC are broken out
3712 so that this can also be called without constructing an actual CALL_EXPR.
3713 The other arguments and return value are the same as for
3714 expand_builtin_strcpy. */
3717 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3718 rtx target, enum machine_mode mode)
3720 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3723 while (TREE_CODE (result) == COMPOUND_EXPR)
3725 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3727 result = TREE_OPERAND (result, 1);
3729 return expand_expr (result, target, mode, EXPAND_NORMAL);
3731 return expand_movstr (dest, src, target, /*endp=*/0);
3735 /* Expand a call EXP to the stpcpy builtin.
3736 Return NULL_RTX if we failed the caller should emit a normal call,
3737 otherwise try to get the result in TARGET, if convenient (and in
3738 mode MODE if that's convenient). */
3741 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3745 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3748 dst = CALL_EXPR_ARG (exp, 0);
3749 src = CALL_EXPR_ARG (exp, 1);
3751 /* If return value is ignored, transform stpcpy into strcpy. */
3752 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3754 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3755 tree result = build_call_expr (fn, 2, dst, src);
3757 STRIP_NOPS (result);
3758 while (TREE_CODE (result) == COMPOUND_EXPR)
3760 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3762 result = TREE_OPERAND (result, 1);
3764 return expand_expr (result, target, mode, EXPAND_NORMAL);
3771 /* Ensure we get an actual string whose length can be evaluated at
3772 compile-time, not an expression containing a string. This is
3773 because the latter will potentially produce pessimized code
3774 when used to produce the return value. */
3775 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3776 return expand_movstr (dst, src, target, /*endp=*/2);
3778 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3779 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3780 target, mode, /*endp=*/2);
3785 if (TREE_CODE (len) == INTEGER_CST)
3787 rtx len_rtx = expand_normal (len);
3789 if (GET_CODE (len_rtx) == CONST_INT)
3791 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3792 dst, src, target, mode);
3798 if (mode != VOIDmode)
3799 target = gen_reg_rtx (mode);
3801 target = gen_reg_rtx (GET_MODE (ret));
3803 if (GET_MODE (target) != GET_MODE (ret))
3804 ret = gen_lowpart (GET_MODE (target), ret);
3806 ret = plus_constant (ret, INTVAL (len_rtx));
3807 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3815 return expand_movstr (dst, src, target, /*endp=*/2);
3819 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3820 bytes from constant string DATA + OFFSET and return it as target
3824 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3825 enum machine_mode mode)
3827 const char *str = (const char *) data;
3829 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3832 return c_readstr (str + offset, mode);
3835 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3836 NULL_RTX if we failed the caller should emit a normal call. */
3839 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3841 tree fndecl = get_callee_fndecl (exp);
3843 if (validate_arglist (exp,
3844 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3846 tree dest = CALL_EXPR_ARG (exp, 0);
3847 tree src = CALL_EXPR_ARG (exp, 1);
3848 tree len = CALL_EXPR_ARG (exp, 2);
3849 tree slen = c_strlen (src, 1);
3850 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3854 while (TREE_CODE (result) == COMPOUND_EXPR)
3856 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3858 result = TREE_OPERAND (result, 1);
3860 return expand_expr (result, target, mode, EXPAND_NORMAL);
3863 /* We must be passed a constant len and src parameter. */
3864 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3867 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3869 /* We're required to pad with trailing zeros if the requested
3870 len is greater than strlen(s2)+1. In that case try to
3871 use store_by_pieces, if it fails, punt. */
3872 if (tree_int_cst_lt (slen, len))
3874 unsigned int dest_align
3875 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3876 const char *p = c_getstr (src);
3879 if (!p || dest_align == 0 || !host_integerp (len, 1)
3880 || !can_store_by_pieces (tree_low_cst (len, 1),
3881 builtin_strncpy_read_str,
3882 CONST_CAST (char *, p),
3886 dest_mem = get_memory_rtx (dest, len);
3887 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3888 builtin_strncpy_read_str,
3889 CONST_CAST (char *, p), dest_align, false, 0);
3890 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3891 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3898 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3899 bytes from constant string DATA + OFFSET and return it as target
3903 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3904 enum machine_mode mode)
3906 const char *c = (const char *) data;
3907 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3909 memset (p, *c, GET_MODE_SIZE (mode));
3911 return c_readstr (p, mode);
3914 /* Callback routine for store_by_pieces. Return the RTL of a register
3915 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3916 char value given in the RTL register data. For example, if mode is
3917 4 bytes wide, return the RTL for 0x01010101*data. */
3920 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3921 enum machine_mode mode)
3927 size = GET_MODE_SIZE (mode);
3931 p = XALLOCAVEC (char, size);
3932 memset (p, 1, size);
3933 coeff = c_readstr (p, mode);
3935 target = convert_to_mode (mode, (rtx) data, 1);
3936 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3937 return force_reg (mode, target);
3940 /* Expand expression EXP, which is a call to the memset builtin. Return
3941 NULL_RTX if we failed the caller should emit a normal call, otherwise
3942 try to get the result in TARGET, if convenient (and in mode MODE if that's
3946 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3948 if (!validate_arglist (exp,
3949 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3953 tree dest = CALL_EXPR_ARG (exp, 0);
3954 tree val = CALL_EXPR_ARG (exp, 1);
3955 tree len = CALL_EXPR_ARG (exp, 2);
3956 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3960 /* Helper function to do the actual work for expand_builtin_memset. The
3961 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3962 so that this can also be called without constructing an actual CALL_EXPR.
3963 The other arguments and return value are the same as for
3964 expand_builtin_memset. */
3967 expand_builtin_memset_args (tree dest, tree val, tree len,
3968 rtx target, enum machine_mode mode, tree orig_exp)
3971 enum built_in_function fcode;
3973 unsigned int dest_align;
3974 rtx dest_mem, dest_addr, len_rtx;
3975 HOST_WIDE_INT expected_size = -1;
3976 unsigned int expected_align = 0;
3977 tree_ann_common_t ann;
3979 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3981 /* If DEST is not a pointer type, don't do this operation in-line. */
3982 if (dest_align == 0)
3985 ann = tree_common_ann (orig_exp);
3987 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3989 if (expected_align < dest_align)
3990 expected_align = dest_align;
3992 /* If the LEN parameter is zero, return DEST. */
3993 if (integer_zerop (len))
3995 /* Evaluate and ignore VAL in case it has side-effects. */
3996 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3997 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4000 /* Stabilize the arguments in case we fail. */
4001 dest = builtin_save_expr (dest);
4002 val = builtin_save_expr (val);
4003 len = builtin_save_expr (len);
4005 len_rtx = expand_normal (len);
4006 dest_mem = get_memory_rtx (dest, len);
4008 if (TREE_CODE (val) != INTEGER_CST)
4012 val_rtx = expand_normal (val);
4013 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4016 /* Assume that we can memset by pieces if we can store
4017 * the coefficients by pieces (in the required modes).
4018 * We can't pass builtin_memset_gen_str as that emits RTL. */
4020 if (host_integerp (len, 1)
4021 && can_store_by_pieces (tree_low_cst (len, 1),
4022 builtin_memset_read_str, &c, dest_align,
4025 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4027 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4028 builtin_memset_gen_str, val_rtx, dest_align,
4031 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4032 dest_align, expected_align,
4036 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4037 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4041 if (target_char_cast (val, &c))
4046 if (host_integerp (len, 1)
4047 && can_store_by_pieces (tree_low_cst (len, 1),
4048 builtin_memset_read_str, &c, dest_align,
4050 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4051 builtin_memset_read_str, &c, dest_align, true, 0);
4052 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4053 dest_align, expected_align,
4057 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4058 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4062 set_mem_align (dest_mem, dest_align);
4063 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4064 CALL_EXPR_TAILCALL (orig_exp)
4065 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4066 expected_align, expected_size);
4070 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4071 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4077 fndecl = get_callee_fndecl (orig_exp);
4078 fcode = DECL_FUNCTION_CODE (fndecl);
4079 if (fcode == BUILT_IN_MEMSET)
4080 fn = build_call_expr (fndecl, 3, dest, val, len);
4081 else if (fcode == BUILT_IN_BZERO)
4082 fn = build_call_expr (fndecl, 2, dest, len);
4085 if (TREE_CODE (fn) == CALL_EXPR)
4086 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4087 return expand_call (fn, target, target == const0_rtx);
4090 /* Expand expression EXP, which is a call to the bzero builtin. Return
4091 NULL_RTX if we failed the caller should emit a normal call. */
4094 expand_builtin_bzero (tree exp)
4098 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4101 dest = CALL_EXPR_ARG (exp, 0);
4102 size = CALL_EXPR_ARG (exp, 1);
4104 /* New argument list transforming bzero(ptr x, int y) to
4105 memset(ptr x, int 0, size_t y). This is done this way
4106 so that if it isn't expanded inline, we fallback to
4107 calling bzero instead of memset. */
4109 return expand_builtin_memset_args (dest, integer_zero_node,
4110 fold_convert (sizetype, size),
4111 const0_rtx, VOIDmode, exp);
4114 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4115 caller should emit a normal call, otherwise try to get the result
4116 in TARGET, if convenient (and in mode MODE if that's convenient). */
4119 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4121 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4122 INTEGER_TYPE, VOID_TYPE))
4124 tree type = TREE_TYPE (exp);
4125 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4126 CALL_EXPR_ARG (exp, 1),
4127 CALL_EXPR_ARG (exp, 2), type);
4129 return expand_expr (result, target, mode, EXPAND_NORMAL);
4134 /* Expand expression EXP, which is a call to the memcmp built-in function.
4135 Return NULL_RTX if we failed and the
4136 caller should emit a normal call, otherwise try to get the result in
4137 TARGET, if convenient (and in mode MODE, if that's convenient). */
4140 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4142 if (!validate_arglist (exp,
4143 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4147 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4148 CALL_EXPR_ARG (exp, 1),
4149 CALL_EXPR_ARG (exp, 2));
4151 return expand_expr (result, target, mode, EXPAND_NORMAL);
4154 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4156 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4159 tree arg1 = CALL_EXPR_ARG (exp, 0);
4160 tree arg2 = CALL_EXPR_ARG (exp, 1);
4161 tree len = CALL_EXPR_ARG (exp, 2);
4164 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4166 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4167 enum machine_mode insn_mode;
4169 #ifdef HAVE_cmpmemsi
4171 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4174 #ifdef HAVE_cmpstrnsi
4176 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4181 /* If we don't have POINTER_TYPE, call the function. */
4182 if (arg1_align == 0 || arg2_align == 0)
4185 /* Make a place to write the result of the instruction. */
4188 && REG_P (result) && GET_MODE (result) == insn_mode
4189 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4190 result = gen_reg_rtx (insn_mode);
4192 arg1_rtx = get_memory_rtx (arg1, len);
4193 arg2_rtx = get_memory_rtx (arg2, len);
4194 arg3_rtx = expand_normal (fold_convert (sizetype, len));
4196 /* Set MEM_SIZE as appropriate. */
4197 if (GET_CODE (arg3_rtx) == CONST_INT)
4199 set_mem_size (arg1_rtx, arg3_rtx);
4200 set_mem_size (arg2_rtx, arg3_rtx);
4203 #ifdef HAVE_cmpmemsi
4205 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4206 GEN_INT (MIN (arg1_align, arg2_align)));
4209 #ifdef HAVE_cmpstrnsi
4211 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4212 GEN_INT (MIN (arg1_align, arg2_align)));
4220 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4221 TYPE_MODE (integer_type_node), 3,
4222 XEXP (arg1_rtx, 0), Pmode,
4223 XEXP (arg2_rtx, 0), Pmode,
4224 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4225 TYPE_UNSIGNED (sizetype)),
4226 TYPE_MODE (sizetype));
4228 /* Return the value in the proper mode for this function. */
4229 mode = TYPE_MODE (TREE_TYPE (exp));
4230 if (GET_MODE (result) == mode)
4232 else if (target != 0)
4234 convert_move (target, result, 0);
4238 return convert_to_mode (mode, result, 0);
4245 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4246 if we failed the caller should emit a normal call, otherwise try to get
4247 the result in TARGET, if convenient. */
4250 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4252 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4256 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4257 CALL_EXPR_ARG (exp, 1));
4259 return expand_expr (result, target, mode, EXPAND_NORMAL);
4262 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4263 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4264 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4266 rtx arg1_rtx, arg2_rtx;
4267 rtx result, insn = NULL_RTX;
4269 tree arg1 = CALL_EXPR_ARG (exp, 0);
4270 tree arg2 = CALL_EXPR_ARG (exp, 1);
4273 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4275 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4277 /* If we don't have POINTER_TYPE, call the function. */
4278 if (arg1_align == 0 || arg2_align == 0)
4281 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4282 arg1 = builtin_save_expr (arg1);
4283 arg2 = builtin_save_expr (arg2);
4285 arg1_rtx = get_memory_rtx (arg1, NULL);
4286 arg2_rtx = get_memory_rtx (arg2, NULL);
4288 #ifdef HAVE_cmpstrsi
4289 /* Try to call cmpstrsi. */
4292 enum machine_mode insn_mode
4293 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4295 /* Make a place to write the result of the instruction. */
4298 && REG_P (result) && GET_MODE (result) == insn_mode
4299 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4300 result = gen_reg_rtx (insn_mode);
4302 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4303 GEN_INT (MIN (arg1_align, arg2_align)));
4306 #ifdef HAVE_cmpstrnsi
4307 /* Try to determine at least one length and call cmpstrnsi. */
4308 if (!insn && HAVE_cmpstrnsi)
4313 enum machine_mode insn_mode
4314 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4315 tree len1 = c_strlen (arg1, 1);
4316 tree len2 = c_strlen (arg2, 1);
4319 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4321 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4323 /* If we don't have a constant length for the first, use the length
4324 of the second, if we know it. We don't require a constant for
4325 this case; some cost analysis could be done if both are available
4326 but neither is constant. For now, assume they're equally cheap,
4327 unless one has side effects. If both strings have constant lengths,
4334 else if (TREE_SIDE_EFFECTS (len1))
4336 else if (TREE_SIDE_EFFECTS (len2))
4338 else if (TREE_CODE (len1) != INTEGER_CST)
4340 else if (TREE_CODE (len2) != INTEGER_CST)
4342 else if (tree_int_cst_lt (len1, len2))
4347 /* If both arguments have side effects, we cannot optimize. */
4348 if (!len || TREE_SIDE_EFFECTS (len))
4351 arg3_rtx = expand_normal (len);
4353 /* Make a place to write the result of the instruction. */
4356 && REG_P (result) && GET_MODE (result) == insn_mode
4357 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4358 result = gen_reg_rtx (insn_mode);
4360 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4361 GEN_INT (MIN (arg1_align, arg2_align)));
4369 /* Return the value in the proper mode for this function. */
4370 mode = TYPE_MODE (TREE_TYPE (exp));
4371 if (GET_MODE (result) == mode)
4374 return convert_to_mode (mode, result, 0);
4375 convert_move (target, result, 0);
4379 /* Expand the library call ourselves using a stabilized argument
4380 list to avoid re-evaluating the function's arguments twice. */
4381 #ifdef HAVE_cmpstrnsi
4384 fndecl = get_callee_fndecl (exp);
4385 fn = build_call_expr (fndecl, 2, arg1, arg2);
4386 if (TREE_CODE (fn) == CALL_EXPR)
4387 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4388 return expand_call (fn, target, target == const0_rtx);
4394 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4395 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4396 the result in TARGET, if convenient. */
4399 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4401 if (!validate_arglist (exp,
4402 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4406 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4407 CALL_EXPR_ARG (exp, 1),
4408 CALL_EXPR_ARG (exp, 2));
4410 return expand_expr (result, target, mode, EXPAND_NORMAL);
4413 /* If c_strlen can determine an expression for one of the string
4414 lengths, and it doesn't have side effects, then emit cmpstrnsi
4415 using length MIN(strlen(string)+1, arg3). */
4416 #ifdef HAVE_cmpstrnsi
4419 tree len, len1, len2;
4420 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4423 tree arg1 = CALL_EXPR_ARG (exp, 0);
4424 tree arg2 = CALL_EXPR_ARG (exp, 1);
4425 tree arg3 = CALL_EXPR_ARG (exp, 2);
4428 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4430 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4431 enum machine_mode insn_mode
4432 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4434 len1 = c_strlen (arg1, 1);
4435 len2 = c_strlen (arg2, 1);
4438 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4440 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4442 /* If we don't have a constant length for the first, use the length
4443 of the second, if we know it. We don't require a constant for
4444 this case; some cost analysis could be done if both are available
4445 but neither is constant. For now, assume they're equally cheap,
4446 unless one has side effects. If both strings have constant lengths,
4453 else if (TREE_SIDE_EFFECTS (len1))
4455 else if (TREE_SIDE_EFFECTS (len2))
4457 else if (TREE_CODE (len1) != INTEGER_CST)
4459 else if (TREE_CODE (len2) != INTEGER_CST)
4461 else if (tree_int_cst_lt (len1, len2))
4466 /* If both arguments have side effects, we cannot optimize. */
4467 if (!len || TREE_SIDE_EFFECTS (len))
4470 /* The actual new length parameter is MIN(len,arg3). */
4471 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4472 fold_convert (TREE_TYPE (len), arg3));
4474 /* If we don't have POINTER_TYPE, call the function. */
4475 if (arg1_align == 0 || arg2_align == 0)
4478 /* Make a place to write the result of the instruction. */
4481 && REG_P (result) && GET_MODE (result) == insn_mode
4482 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4483 result = gen_reg_rtx (insn_mode);
4485 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4486 arg1 = builtin_save_expr (arg1);
4487 arg2 = builtin_save_expr (arg2);
4488 len = builtin_save_expr (len);
4490 arg1_rtx = get_memory_rtx (arg1, len);
4491 arg2_rtx = get_memory_rtx (arg2, len);
4492 arg3_rtx = expand_normal (len);
4493 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4494 GEN_INT (MIN (arg1_align, arg2_align)));
4499 /* Return the value in the proper mode for this function. */
4500 mode = TYPE_MODE (TREE_TYPE (exp));
4501 if (GET_MODE (result) == mode)
4504 return convert_to_mode (mode, result, 0);
4505 convert_move (target, result, 0);
4509 /* Expand the library call ourselves using a stabilized argument
4510 list to avoid re-evaluating the function's arguments twice. */
4511 fndecl = get_callee_fndecl (exp);
4512 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4513 if (TREE_CODE (fn) == CALL_EXPR)
4514 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4515 return expand_call (fn, target, target == const0_rtx);
4521 /* Expand expression EXP, which is a call to the strcat builtin.
4522 Return NULL_RTX if we failed the caller should emit a normal call,
4523 otherwise try to get the result in TARGET, if convenient. */
4526 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4528 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4532 tree dst = CALL_EXPR_ARG (exp, 0);
4533 tree src = CALL_EXPR_ARG (exp, 1);
4534 const char *p = c_getstr (src);
4536 /* If the string length is zero, return the dst parameter. */
4537 if (p && *p == '\0')
4538 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4540 if (optimize_insn_for_speed_p ())
4542 /* See if we can store by pieces into (dst + strlen(dst)). */
4543 tree newsrc, newdst,
4544 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4547 /* Stabilize the argument list. */
4548 newsrc = builtin_save_expr (src);
4549 dst = builtin_save_expr (dst);
4553 /* Create strlen (dst). */
4554 newdst = build_call_expr (strlen_fn, 1, dst);
4555 /* Create (dst p+ strlen (dst)). */
4557 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4558 newdst = builtin_save_expr (newdst);
4560 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4562 end_sequence (); /* Stop sequence. */
4566 /* Output the entire sequence. */
4567 insns = get_insns ();
4571 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4578 /* Expand expression EXP, which is a call to the strncat builtin.
4579 Return NULL_RTX if we failed the caller should emit a normal call,
4580 otherwise try to get the result in TARGET, if convenient. */
4583 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4585 if (validate_arglist (exp,
4586 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4588 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4589 CALL_EXPR_ARG (exp, 1),
4590 CALL_EXPR_ARG (exp, 2));
4592 return expand_expr (result, target, mode, EXPAND_NORMAL);
4597 /* Expand expression EXP, which is a call to the strspn builtin.
4598 Return NULL_RTX if we failed the caller should emit a normal call,
4599 otherwise try to get the result in TARGET, if convenient. */
4602 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4604 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4606 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4607 CALL_EXPR_ARG (exp, 1));
4609 return expand_expr (result, target, mode, EXPAND_NORMAL);
4614 /* Expand expression EXP, which is a call to the strcspn builtin.
4615 Return NULL_RTX if we failed the caller should emit a normal call,
4616 otherwise try to get the result in TARGET, if convenient. */
4619 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4621 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4623 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4624 CALL_EXPR_ARG (exp, 1));
4626 return expand_expr (result, target, mode, EXPAND_NORMAL);
4631 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4632 if that's convenient. */
4635 expand_builtin_saveregs (void)
4639 /* Don't do __builtin_saveregs more than once in a function.
4640 Save the result of the first call and reuse it. */
4641 if (saveregs_value != 0)
4642 return saveregs_value;
4644 /* When this function is called, it means that registers must be
4645 saved on entry to this function. So we migrate the call to the
4646 first insn of this function. */
4650 /* Do whatever the machine needs done in this case. */
4651 val = targetm.calls.expand_builtin_saveregs ();
4656 saveregs_value = val;
4658 /* Put the insns after the NOTE that starts the function. If this
4659 is inside a start_sequence, make the outer-level insn chain current, so
4660 the code is placed at the start of the function. */
4661 push_topmost_sequence ();
4662 emit_insn_after (seq, entry_of_function ());
4663 pop_topmost_sequence ();
4668 /* __builtin_args_info (N) returns word N of the arg space info
4669 for the current function. The number and meanings of words
4670 is controlled by the definition of CUMULATIVE_ARGS. */
4673 expand_builtin_args_info (tree exp)
4675 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4676 int *word_ptr = (int *) &crtl->args.info;
4678 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4680 if (call_expr_nargs (exp) != 0)
4682 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4683 error ("argument of %<__builtin_args_info%> must be constant");
4686 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4688 if (wordnum < 0 || wordnum >= nwords)
4689 error ("argument of %<__builtin_args_info%> out of range");
4691 return GEN_INT (word_ptr[wordnum]);
4695 error ("missing argument in %<__builtin_args_info%>");
4700 /* Expand a call to __builtin_next_arg. */
4703 expand_builtin_next_arg (void)
4705 /* Checking arguments is already done in fold_builtin_next_arg
4706 that must be called before this function. */
4707 return expand_binop (ptr_mode, add_optab,
4708 crtl->args.internal_arg_pointer,
4709 crtl->args.arg_offset_rtx,
4710 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4713 /* Make it easier for the backends by protecting the valist argument
4714 from multiple evaluations. */
4717 stabilize_va_list (tree valist, int needs_lvalue)
4719 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4721 gcc_assert (vatype != NULL_TREE);
4723 if (TREE_CODE (vatype) == ARRAY_TYPE)
4725 if (TREE_SIDE_EFFECTS (valist))
4726 valist = save_expr (valist);
4728 /* For this case, the backends will be expecting a pointer to
4729 vatype, but it's possible we've actually been given an array
4730 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4732 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4734 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4735 valist = build_fold_addr_expr_with_type (valist, p1);
4744 if (! TREE_SIDE_EFFECTS (valist))
4747 pt = build_pointer_type (vatype);
4748 valist = fold_build1 (ADDR_EXPR, pt, valist);
4749 TREE_SIDE_EFFECTS (valist) = 1;
4752 if (TREE_SIDE_EFFECTS (valist))
4753 valist = save_expr (valist);
4754 valist = build_fold_indirect_ref (valist);
4760 /* The "standard" definition of va_list is void*. */
4763 std_build_builtin_va_list (void)
4765 return ptr_type_node;
4768 /* The "standard" abi va_list is va_list_type_node. */
4771 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4773 return va_list_type_node;
4776 /* The "standard" type of va_list is va_list_type_node. */
4779 std_canonical_va_list_type (tree type)
4783 if (INDIRECT_REF_P (type))
4784 type = TREE_TYPE (type);
4785 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4786 type = TREE_TYPE (type);
4787 wtype = va_list_type_node;
4789 /* Treat structure va_list types. */
4790 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4791 htype = TREE_TYPE (htype);
4792 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4794 /* If va_list is an array type, the argument may have decayed
4795 to a pointer type, e.g. by being passed to another function.
4796 In that case, unwrap both types so that we can compare the
4797 underlying records. */
4798 if (TREE_CODE (htype) == ARRAY_TYPE
4799 || POINTER_TYPE_P (htype))
4801 wtype = TREE_TYPE (wtype);
4802 htype = TREE_TYPE (htype);
4805 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4806 return va_list_type_node;
4811 /* The "standard" implementation of va_start: just assign `nextarg' to
4815 std_expand_builtin_va_start (tree valist, rtx nextarg)
4817 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4818 convert_move (va_r, nextarg, 0);
4821 /* Expand EXP, a call to __builtin_va_start. */
4824 expand_builtin_va_start (tree exp)
4829 if (call_expr_nargs (exp) < 2)
4831 error ("too few arguments to function %<va_start%>");
4835 if (fold_builtin_next_arg (exp, true))
4838 nextarg = expand_builtin_next_arg ();
4839 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4841 if (targetm.expand_builtin_va_start)
4842 targetm.expand_builtin_va_start (valist, nextarg);
4844 std_expand_builtin_va_start (valist, nextarg);
4849 /* The "standard" implementation of va_arg: read the value from the
4850 current (padded) address and increment by the (padded) size. */
4853 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4856 tree addr, t, type_size, rounded_size, valist_tmp;
4857 unsigned HOST_WIDE_INT align, boundary;
4860 #ifdef ARGS_GROW_DOWNWARD
4861 /* All of the alignment and movement below is for args-grow-up machines.
4862 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4863 implement their own specialized gimplify_va_arg_expr routines. */
4867 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4869 type = build_pointer_type (type);
4871 align = PARM_BOUNDARY / BITS_PER_UNIT;
4872 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4874 /* When we align parameter on stack for caller, if the parameter
4875 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4876 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4877 here with caller. */
4878 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4879 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4881 boundary /= BITS_PER_UNIT;
4883 /* Hoist the valist value into a temporary for the moment. */
4884 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4886 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4887 requires greater alignment, we must perform dynamic alignment. */
4888 if (boundary > align
4889 && !integer_zerop (TYPE_SIZE (type)))
4891 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4892 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4893 valist_tmp, size_int (boundary - 1)));
4894 gimplify_and_add (t, pre_p);
4896 t = fold_convert (sizetype, valist_tmp);
4897 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4898 fold_convert (TREE_TYPE (valist),
4899 fold_build2 (BIT_AND_EXPR, sizetype, t,
4900 size_int (-boundary))));
4901 gimplify_and_add (t, pre_p);
4906 /* If the actual alignment is less than the alignment of the type,
4907 adjust the type accordingly so that we don't assume strict alignment
4908 when dereferencing the pointer. */
4909 boundary *= BITS_PER_UNIT;
4910 if (boundary < TYPE_ALIGN (type))
4912 type = build_variant_type_copy (type);
4913 TYPE_ALIGN (type) = boundary;
4916 /* Compute the rounded size of the type. */
4917 type_size = size_in_bytes (type);
4918 rounded_size = round_up (type_size, align);
4920 /* Reduce rounded_size so it's sharable with the postqueue. */
4921 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4925 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4927 /* Small args are padded downward. */
4928 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4929 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4930 size_binop (MINUS_EXPR, rounded_size, type_size));
4931 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4934 /* Compute new value for AP. */
4935 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4936 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4937 gimplify_and_add (t, pre_p);
4939 addr = fold_convert (build_pointer_type (type), addr);
4942 addr = build_va_arg_indirect_ref (addr);
4944 return build_va_arg_indirect_ref (addr);
4947 /* Build an indirect-ref expression over the given TREE, which represents a
4948 piece of a va_arg() expansion. */
4950 build_va_arg_indirect_ref (tree addr)
4952 addr = build_fold_indirect_ref (addr);
4954 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4960 /* Return a dummy expression of type TYPE in order to keep going after an
4964 dummy_object (tree type)
4966 tree t = build_int_cst (build_pointer_type (type), 0);
4967 return build1 (INDIRECT_REF, type, t);
4970 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4971 builtin function, but a very special sort of operator. */
4973 enum gimplify_status
4974 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4976 tree promoted_type, have_va_type;
4977 tree valist = TREE_OPERAND (*expr_p, 0);
4978 tree type = TREE_TYPE (*expr_p);
4981 /* Verify that valist is of the proper type. */
4982 have_va_type = TREE_TYPE (valist);
4983 if (have_va_type == error_mark_node)
4985 have_va_type = targetm.canonical_va_list_type (have_va_type);
4987 if (have_va_type == NULL_TREE)
4989 error ("first argument to %<va_arg%> not of type %<va_list%>");
4993 /* Generate a diagnostic for requesting data of a type that cannot
4994 be passed through `...' due to type promotion at the call site. */
4995 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4998 static bool gave_help;
5001 /* Unfortunately, this is merely undefined, rather than a constraint
5002 violation, so we cannot make this an error. If this call is never
5003 executed, the program is still strictly conforming. */
5004 warned = warning (0, "%qT is promoted to %qT when passed through %<...%>",
5005 type, promoted_type);
5006 if (!gave_help && warned)
5009 inform (input_location, "(so you should pass %qT not %qT to %<va_arg%>)",
5010 promoted_type, type);
5013 /* We can, however, treat "undefined" any way we please.
5014 Call abort to encourage the user to fix the program. */
5016 inform (input_location, "if this code is reached, the program will abort");
5017 /* Before the abort, allow the evaluation of the va_list
5018 expression to exit or longjmp. */
5019 gimplify_and_add (valist, pre_p);
5020 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
5021 gimplify_and_add (t, pre_p);
5023 /* This is dead code, but go ahead and finish so that the
5024 mode of the result comes out right. */
5025 *expr_p = dummy_object (type);
5030 /* Make it easier for the backends by protecting the valist argument
5031 from multiple evaluations. */
5032 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5034 /* For this case, the backends will be expecting a pointer to
5035 TREE_TYPE (abi), but it's possible we've
5036 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5038 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5040 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5041 valist = build_fold_addr_expr_with_type (valist, p1);
5044 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5047 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5049 if (!targetm.gimplify_va_arg_expr)
5050 /* FIXME: Once most targets are converted we should merely
5051 assert this is non-null. */
5054 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5059 /* Expand EXP, a call to __builtin_va_end. */
5062 expand_builtin_va_end (tree exp)
5064 tree valist = CALL_EXPR_ARG (exp, 0);
5066 /* Evaluate for side effects, if needed. I hate macros that don't
5068 if (TREE_SIDE_EFFECTS (valist))
5069 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5074 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5075 builtin rather than just as an assignment in stdarg.h because of the
5076 nastiness of array-type va_list types. */
5079 expand_builtin_va_copy (tree exp)
5083 dst = CALL_EXPR_ARG (exp, 0);
5084 src = CALL_EXPR_ARG (exp, 1);
5086 dst = stabilize_va_list (dst, 1);
5087 src = stabilize_va_list (src, 0);
5089 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5091 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5093 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5094 TREE_SIDE_EFFECTS (t) = 1;
5095 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5099 rtx dstb, srcb, size;
5101 /* Evaluate to pointers. */
5102 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5103 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5104 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5105 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5107 dstb = convert_memory_address (Pmode, dstb);
5108 srcb = convert_memory_address (Pmode, srcb);
5110 /* "Dereference" to BLKmode memories. */
5111 dstb = gen_rtx_MEM (BLKmode, dstb);
5112 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5113 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5114 srcb = gen_rtx_MEM (BLKmode, srcb);
5115 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5116 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5119 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5125 /* Expand a call to one of the builtin functions __builtin_frame_address or
5126 __builtin_return_address. */
5129 expand_builtin_frame_address (tree fndecl, tree exp)
5131 /* The argument must be a nonnegative integer constant.
5132 It counts the number of frames to scan up the stack.
5133 The value is the return address saved in that frame. */
5134 if (call_expr_nargs (exp) == 0)
5135 /* Warning about missing arg was already issued. */
5137 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5139 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5140 error ("invalid argument to %<__builtin_frame_address%>");
5142 error ("invalid argument to %<__builtin_return_address%>");
5148 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5149 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5151 /* Some ports cannot access arbitrary stack frames. */
5154 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5155 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5157 warning (0, "unsupported argument to %<__builtin_return_address%>");
5161 /* For __builtin_frame_address, return what we've got. */
5162 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5166 && ! CONSTANT_P (tem))
5167 tem = copy_to_mode_reg (Pmode, tem);
5172 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5173 we failed and the caller should emit a normal call, otherwise try to get
5174 the result in TARGET, if convenient. */
5177 expand_builtin_alloca (tree exp, rtx target)
5182 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5183 should always expand to function calls. These can be intercepted
5188 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5191 /* Compute the argument. */
5192 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5194 /* Allocate the desired space. */
5195 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5196 result = convert_memory_address (ptr_mode, result);
5201 /* Expand a call to a bswap builtin with argument ARG0. MODE
5202 is the mode to expand with. */
5205 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5207 enum machine_mode mode;
5211 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5214 arg = CALL_EXPR_ARG (exp, 0);
5215 mode = TYPE_MODE (TREE_TYPE (arg));
5216 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5218 target = expand_unop (mode, bswap_optab, op0, target, 1);
5220 gcc_assert (target);
5222 return convert_to_mode (mode, target, 0);
5225 /* Expand a call to a unary builtin in EXP.
5226 Return NULL_RTX if a normal call should be emitted rather than expanding the
5227 function in-line. If convenient, the result should be placed in TARGET.
5228 SUBTARGET may be used as the target for computing one of EXP's operands. */
5231 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5232 rtx subtarget, optab op_optab)
5236 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5239 /* Compute the argument. */
5240 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5241 VOIDmode, EXPAND_NORMAL);
5242 /* Compute op, into TARGET if possible.
5243 Set TARGET to wherever the result comes back. */
5244 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5245 op_optab, op0, target, 1);
5246 gcc_assert (target);
5248 return convert_to_mode (target_mode, target, 0);
5251 /* If the string passed to fputs is a constant and is one character
5252 long, we attempt to transform this call into __builtin_fputc(). */
5255 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5257 /* Verify the arguments in the original call. */
5258 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5260 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5261 CALL_EXPR_ARG (exp, 1),
5262 (target == const0_rtx),
5263 unlocked, NULL_TREE);
5265 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5270 /* Expand a call to __builtin_expect. We just return our argument
5271 as the builtin_expect semantic should've been already executed by
5272 tree branch prediction pass. */
5275 expand_builtin_expect (tree exp, rtx target)
5279 if (call_expr_nargs (exp) < 2)
5281 arg = CALL_EXPR_ARG (exp, 0);
5282 c = CALL_EXPR_ARG (exp, 1);
5284 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5285 /* When guessing was done, the hints should be already stripped away. */
5286 gcc_assert (!flag_guess_branch_prob
5287 || optimize == 0 || errorcount || sorrycount);
5292 expand_builtin_trap (void)
5296 emit_insn (gen_trap ());
5299 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5303 /* Expand EXP, a call to fabs, fabsf or fabsl.
5304 Return NULL_RTX if a normal call should be emitted rather than expanding
5305 the function inline. If convenient, the result should be placed
5306 in TARGET. SUBTARGET may be used as the target for computing
5310 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5312 enum machine_mode mode;
5316 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5319 arg = CALL_EXPR_ARG (exp, 0);
5320 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5321 mode = TYPE_MODE (TREE_TYPE (arg));
5322 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5323 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5326 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5327 Return NULL is a normal call should be emitted rather than expanding the
5328 function inline. If convenient, the result should be placed in TARGET.
5329 SUBTARGET may be used as the target for computing the operand. */
5332 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5337 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5340 arg = CALL_EXPR_ARG (exp, 0);
5341 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5343 arg = CALL_EXPR_ARG (exp, 1);
5344 op1 = expand_normal (arg);
5346 return expand_copysign (op0, op1, target);
5349 /* Create a new constant string literal and return a char* pointer to it.
5350 The STRING_CST value is the LEN characters at STR. */
5352 build_string_literal (int len, const char *str)
5354 tree t, elem, index, type;
5356 t = build_string (len, str);
5357 elem = build_type_variant (char_type_node, 1, 0);
5358 index = build_index_type (size_int (len - 1));
5359 type = build_array_type (elem, index);
5360 TREE_TYPE (t) = type;
5361 TREE_CONSTANT (t) = 1;
5362 TREE_READONLY (t) = 1;
5363 TREE_STATIC (t) = 1;
5365 type = build_pointer_type (elem);
5366 t = build1 (ADDR_EXPR, type,
5367 build4 (ARRAY_REF, elem,
5368 t, integer_zero_node, NULL_TREE, NULL_TREE));
5372 /* Expand EXP, a call to printf or printf_unlocked.
5373 Return NULL_RTX if a normal call should be emitted rather than transforming
5374 the function inline. If convenient, the result should be placed in
5375 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5378 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5381 /* If we're using an unlocked function, assume the other unlocked
5382 functions exist explicitly. */
5383 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5384 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5385 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5386 : implicit_built_in_decls[BUILT_IN_PUTS];
5387 const char *fmt_str;
5390 int nargs = call_expr_nargs (exp);
5392 /* If the return value is used, don't do the transformation. */
5393 if (target != const0_rtx)
5396 /* Verify the required arguments in the original call. */
5399 fmt = CALL_EXPR_ARG (exp, 0);
5400 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5403 /* Check whether the format is a literal string constant. */
5404 fmt_str = c_getstr (fmt);
5405 if (fmt_str == NULL)
5408 if (!init_target_chars ())
5411 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5412 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5415 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5418 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5420 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5421 else if (strcmp (fmt_str, target_percent_c) == 0)
5424 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5427 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5431 /* We can't handle anything else with % args or %% ... yet. */
5432 if (strchr (fmt_str, target_percent))
5438 /* If the format specifier was "", printf does nothing. */
5439 if (fmt_str[0] == '\0')
5441 /* If the format specifier has length of 1, call putchar. */
5442 if (fmt_str[1] == '\0')
5444 /* Given printf("c"), (where c is any one character,)
5445 convert "c"[0] to an int and pass that to the replacement
5447 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5449 fn = build_call_expr (fn_putchar, 1, arg);
5453 /* If the format specifier was "string\n", call puts("string"). */
5454 size_t len = strlen (fmt_str);
5455 if ((unsigned char)fmt_str[len - 1] == target_newline
5456 && (size_t) (int) len == len
5460 tree offset_node, string_cst;
5462 /* Create a NUL-terminated string that's one char shorter
5463 than the original, stripping off the trailing '\n'. */
5464 arg = build_string_literal (len, fmt_str);
5465 string_cst = string_constant (arg, &offset_node);
5466 #ifdef ENABLE_CHECKING
5467 gcc_assert (string_cst
5468 && (TREE_STRING_LENGTH (string_cst)
5470 && integer_zerop (offset_node)
5472 TREE_STRING_POINTER (string_cst)[len - 1]
5475 /* build_string_literal creates a new STRING_CST,
5476 modify it in place to avoid double copying. */
5477 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
5478 newstr[len - 1] = '\0';
5480 fn = build_call_expr (fn_puts, 1, arg);
5483 /* We'd like to arrange to call fputs(string,stdout) here,
5484 but we need stdout and don't have a way to get it yet. */
5491 if (TREE_CODE (fn) == CALL_EXPR)
5492 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5493 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5496 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5497 Return NULL_RTX if a normal call should be emitted rather than transforming
5498 the function inline. If convenient, the result should be placed in
5499 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5502 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5505 /* If we're using an unlocked function, assume the other unlocked
5506 functions exist explicitly. */
5507 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5508 : implicit_built_in_decls[BUILT_IN_FPUTC];
5509 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5510 : implicit_built_in_decls[BUILT_IN_FPUTS];
5511 const char *fmt_str;
5514 int nargs = call_expr_nargs (exp);
5516 /* If the return value is used, don't do the transformation. */
5517 if (target != const0_rtx)
5520 /* Verify the required arguments in the original call. */
5523 fp = CALL_EXPR_ARG (exp, 0);
5524 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5526 fmt = CALL_EXPR_ARG (exp, 1);
5527 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5530 /* Check whether the format is a literal string constant. */
5531 fmt_str = c_getstr (fmt);
5532 if (fmt_str == NULL)
5535 if (!init_target_chars ())
5538 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5539 if (strcmp (fmt_str, target_percent_s) == 0)
5542 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5544 arg = CALL_EXPR_ARG (exp, 2);
5546 fn = build_call_expr (fn_fputs, 2, arg, fp);
5548 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5549 else if (strcmp (fmt_str, target_percent_c) == 0)
5552 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5554 arg = CALL_EXPR_ARG (exp, 2);
5556 fn = build_call_expr (fn_fputc, 2, arg, fp);
5560 /* We can't handle anything else with % args or %% ... yet. */
5561 if (strchr (fmt_str, target_percent))
5567 /* If the format specifier was "", fprintf does nothing. */
5568 if (fmt_str[0] == '\0')
5570 /* Evaluate and ignore FILE* argument for side-effects. */
5571 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5575 /* When "string" doesn't contain %, replace all cases of
5576 fprintf(stream,string) with fputs(string,stream). The fputs
5577 builtin will take care of special cases like length == 1. */
5579 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5584 if (TREE_CODE (fn) == CALL_EXPR)
5585 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5586 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5589 /* Expand a call EXP to sprintf. Return NULL_RTX if
5590 a normal call should be emitted rather than expanding the function
5591 inline. If convenient, the result should be placed in TARGET with
5595 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5598 const char *fmt_str;
5599 int nargs = call_expr_nargs (exp);
5601 /* Verify the required arguments in the original call. */
5604 dest = CALL_EXPR_ARG (exp, 0);
5605 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5607 fmt = CALL_EXPR_ARG (exp, 0);
5608 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5611 /* Check whether the format is a literal string constant. */
5612 fmt_str = c_getstr (fmt);
5613 if (fmt_str == NULL)
5616 if (!init_target_chars ())
5619 /* If the format doesn't contain % args or %%, use strcpy. */
5620 if (strchr (fmt_str, target_percent) == 0)
5622 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5625 if ((nargs > 2) || ! fn)
5627 expand_expr (build_call_expr (fn, 2, dest, fmt),
5628 const0_rtx, VOIDmode, EXPAND_NORMAL);
5629 if (target == const0_rtx)
5631 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5632 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5634 /* If the format is "%s", use strcpy if the result isn't used. */
5635 else if (strcmp (fmt_str, target_percent_s) == 0)
5638 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5644 arg = CALL_EXPR_ARG (exp, 2);
5645 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5648 if (target != const0_rtx)
5650 len = c_strlen (arg, 1);
5651 if (! len || TREE_CODE (len) != INTEGER_CST)
5657 expand_expr (build_call_expr (fn, 2, dest, arg),
5658 const0_rtx, VOIDmode, EXPAND_NORMAL);
5660 if (target == const0_rtx)
5662 return expand_expr (len, target, mode, EXPAND_NORMAL);
5668 /* Expand a call to either the entry or exit function profiler. */
5671 expand_builtin_profile_func (bool exitp)
5673 rtx this_rtx, which;
5675 this_rtx = DECL_RTL (current_function_decl);
5676 gcc_assert (MEM_P (this_rtx));
5677 this_rtx = XEXP (this_rtx, 0);
5680 which = profile_function_exit_libfunc;
5682 which = profile_function_entry_libfunc;
5684 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5685 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5692 /* Expand a call to __builtin___clear_cache. */
5695 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5697 #ifndef HAVE_clear_cache
5698 #ifdef CLEAR_INSN_CACHE
5699 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5700 does something. Just do the default expansion to a call to
5704 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5705 does nothing. There is no need to call it. Do nothing. */
5707 #endif /* CLEAR_INSN_CACHE */
5709 /* We have a "clear_cache" insn, and it will handle everything. */
5711 rtx begin_rtx, end_rtx;
5712 enum insn_code icode;
5714 /* We must not expand to a library call. If we did, any
5715 fallback library function in libgcc that might contain a call to
5716 __builtin___clear_cache() would recurse infinitely. */
5717 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5719 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5723 if (HAVE_clear_cache)
5725 icode = CODE_FOR_clear_cache;
5727 begin = CALL_EXPR_ARG (exp, 0);
5728 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5729 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5730 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5731 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5733 end = CALL_EXPR_ARG (exp, 1);
5734 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5735 end_rtx = convert_memory_address (Pmode, end_rtx);
5736 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5737 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5739 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5742 #endif /* HAVE_clear_cache */
5745 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5748 round_trampoline_addr (rtx tramp)
5750 rtx temp, addend, mask;
5752 /* If we don't need too much alignment, we'll have been guaranteed
5753 proper alignment by get_trampoline_type. */
5754 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5757 /* Round address up to desired boundary. */
5758 temp = gen_reg_rtx (Pmode);
5759 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5760 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5762 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5763 temp, 0, OPTAB_LIB_WIDEN);
5764 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5765 temp, 0, OPTAB_LIB_WIDEN);
5771 expand_builtin_init_trampoline (tree exp)
5773 tree t_tramp, t_func, t_chain;
5774 rtx r_tramp, r_func, r_chain;
5775 #ifdef TRAMPOLINE_TEMPLATE
5779 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5780 POINTER_TYPE, VOID_TYPE))
5783 t_tramp = CALL_EXPR_ARG (exp, 0);
5784 t_func = CALL_EXPR_ARG (exp, 1);
5785 t_chain = CALL_EXPR_ARG (exp, 2);
5787 r_tramp = expand_normal (t_tramp);
5788 r_func = expand_normal (t_func);
5789 r_chain = expand_normal (t_chain);
5791 /* Generate insns to initialize the trampoline. */
5792 r_tramp = round_trampoline_addr (r_tramp);
5793 #ifdef TRAMPOLINE_TEMPLATE
5794 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5795 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5796 emit_block_move (blktramp, assemble_trampoline_template (),
5797 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5799 trampolines_created = 1;
5800 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5806 expand_builtin_adjust_trampoline (tree exp)
5810 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5813 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5814 tramp = round_trampoline_addr (tramp);
5815 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5816 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5822 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5823 function. The function first checks whether the back end provides
5824 an insn to implement signbit for the respective mode. If not, it
5825 checks whether the floating point format of the value is such that
5826 the sign bit can be extracted. If that is not the case, the
5827 function returns NULL_RTX to indicate that a normal call should be
5828 emitted rather than expanding the function in-line. EXP is the
5829 expression that is a call to the builtin function; if convenient,
5830 the result should be placed in TARGET. */
5832 expand_builtin_signbit (tree exp, rtx target)
5834 const struct real_format *fmt;
5835 enum machine_mode fmode, imode, rmode;
5836 HOST_WIDE_INT hi, lo;
5839 enum insn_code icode;
5842 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5845 arg = CALL_EXPR_ARG (exp, 0);
5846 fmode = TYPE_MODE (TREE_TYPE (arg));
5847 rmode = TYPE_MODE (TREE_TYPE (exp));
5848 fmt = REAL_MODE_FORMAT (fmode);
5850 arg = builtin_save_expr (arg);
5852 /* Expand the argument yielding a RTX expression. */
5853 temp = expand_normal (arg);
5855 /* Check if the back end provides an insn that handles signbit for the
5857 icode = signbit_optab->handlers [(int) fmode].insn_code;
5858 if (icode != CODE_FOR_nothing)
5860 rtx last = get_last_insn ();
5861 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5862 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5864 delete_insns_since (last);
5867 /* For floating point formats without a sign bit, implement signbit
5869 bitpos = fmt->signbit_ro;
5872 /* But we can't do this if the format supports signed zero. */
5873 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5876 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5877 build_real (TREE_TYPE (arg), dconst0));
5878 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5881 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5883 imode = int_mode_for_mode (fmode);
5884 if (imode == BLKmode)
5886 temp = gen_lowpart (imode, temp);
5891 /* Handle targets with different FP word orders. */
5892 if (FLOAT_WORDS_BIG_ENDIAN)
5893 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5895 word = bitpos / BITS_PER_WORD;
5896 temp = operand_subword_force (temp, word, fmode);
5897 bitpos = bitpos % BITS_PER_WORD;
5900 /* Force the intermediate word_mode (or narrower) result into a
5901 register. This avoids attempting to create paradoxical SUBREGs
5902 of floating point modes below. */
5903 temp = force_reg (imode, temp);
5905 /* If the bitpos is within the "result mode" lowpart, the operation
5906 can be implement with a single bitwise AND. Otherwise, we need
5907 a right shift and an AND. */
5909 if (bitpos < GET_MODE_BITSIZE (rmode))
5911 if (bitpos < HOST_BITS_PER_WIDE_INT)
5914 lo = (HOST_WIDE_INT) 1 << bitpos;
5918 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5922 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5923 temp = gen_lowpart (rmode, temp);
5924 temp = expand_binop (rmode, and_optab, temp,
5925 immed_double_const (lo, hi, rmode),
5926 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5930 /* Perform a logical right shift to place the signbit in the least
5931 significant bit, then truncate the result to the desired mode
5932 and mask just this bit. */
5933 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5934 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5935 temp = gen_lowpart (rmode, temp);
5936 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5937 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5943 /* Expand fork or exec calls. TARGET is the desired target of the
5944 call. EXP is the call. FN is the
5945 identificator of the actual function. IGNORE is nonzero if the
5946 value is to be ignored. */
5949 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5954 /* If we are not profiling, just call the function. */
5955 if (!profile_arc_flag)
5958 /* Otherwise call the wrapper. This should be equivalent for the rest of
5959 compiler, so the code does not diverge, and the wrapper may run the
5960 code necessary for keeping the profiling sane. */
5962 switch (DECL_FUNCTION_CODE (fn))
5965 id = get_identifier ("__gcov_fork");
5968 case BUILT_IN_EXECL:
5969 id = get_identifier ("__gcov_execl");
5972 case BUILT_IN_EXECV:
5973 id = get_identifier ("__gcov_execv");
5976 case BUILT_IN_EXECLP:
5977 id = get_identifier ("__gcov_execlp");
5980 case BUILT_IN_EXECLE:
5981 id = get_identifier ("__gcov_execle");
5984 case BUILT_IN_EXECVP:
5985 id = get_identifier ("__gcov_execvp");
5988 case BUILT_IN_EXECVE:
5989 id = get_identifier ("__gcov_execve");
5996 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5997 DECL_EXTERNAL (decl) = 1;
5998 TREE_PUBLIC (decl) = 1;
5999 DECL_ARTIFICIAL (decl) = 1;
6000 TREE_NOTHROW (decl) = 1;
6001 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6002 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6003 call = rewrite_call_expr (exp, 0, decl, 0);
6004 return expand_call (call, target, ignore);
6009 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6010 the pointer in these functions is void*, the tree optimizers may remove
6011 casts. The mode computed in expand_builtin isn't reliable either, due
6012 to __sync_bool_compare_and_swap.
6014 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6015 group of builtins. This gives us log2 of the mode size. */
6017 static inline enum machine_mode
6018 get_builtin_sync_mode (int fcode_diff)
6020 /* The size is not negotiable, so ask not to get BLKmode in return
6021 if the target indicates that a smaller size would be better. */
6022 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
6025 /* Expand the memory expression LOC and return the appropriate memory operand
6026 for the builtin_sync operations. */
6029 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6033 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6035 /* Note that we explicitly do not want any alias information for this
6036 memory, so that we kill all other live memories. Otherwise we don't
6037 satisfy the full barrier semantics of the intrinsic. */
6038 mem = validize_mem (gen_rtx_MEM (mode, addr));
6040 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6041 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6042 MEM_VOLATILE_P (mem) = 1;
6047 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6048 EXP is the CALL_EXPR. CODE is the rtx code
6049 that corresponds to the arithmetic or logical operation from the name;
6050 an exception here is that NOT actually means NAND. TARGET is an optional
6051 place for us to store the results; AFTER is true if this is the
6052 fetch_and_xxx form. IGNORE is true if we don't actually care about
6053 the result of the operation at all. */
6056 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6057 enum rtx_code code, bool after,
6058 rtx target, bool ignore)
6061 enum machine_mode old_mode;
6063 if (code == NOT && warn_sync_nand)
6065 tree fndecl = get_callee_fndecl (exp);
6066 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6068 static bool warned_f_a_n, warned_n_a_f;
6072 case BUILT_IN_FETCH_AND_NAND_1:
6073 case BUILT_IN_FETCH_AND_NAND_2:
6074 case BUILT_IN_FETCH_AND_NAND_4:
6075 case BUILT_IN_FETCH_AND_NAND_8:
6076 case BUILT_IN_FETCH_AND_NAND_16:
6081 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6082 inform (input_location,
6083 "%qD changed semantics in GCC 4.4", fndecl);
6084 warned_f_a_n = true;
6087 case BUILT_IN_NAND_AND_FETCH_1:
6088 case BUILT_IN_NAND_AND_FETCH_2:
6089 case BUILT_IN_NAND_AND_FETCH_4:
6090 case BUILT_IN_NAND_AND_FETCH_8:
6091 case BUILT_IN_NAND_AND_FETCH_16:
6096 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6097 inform (input_location,
6098 "%qD changed semantics in GCC 4.4", fndecl);
6099 warned_n_a_f = true;
6107 /* Expand the operands. */
6108 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6110 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6111 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6112 of CONST_INTs, where we know the old_mode only from the call argument. */
6113 old_mode = GET_MODE (val);
6114 if (old_mode == VOIDmode)
6115 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6116 val = convert_modes (mode, old_mode, val, 1);
6119 return expand_sync_operation (mem, val, code);
6121 return expand_sync_fetch_operation (mem, val, code, after, target);
6124 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6125 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6126 true if this is the boolean form. TARGET is a place for us to store the
6127 results; this is NOT optional if IS_BOOL is true. */
6130 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6131 bool is_bool, rtx target)
6133 rtx old_val, new_val, mem;
6134 enum machine_mode old_mode;
6136 /* Expand the operands. */
6137 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6140 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6141 mode, EXPAND_NORMAL);
6142 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6143 of CONST_INTs, where we know the old_mode only from the call argument. */
6144 old_mode = GET_MODE (old_val);
6145 if (old_mode == VOIDmode)
6146 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6147 old_val = convert_modes (mode, old_mode, old_val, 1);
6149 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6150 mode, EXPAND_NORMAL);
6151 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6152 of CONST_INTs, where we know the old_mode only from the call argument. */
6153 old_mode = GET_MODE (new_val);
6154 if (old_mode == VOIDmode)
6155 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6156 new_val = convert_modes (mode, old_mode, new_val, 1);
6159 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6161 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6164 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6165 general form is actually an atomic exchange, and some targets only
6166 support a reduced form with the second argument being a constant 1.
6167 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6171 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6175 enum machine_mode old_mode;
6177 /* Expand the operands. */
6178 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6179 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6180 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6181 of CONST_INTs, where we know the old_mode only from the call argument. */
6182 old_mode = GET_MODE (val);
6183 if (old_mode == VOIDmode)
6184 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6185 val = convert_modes (mode, old_mode, val, 1);
6187 return expand_sync_lock_test_and_set (mem, val, target);
6190 /* Expand the __sync_synchronize intrinsic. */
6193 expand_builtin_synchronize (void)
6197 #ifdef HAVE_memory_barrier
6198 if (HAVE_memory_barrier)
6200 emit_insn (gen_memory_barrier ());
6205 if (synchronize_libfunc != NULL_RTX)
6207 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6211 /* If no explicit memory barrier instruction is available, create an
6212 empty asm stmt with a memory clobber. */
6213 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6214 tree_cons (NULL, build_string (6, "memory"), NULL));
6215 ASM_VOLATILE_P (x) = 1;
6216 expand_asm_expr (x);
6219 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6222 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6224 enum insn_code icode;
6226 rtx val = const0_rtx;
6228 /* Expand the operands. */
6229 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6231 /* If there is an explicit operation in the md file, use it. */
6232 icode = sync_lock_release[mode];
6233 if (icode != CODE_FOR_nothing)
6235 if (!insn_data[icode].operand[1].predicate (val, mode))
6236 val = force_reg (mode, val);
6238 insn = GEN_FCN (icode) (mem, val);
6246 /* Otherwise we can implement this operation by emitting a barrier
6247 followed by a store of zero. */
6248 expand_builtin_synchronize ();
6249 emit_move_insn (mem, val);
6252 /* Expand an expression EXP that calls a built-in function,
6253 with result going to TARGET if that's convenient
6254 (and in mode MODE if that's convenient).
6255 SUBTARGET may be used as the target for computing one of EXP's operands.
6256 IGNORE is nonzero if the value is to be ignored. */
6259 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6262 tree fndecl = get_callee_fndecl (exp);
6263 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6264 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6266 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6267 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6269 /* When not optimizing, generate calls to library functions for a certain
6272 && !called_as_built_in (fndecl)
6273 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6274 && fcode != BUILT_IN_ALLOCA
6275 && fcode != BUILT_IN_FREE)
6276 return expand_call (exp, target, ignore);
6278 /* The built-in function expanders test for target == const0_rtx
6279 to determine whether the function's result will be ignored. */
6281 target = const0_rtx;
6283 /* If the result of a pure or const built-in function is ignored, and
6284 none of its arguments are volatile, we can avoid expanding the
6285 built-in call and just evaluate the arguments for side-effects. */
6286 if (target == const0_rtx
6287 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6289 bool volatilep = false;
6291 call_expr_arg_iterator iter;
6293 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6294 if (TREE_THIS_VOLATILE (arg))
6302 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6303 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6310 CASE_FLT_FN (BUILT_IN_FABS):
6311 target = expand_builtin_fabs (exp, target, subtarget);
6316 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6317 target = expand_builtin_copysign (exp, target, subtarget);
6322 /* Just do a normal library call if we were unable to fold
6324 CASE_FLT_FN (BUILT_IN_CABS):
6327 CASE_FLT_FN (BUILT_IN_EXP):
6328 CASE_FLT_FN (BUILT_IN_EXP10):
6329 CASE_FLT_FN (BUILT_IN_POW10):
6330 CASE_FLT_FN (BUILT_IN_EXP2):
6331 CASE_FLT_FN (BUILT_IN_EXPM1):
6332 CASE_FLT_FN (BUILT_IN_LOGB):
6333 CASE_FLT_FN (BUILT_IN_LOG):
6334 CASE_FLT_FN (BUILT_IN_LOG10):
6335 CASE_FLT_FN (BUILT_IN_LOG2):
6336 CASE_FLT_FN (BUILT_IN_LOG1P):
6337 CASE_FLT_FN (BUILT_IN_TAN):
6338 CASE_FLT_FN (BUILT_IN_ASIN):
6339 CASE_FLT_FN (BUILT_IN_ACOS):
6340 CASE_FLT_FN (BUILT_IN_ATAN):
6341 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6342 because of possible accuracy problems. */
6343 if (! flag_unsafe_math_optimizations)
6345 CASE_FLT_FN (BUILT_IN_SQRT):
6346 CASE_FLT_FN (BUILT_IN_FLOOR):
6347 CASE_FLT_FN (BUILT_IN_CEIL):
6348 CASE_FLT_FN (BUILT_IN_TRUNC):
6349 CASE_FLT_FN (BUILT_IN_ROUND):
6350 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6351 CASE_FLT_FN (BUILT_IN_RINT):
6352 target = expand_builtin_mathfn (exp, target, subtarget);
6357 CASE_FLT_FN (BUILT_IN_ILOGB):
6358 if (! flag_unsafe_math_optimizations)
6360 CASE_FLT_FN (BUILT_IN_ISINF):
6361 CASE_FLT_FN (BUILT_IN_FINITE):
6362 case BUILT_IN_ISFINITE:
6363 case BUILT_IN_ISNORMAL:
6364 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6369 CASE_FLT_FN (BUILT_IN_LCEIL):
6370 CASE_FLT_FN (BUILT_IN_LLCEIL):
6371 CASE_FLT_FN (BUILT_IN_LFLOOR):
6372 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6373 target = expand_builtin_int_roundingfn (exp, target);
6378 CASE_FLT_FN (BUILT_IN_LRINT):
6379 CASE_FLT_FN (BUILT_IN_LLRINT):
6380 CASE_FLT_FN (BUILT_IN_LROUND):
6381 CASE_FLT_FN (BUILT_IN_LLROUND):
6382 target = expand_builtin_int_roundingfn_2 (exp, target);
6387 CASE_FLT_FN (BUILT_IN_POW):
6388 target = expand_builtin_pow (exp, target, subtarget);
6393 CASE_FLT_FN (BUILT_IN_POWI):
6394 target = expand_builtin_powi (exp, target, subtarget);
6399 CASE_FLT_FN (BUILT_IN_ATAN2):
6400 CASE_FLT_FN (BUILT_IN_LDEXP):
6401 CASE_FLT_FN (BUILT_IN_SCALB):
6402 CASE_FLT_FN (BUILT_IN_SCALBN):
6403 CASE_FLT_FN (BUILT_IN_SCALBLN):
6404 if (! flag_unsafe_math_optimizations)
6407 CASE_FLT_FN (BUILT_IN_FMOD):
6408 CASE_FLT_FN (BUILT_IN_REMAINDER):
6409 CASE_FLT_FN (BUILT_IN_DREM):
6410 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6415 CASE_FLT_FN (BUILT_IN_CEXPI):
6416 target = expand_builtin_cexpi (exp, target, subtarget);
6417 gcc_assert (target);
6420 CASE_FLT_FN (BUILT_IN_SIN):
6421 CASE_FLT_FN (BUILT_IN_COS):
6422 if (! flag_unsafe_math_optimizations)
6424 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6429 CASE_FLT_FN (BUILT_IN_SINCOS):
6430 if (! flag_unsafe_math_optimizations)
6432 target = expand_builtin_sincos (exp);
6437 case BUILT_IN_APPLY_ARGS:
6438 return expand_builtin_apply_args ();
6440 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6441 FUNCTION with a copy of the parameters described by
6442 ARGUMENTS, and ARGSIZE. It returns a block of memory
6443 allocated on the stack into which is stored all the registers
6444 that might possibly be used for returning the result of a
6445 function. ARGUMENTS is the value returned by
6446 __builtin_apply_args. ARGSIZE is the number of bytes of
6447 arguments that must be copied. ??? How should this value be
6448 computed? We'll also need a safe worst case value for varargs
6450 case BUILT_IN_APPLY:
6451 if (!validate_arglist (exp, POINTER_TYPE,
6452 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6453 && !validate_arglist (exp, REFERENCE_TYPE,
6454 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6460 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6461 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6462 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6464 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6467 /* __builtin_return (RESULT) causes the function to return the
6468 value described by RESULT. RESULT is address of the block of
6469 memory returned by __builtin_apply. */
6470 case BUILT_IN_RETURN:
6471 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6472 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6475 case BUILT_IN_SAVEREGS:
6476 return expand_builtin_saveregs ();
6478 case BUILT_IN_ARGS_INFO:
6479 return expand_builtin_args_info (exp);
6481 case BUILT_IN_VA_ARG_PACK:
6482 /* All valid uses of __builtin_va_arg_pack () are removed during
6484 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6487 case BUILT_IN_VA_ARG_PACK_LEN:
6488 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6490 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6493 /* Return the address of the first anonymous stack arg. */
6494 case BUILT_IN_NEXT_ARG:
6495 if (fold_builtin_next_arg (exp, false))
6497 return expand_builtin_next_arg ();
6499 case BUILT_IN_CLEAR_CACHE:
6500 target = expand_builtin___clear_cache (exp);
6505 case BUILT_IN_CLASSIFY_TYPE:
6506 return expand_builtin_classify_type (exp);
6508 case BUILT_IN_CONSTANT_P:
6511 case BUILT_IN_FRAME_ADDRESS:
6512 case BUILT_IN_RETURN_ADDRESS:
6513 return expand_builtin_frame_address (fndecl, exp);
6515 /* Returns the address of the area where the structure is returned.
6517 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6518 if (call_expr_nargs (exp) != 0
6519 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6520 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6523 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6525 case BUILT_IN_ALLOCA:
6526 target = expand_builtin_alloca (exp, target);
6531 case BUILT_IN_STACK_SAVE:
6532 return expand_stack_save ();
6534 case BUILT_IN_STACK_RESTORE:
6535 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6538 case BUILT_IN_BSWAP32:
6539 case BUILT_IN_BSWAP64:
6540 target = expand_builtin_bswap (exp, target, subtarget);
6546 CASE_INT_FN (BUILT_IN_FFS):
6547 case BUILT_IN_FFSIMAX:
6548 target = expand_builtin_unop (target_mode, exp, target,
6549 subtarget, ffs_optab);
6554 CASE_INT_FN (BUILT_IN_CLZ):
6555 case BUILT_IN_CLZIMAX:
6556 target = expand_builtin_unop (target_mode, exp, target,
6557 subtarget, clz_optab);
6562 CASE_INT_FN (BUILT_IN_CTZ):
6563 case BUILT_IN_CTZIMAX:
6564 target = expand_builtin_unop (target_mode, exp, target,
6565 subtarget, ctz_optab);
6570 CASE_INT_FN (BUILT_IN_POPCOUNT):
6571 case BUILT_IN_POPCOUNTIMAX:
6572 target = expand_builtin_unop (target_mode, exp, target,
6573 subtarget, popcount_optab);
6578 CASE_INT_FN (BUILT_IN_PARITY):
6579 case BUILT_IN_PARITYIMAX:
6580 target = expand_builtin_unop (target_mode, exp, target,
6581 subtarget, parity_optab);
6586 case BUILT_IN_STRLEN:
6587 target = expand_builtin_strlen (exp, target, target_mode);
6592 case BUILT_IN_STRCPY:
6593 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6598 case BUILT_IN_STRNCPY:
6599 target = expand_builtin_strncpy (exp, target, mode);
6604 case BUILT_IN_STPCPY:
6605 target = expand_builtin_stpcpy (exp, target, mode);
6610 case BUILT_IN_STRCAT:
6611 target = expand_builtin_strcat (fndecl, exp, target, mode);
6616 case BUILT_IN_STRNCAT:
6617 target = expand_builtin_strncat (exp, target, mode);
6622 case BUILT_IN_STRSPN:
6623 target = expand_builtin_strspn (exp, target, mode);
6628 case BUILT_IN_STRCSPN:
6629 target = expand_builtin_strcspn (exp, target, mode);
6634 case BUILT_IN_STRSTR:
6635 target = expand_builtin_strstr (exp, target, mode);
6640 case BUILT_IN_STRPBRK:
6641 target = expand_builtin_strpbrk (exp, target, mode);
6646 case BUILT_IN_INDEX:
6647 case BUILT_IN_STRCHR:
6648 target = expand_builtin_strchr (exp, target, mode);
6653 case BUILT_IN_RINDEX:
6654 case BUILT_IN_STRRCHR:
6655 target = expand_builtin_strrchr (exp, target, mode);
6660 case BUILT_IN_MEMCPY:
6661 target = expand_builtin_memcpy (exp, target, mode);
6666 case BUILT_IN_MEMPCPY:
6667 target = expand_builtin_mempcpy (exp, target, mode);
6672 case BUILT_IN_MEMMOVE:
6673 target = expand_builtin_memmove (exp, target, mode, ignore);
6678 case BUILT_IN_BCOPY:
6679 target = expand_builtin_bcopy (exp, ignore);
6684 case BUILT_IN_MEMSET:
6685 target = expand_builtin_memset (exp, target, mode);
6690 case BUILT_IN_BZERO:
6691 target = expand_builtin_bzero (exp);
6696 case BUILT_IN_STRCMP:
6697 target = expand_builtin_strcmp (exp, target, mode);
6702 case BUILT_IN_STRNCMP:
6703 target = expand_builtin_strncmp (exp, target, mode);
6708 case BUILT_IN_MEMCHR:
6709 target = expand_builtin_memchr (exp, target, mode);
6715 case BUILT_IN_MEMCMP:
6716 target = expand_builtin_memcmp (exp, target, mode);
6721 case BUILT_IN_SETJMP:
6722 /* This should have been lowered to the builtins below. */
6725 case BUILT_IN_SETJMP_SETUP:
6726 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6727 and the receiver label. */
6728 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6730 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6731 VOIDmode, EXPAND_NORMAL);
6732 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6733 rtx label_r = label_rtx (label);
6735 /* This is copied from the handling of non-local gotos. */
6736 expand_builtin_setjmp_setup (buf_addr, label_r);
6737 nonlocal_goto_handler_labels
6738 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6739 nonlocal_goto_handler_labels);
6740 /* ??? Do not let expand_label treat us as such since we would
6741 not want to be both on the list of non-local labels and on
6742 the list of forced labels. */
6743 FORCED_LABEL (label) = 0;
6748 case BUILT_IN_SETJMP_DISPATCHER:
6749 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6750 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6752 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6753 rtx label_r = label_rtx (label);
6755 /* Remove the dispatcher label from the list of non-local labels
6756 since the receiver labels have been added to it above. */
6757 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6762 case BUILT_IN_SETJMP_RECEIVER:
6763 /* __builtin_setjmp_receiver is passed the receiver label. */
6764 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6766 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6767 rtx label_r = label_rtx (label);
6769 expand_builtin_setjmp_receiver (label_r);
6774 /* __builtin_longjmp is passed a pointer to an array of five words.
6775 It's similar to the C library longjmp function but works with
6776 __builtin_setjmp above. */
6777 case BUILT_IN_LONGJMP:
6778 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6780 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6781 VOIDmode, EXPAND_NORMAL);
6782 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6784 if (value != const1_rtx)
6786 error ("%<__builtin_longjmp%> second argument must be 1");
6790 expand_builtin_longjmp (buf_addr, value);
6795 case BUILT_IN_NONLOCAL_GOTO:
6796 target = expand_builtin_nonlocal_goto (exp);
6801 /* This updates the setjmp buffer that is its argument with the value
6802 of the current stack pointer. */
6803 case BUILT_IN_UPDATE_SETJMP_BUF:
6804 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6807 = expand_normal (CALL_EXPR_ARG (exp, 0));
6809 expand_builtin_update_setjmp_buf (buf_addr);
6815 expand_builtin_trap ();
6818 case BUILT_IN_PRINTF:
6819 target = expand_builtin_printf (exp, target, mode, false);
6824 case BUILT_IN_PRINTF_UNLOCKED:
6825 target = expand_builtin_printf (exp, target, mode, true);
6830 case BUILT_IN_FPUTS:
6831 target = expand_builtin_fputs (exp, target, false);
6835 case BUILT_IN_FPUTS_UNLOCKED:
6836 target = expand_builtin_fputs (exp, target, true);
6841 case BUILT_IN_FPRINTF:
6842 target = expand_builtin_fprintf (exp, target, mode, false);
6847 case BUILT_IN_FPRINTF_UNLOCKED:
6848 target = expand_builtin_fprintf (exp, target, mode, true);
6853 case BUILT_IN_SPRINTF:
6854 target = expand_builtin_sprintf (exp, target, mode);
6859 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6860 case BUILT_IN_SIGNBITD32:
6861 case BUILT_IN_SIGNBITD64:
6862 case BUILT_IN_SIGNBITD128:
6863 target = expand_builtin_signbit (exp, target);
6868 /* Various hooks for the DWARF 2 __throw routine. */
6869 case BUILT_IN_UNWIND_INIT:
6870 expand_builtin_unwind_init ();
6872 case BUILT_IN_DWARF_CFA:
6873 return virtual_cfa_rtx;
6874 #ifdef DWARF2_UNWIND_INFO
6875 case BUILT_IN_DWARF_SP_COLUMN:
6876 return expand_builtin_dwarf_sp_column ();
6877 case BUILT_IN_INIT_DWARF_REG_SIZES:
6878 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6881 case BUILT_IN_FROB_RETURN_ADDR:
6882 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6883 case BUILT_IN_EXTRACT_RETURN_ADDR:
6884 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6885 case BUILT_IN_EH_RETURN:
6886 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6887 CALL_EXPR_ARG (exp, 1));
6889 #ifdef EH_RETURN_DATA_REGNO
6890 case BUILT_IN_EH_RETURN_DATA_REGNO:
6891 return expand_builtin_eh_return_data_regno (exp);
6893 case BUILT_IN_EXTEND_POINTER:
6894 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6896 case BUILT_IN_VA_START:
6897 return expand_builtin_va_start (exp);
6898 case BUILT_IN_VA_END:
6899 return expand_builtin_va_end (exp);
6900 case BUILT_IN_VA_COPY:
6901 return expand_builtin_va_copy (exp);
6902 case BUILT_IN_EXPECT:
6903 return expand_builtin_expect (exp, target);
6904 case BUILT_IN_PREFETCH:
6905 expand_builtin_prefetch (exp);
6908 case BUILT_IN_PROFILE_FUNC_ENTER:
6909 return expand_builtin_profile_func (false);
6910 case BUILT_IN_PROFILE_FUNC_EXIT:
6911 return expand_builtin_profile_func (true);
6913 case BUILT_IN_INIT_TRAMPOLINE:
6914 return expand_builtin_init_trampoline (exp);
6915 case BUILT_IN_ADJUST_TRAMPOLINE:
6916 return expand_builtin_adjust_trampoline (exp);
6919 case BUILT_IN_EXECL:
6920 case BUILT_IN_EXECV:
6921 case BUILT_IN_EXECLP:
6922 case BUILT_IN_EXECLE:
6923 case BUILT_IN_EXECVP:
6924 case BUILT_IN_EXECVE:
6925 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6930 case BUILT_IN_FETCH_AND_ADD_1:
6931 case BUILT_IN_FETCH_AND_ADD_2:
6932 case BUILT_IN_FETCH_AND_ADD_4:
6933 case BUILT_IN_FETCH_AND_ADD_8:
6934 case BUILT_IN_FETCH_AND_ADD_16:
6935 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6936 target = expand_builtin_sync_operation (mode, exp, PLUS,
6937 false, target, ignore);
6942 case BUILT_IN_FETCH_AND_SUB_1:
6943 case BUILT_IN_FETCH_AND_SUB_2:
6944 case BUILT_IN_FETCH_AND_SUB_4:
6945 case BUILT_IN_FETCH_AND_SUB_8:
6946 case BUILT_IN_FETCH_AND_SUB_16:
6947 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6948 target = expand_builtin_sync_operation (mode, exp, MINUS,
6949 false, target, ignore);
6954 case BUILT_IN_FETCH_AND_OR_1:
6955 case BUILT_IN_FETCH_AND_OR_2:
6956 case BUILT_IN_FETCH_AND_OR_4:
6957 case BUILT_IN_FETCH_AND_OR_8:
6958 case BUILT_IN_FETCH_AND_OR_16:
6959 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6960 target = expand_builtin_sync_operation (mode, exp, IOR,
6961 false, target, ignore);
6966 case BUILT_IN_FETCH_AND_AND_1:
6967 case BUILT_IN_FETCH_AND_AND_2:
6968 case BUILT_IN_FETCH_AND_AND_4:
6969 case BUILT_IN_FETCH_AND_AND_8:
6970 case BUILT_IN_FETCH_AND_AND_16:
6971 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6972 target = expand_builtin_sync_operation (mode, exp, AND,
6973 false, target, ignore);
6978 case BUILT_IN_FETCH_AND_XOR_1:
6979 case BUILT_IN_FETCH_AND_XOR_2:
6980 case BUILT_IN_FETCH_AND_XOR_4:
6981 case BUILT_IN_FETCH_AND_XOR_8:
6982 case BUILT_IN_FETCH_AND_XOR_16:
6983 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6984 target = expand_builtin_sync_operation (mode, exp, XOR,
6985 false, target, ignore);
6990 case BUILT_IN_FETCH_AND_NAND_1:
6991 case BUILT_IN_FETCH_AND_NAND_2:
6992 case BUILT_IN_FETCH_AND_NAND_4:
6993 case BUILT_IN_FETCH_AND_NAND_8:
6994 case BUILT_IN_FETCH_AND_NAND_16:
6995 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6996 target = expand_builtin_sync_operation (mode, exp, NOT,
6997 false, target, ignore);
7002 case BUILT_IN_ADD_AND_FETCH_1:
7003 case BUILT_IN_ADD_AND_FETCH_2:
7004 case BUILT_IN_ADD_AND_FETCH_4:
7005 case BUILT_IN_ADD_AND_FETCH_8:
7006 case BUILT_IN_ADD_AND_FETCH_16:
7007 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
7008 target = expand_builtin_sync_operation (mode, exp, PLUS,
7009 true, target, ignore);
7014 case BUILT_IN_SUB_AND_FETCH_1:
7015 case BUILT_IN_SUB_AND_FETCH_2:
7016 case BUILT_IN_SUB_AND_FETCH_4:
7017 case BUILT_IN_SUB_AND_FETCH_8:
7018 case BUILT_IN_SUB_AND_FETCH_16:
7019 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
7020 target = expand_builtin_sync_operation (mode, exp, MINUS,
7021 true, target, ignore);
7026 case BUILT_IN_OR_AND_FETCH_1:
7027 case BUILT_IN_OR_AND_FETCH_2:
7028 case BUILT_IN_OR_AND_FETCH_4:
7029 case BUILT_IN_OR_AND_FETCH_8:
7030 case BUILT_IN_OR_AND_FETCH_16:
7031 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7032 target = expand_builtin_sync_operation (mode, exp, IOR,
7033 true, target, ignore);
7038 case BUILT_IN_AND_AND_FETCH_1:
7039 case BUILT_IN_AND_AND_FETCH_2:
7040 case BUILT_IN_AND_AND_FETCH_4:
7041 case BUILT_IN_AND_AND_FETCH_8:
7042 case BUILT_IN_AND_AND_FETCH_16:
7043 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7044 target = expand_builtin_sync_operation (mode, exp, AND,
7045 true, target, ignore);
7050 case BUILT_IN_XOR_AND_FETCH_1:
7051 case BUILT_IN_XOR_AND_FETCH_2:
7052 case BUILT_IN_XOR_AND_FETCH_4:
7053 case BUILT_IN_XOR_AND_FETCH_8:
7054 case BUILT_IN_XOR_AND_FETCH_16:
7055 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7056 target = expand_builtin_sync_operation (mode, exp, XOR,
7057 true, target, ignore);
7062 case BUILT_IN_NAND_AND_FETCH_1:
7063 case BUILT_IN_NAND_AND_FETCH_2:
7064 case BUILT_IN_NAND_AND_FETCH_4:
7065 case BUILT_IN_NAND_AND_FETCH_8:
7066 case BUILT_IN_NAND_AND_FETCH_16:
7067 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7068 target = expand_builtin_sync_operation (mode, exp, NOT,
7069 true, target, ignore);
7074 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7075 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7076 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7077 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7078 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7079 if (mode == VOIDmode)
7080 mode = TYPE_MODE (boolean_type_node);
7081 if (!target || !register_operand (target, mode))
7082 target = gen_reg_rtx (mode);
7084 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7085 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7090 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7091 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7092 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7093 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7094 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7095 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7096 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7101 case BUILT_IN_LOCK_TEST_AND_SET_1:
7102 case BUILT_IN_LOCK_TEST_AND_SET_2:
7103 case BUILT_IN_LOCK_TEST_AND_SET_4:
7104 case BUILT_IN_LOCK_TEST_AND_SET_8:
7105 case BUILT_IN_LOCK_TEST_AND_SET_16:
7106 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7107 target = expand_builtin_lock_test_and_set (mode, exp, target);
7112 case BUILT_IN_LOCK_RELEASE_1:
7113 case BUILT_IN_LOCK_RELEASE_2:
7114 case BUILT_IN_LOCK_RELEASE_4:
7115 case BUILT_IN_LOCK_RELEASE_8:
7116 case BUILT_IN_LOCK_RELEASE_16:
7117 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7118 expand_builtin_lock_release (mode, exp);
7121 case BUILT_IN_SYNCHRONIZE:
7122 expand_builtin_synchronize ();
7125 case BUILT_IN_OBJECT_SIZE:
7126 return expand_builtin_object_size (exp);
7128 case BUILT_IN_MEMCPY_CHK:
7129 case BUILT_IN_MEMPCPY_CHK:
7130 case BUILT_IN_MEMMOVE_CHK:
7131 case BUILT_IN_MEMSET_CHK:
7132 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7137 case BUILT_IN_STRCPY_CHK:
7138 case BUILT_IN_STPCPY_CHK:
7139 case BUILT_IN_STRNCPY_CHK:
7140 case BUILT_IN_STRCAT_CHK:
7141 case BUILT_IN_STRNCAT_CHK:
7142 case BUILT_IN_SNPRINTF_CHK:
7143 case BUILT_IN_VSNPRINTF_CHK:
7144 maybe_emit_chk_warning (exp, fcode);
7147 case BUILT_IN_SPRINTF_CHK:
7148 case BUILT_IN_VSPRINTF_CHK:
7149 maybe_emit_sprintf_chk_warning (exp, fcode);
7153 maybe_emit_free_warning (exp);
7156 default: /* just do library call, if unknown builtin */
7160 /* The switch statement above can drop through to cause the function
7161 to be called normally. */
7162 return expand_call (exp, target, ignore);
7165 /* Determine whether a tree node represents a call to a built-in
7166 function. If the tree T is a call to a built-in function with
7167 the right number of arguments of the appropriate types, return
7168 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7169 Otherwise the return value is END_BUILTINS. */
7171 enum built_in_function
7172 builtin_mathfn_code (const_tree t)
7174 const_tree fndecl, arg, parmlist;
7175 const_tree argtype, parmtype;
7176 const_call_expr_arg_iterator iter;
7178 if (TREE_CODE (t) != CALL_EXPR
7179 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7180 return END_BUILTINS;
7182 fndecl = get_callee_fndecl (t);
7183 if (fndecl == NULL_TREE
7184 || TREE_CODE (fndecl) != FUNCTION_DECL
7185 || ! DECL_BUILT_IN (fndecl)
7186 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7187 return END_BUILTINS;
7189 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7190 init_const_call_expr_arg_iterator (t, &iter);
7191 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7193 /* If a function doesn't take a variable number of arguments,
7194 the last element in the list will have type `void'. */
7195 parmtype = TREE_VALUE (parmlist);
7196 if (VOID_TYPE_P (parmtype))
7198 if (more_const_call_expr_args_p (&iter))
7199 return END_BUILTINS;
7200 return DECL_FUNCTION_CODE (fndecl);
7203 if (! more_const_call_expr_args_p (&iter))
7204 return END_BUILTINS;
7206 arg = next_const_call_expr_arg (&iter);
7207 argtype = TREE_TYPE (arg);
7209 if (SCALAR_FLOAT_TYPE_P (parmtype))
7211 if (! SCALAR_FLOAT_TYPE_P (argtype))
7212 return END_BUILTINS;
7214 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7216 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7217 return END_BUILTINS;
7219 else if (POINTER_TYPE_P (parmtype))
7221 if (! POINTER_TYPE_P (argtype))
7222 return END_BUILTINS;
7224 else if (INTEGRAL_TYPE_P (parmtype))
7226 if (! INTEGRAL_TYPE_P (argtype))
7227 return END_BUILTINS;
7230 return END_BUILTINS;
7233 /* Variable-length argument list. */
7234 return DECL_FUNCTION_CODE (fndecl);
7237 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7238 evaluate to a constant. */
7241 fold_builtin_constant_p (tree arg)
7243 /* We return 1 for a numeric type that's known to be a constant
7244 value at compile-time or for an aggregate type that's a
7245 literal constant. */
7248 /* If we know this is a constant, emit the constant of one. */
7249 if (CONSTANT_CLASS_P (arg)
7250 || (TREE_CODE (arg) == CONSTRUCTOR
7251 && TREE_CONSTANT (arg)))
7252 return integer_one_node;
7253 if (TREE_CODE (arg) == ADDR_EXPR)
7255 tree op = TREE_OPERAND (arg, 0);
7256 if (TREE_CODE (op) == STRING_CST
7257 || (TREE_CODE (op) == ARRAY_REF
7258 && integer_zerop (TREE_OPERAND (op, 1))
7259 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7260 return integer_one_node;
7263 /* If this expression has side effects, show we don't know it to be a
7264 constant. Likewise if it's a pointer or aggregate type since in
7265 those case we only want literals, since those are only optimized
7266 when generating RTL, not later.
7267 And finally, if we are compiling an initializer, not code, we
7268 need to return a definite result now; there's not going to be any
7269 more optimization done. */
7270 if (TREE_SIDE_EFFECTS (arg)
7271 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7272 || POINTER_TYPE_P (TREE_TYPE (arg))
7274 || folding_initializer)
7275 return integer_zero_node;
7280 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7281 return it as a truthvalue. */
7284 build_builtin_expect_predicate (tree pred, tree expected)
7286 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7288 fn = built_in_decls[BUILT_IN_EXPECT];
7289 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7290 ret_type = TREE_TYPE (TREE_TYPE (fn));
7291 pred_type = TREE_VALUE (arg_types);
7292 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7294 pred = fold_convert (pred_type, pred);
7295 expected = fold_convert (expected_type, expected);
7296 call_expr = build_call_expr (fn, 2, pred, expected);
7298 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7299 build_int_cst (ret_type, 0));
7302 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7303 NULL_TREE if no simplification is possible. */
7306 fold_builtin_expect (tree arg0, tree arg1)
7309 enum tree_code code;
7311 /* If this is a builtin_expect within a builtin_expect keep the
7312 inner one. See through a comparison against a constant. It
7313 might have been added to create a thruthvalue. */
7315 if (COMPARISON_CLASS_P (inner)
7316 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7317 inner = TREE_OPERAND (inner, 0);
7319 if (TREE_CODE (inner) == CALL_EXPR
7320 && (fndecl = get_callee_fndecl (inner))
7321 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7322 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7325 /* Distribute the expected value over short-circuiting operators.
7326 See through the cast from truthvalue_type_node to long. */
7328 while (TREE_CODE (inner) == NOP_EXPR
7329 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7330 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7331 inner = TREE_OPERAND (inner, 0);
7333 code = TREE_CODE (inner);
7334 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7336 tree op0 = TREE_OPERAND (inner, 0);
7337 tree op1 = TREE_OPERAND (inner, 1);
7339 op0 = build_builtin_expect_predicate (op0, arg1);
7340 op1 = build_builtin_expect_predicate (op1, arg1);
7341 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7343 return fold_convert (TREE_TYPE (arg0), inner);
7346 /* If the argument isn't invariant then there's nothing else we can do. */
7347 if (!TREE_CONSTANT (arg0))
7350 /* If we expect that a comparison against the argument will fold to
7351 a constant return the constant. In practice, this means a true
7352 constant or the address of a non-weak symbol. */
7355 if (TREE_CODE (inner) == ADDR_EXPR)
7359 inner = TREE_OPERAND (inner, 0);
7361 while (TREE_CODE (inner) == COMPONENT_REF
7362 || TREE_CODE (inner) == ARRAY_REF);
7363 if ((TREE_CODE (inner) == VAR_DECL
7364 || TREE_CODE (inner) == FUNCTION_DECL)
7365 && DECL_WEAK (inner))
7369 /* Otherwise, ARG0 already has the proper type for the return value. */
7373 /* Fold a call to __builtin_classify_type with argument ARG. */
7376 fold_builtin_classify_type (tree arg)
7379 return build_int_cst (NULL_TREE, no_type_class);
7381 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7384 /* Fold a call to __builtin_strlen with argument ARG. */
7387 fold_builtin_strlen (tree type, tree arg)
7389 if (!validate_arg (arg, POINTER_TYPE))
7393 tree len = c_strlen (arg, 0);
7396 return fold_convert (type, len);
7402 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7405 fold_builtin_inf (tree type, int warn)
7407 REAL_VALUE_TYPE real;
7409 /* __builtin_inff is intended to be usable to define INFINITY on all
7410 targets. If an infinity is not available, INFINITY expands "to a
7411 positive constant of type float that overflows at translation
7412 time", footnote "In this case, using INFINITY will violate the
7413 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7414 Thus we pedwarn to ensure this constraint violation is
7416 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7417 pedwarn (input_location, 0, "target format does not support infinity");
7420 return build_real (type, real);
7423 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7426 fold_builtin_nan (tree arg, tree type, int quiet)
7428 REAL_VALUE_TYPE real;
7431 if (!validate_arg (arg, POINTER_TYPE))
7433 str = c_getstr (arg);
7437 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7440 return build_real (type, real);
7443 /* Return true if the floating point expression T has an integer value.
7444 We also allow +Inf, -Inf and NaN to be considered integer values. */
7447 integer_valued_real_p (tree t)
7449 switch (TREE_CODE (t))
7456 return integer_valued_real_p (TREE_OPERAND (t, 0));
7461 return integer_valued_real_p (TREE_OPERAND (t, 1));
7468 return integer_valued_real_p (TREE_OPERAND (t, 0))
7469 && integer_valued_real_p (TREE_OPERAND (t, 1));
7472 return integer_valued_real_p (TREE_OPERAND (t, 1))
7473 && integer_valued_real_p (TREE_OPERAND (t, 2));
7476 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7480 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7481 if (TREE_CODE (type) == INTEGER_TYPE)
7483 if (TREE_CODE (type) == REAL_TYPE)
7484 return integer_valued_real_p (TREE_OPERAND (t, 0));
7489 switch (builtin_mathfn_code (t))
7491 CASE_FLT_FN (BUILT_IN_CEIL):
7492 CASE_FLT_FN (BUILT_IN_FLOOR):
7493 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7494 CASE_FLT_FN (BUILT_IN_RINT):
7495 CASE_FLT_FN (BUILT_IN_ROUND):
7496 CASE_FLT_FN (BUILT_IN_TRUNC):
7499 CASE_FLT_FN (BUILT_IN_FMIN):
7500 CASE_FLT_FN (BUILT_IN_FMAX):
7501 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7502 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7515 /* FNDECL is assumed to be a builtin where truncation can be propagated
7516 across (for instance floor((double)f) == (double)floorf (f).
7517 Do the transformation for a call with argument ARG. */
7520 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7522 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7524 if (!validate_arg (arg, REAL_TYPE))
7527 /* Integer rounding functions are idempotent. */
7528 if (fcode == builtin_mathfn_code (arg))
7531 /* If argument is already integer valued, and we don't need to worry
7532 about setting errno, there's no need to perform rounding. */
7533 if (! flag_errno_math && integer_valued_real_p (arg))
7538 tree arg0 = strip_float_extensions (arg);
7539 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7540 tree newtype = TREE_TYPE (arg0);
7543 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7544 && (decl = mathfn_built_in (newtype, fcode)))
7545 return fold_convert (ftype,
7546 build_call_expr (decl, 1,
7547 fold_convert (newtype, arg0)));
7552 /* FNDECL is assumed to be builtin which can narrow the FP type of
7553 the argument, for instance lround((double)f) -> lroundf (f).
7554 Do the transformation for a call with argument ARG. */
7557 fold_fixed_mathfn (tree fndecl, tree arg)
7559 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7561 if (!validate_arg (arg, REAL_TYPE))
7564 /* If argument is already integer valued, and we don't need to worry
7565 about setting errno, there's no need to perform rounding. */
7566 if (! flag_errno_math && integer_valued_real_p (arg))
7567 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7571 tree ftype = TREE_TYPE (arg);
7572 tree arg0 = strip_float_extensions (arg);
7573 tree newtype = TREE_TYPE (arg0);
7576 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7577 && (decl = mathfn_built_in (newtype, fcode)))
7578 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7581 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7582 sizeof (long long) == sizeof (long). */
7583 if (TYPE_PRECISION (long_long_integer_type_node)
7584 == TYPE_PRECISION (long_integer_type_node))
7586 tree newfn = NULL_TREE;
7589 CASE_FLT_FN (BUILT_IN_LLCEIL):
7590 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7593 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7594 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7597 CASE_FLT_FN (BUILT_IN_LLROUND):
7598 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7601 CASE_FLT_FN (BUILT_IN_LLRINT):
7602 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7611 tree newcall = build_call_expr(newfn, 1, arg);
7612 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7619 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7620 return type. Return NULL_TREE if no simplification can be made. */
7623 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7627 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7628 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7631 /* Calculate the result when the argument is a constant. */
7632 if (TREE_CODE (arg) == COMPLEX_CST
7633 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7637 if (TREE_CODE (arg) == COMPLEX_EXPR)
7639 tree real = TREE_OPERAND (arg, 0);
7640 tree imag = TREE_OPERAND (arg, 1);
7642 /* If either part is zero, cabs is fabs of the other. */
7643 if (real_zerop (real))
7644 return fold_build1 (ABS_EXPR, type, imag);
7645 if (real_zerop (imag))
7646 return fold_build1 (ABS_EXPR, type, real);
7648 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7649 if (flag_unsafe_math_optimizations
7650 && operand_equal_p (real, imag, OEP_PURE_SAME))
7652 const REAL_VALUE_TYPE sqrt2_trunc
7653 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7655 return fold_build2 (MULT_EXPR, type,
7656 fold_build1 (ABS_EXPR, type, real),
7657 build_real (type, sqrt2_trunc));
7661 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7662 if (TREE_CODE (arg) == NEGATE_EXPR
7663 || TREE_CODE (arg) == CONJ_EXPR)
7664 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7666 /* Don't do this when optimizing for size. */
7667 if (flag_unsafe_math_optimizations
7668 && optimize && optimize_function_for_speed_p (cfun))
7670 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7672 if (sqrtfn != NULL_TREE)
7674 tree rpart, ipart, result;
7676 arg = builtin_save_expr (arg);
7678 rpart = fold_build1 (REALPART_EXPR, type, arg);
7679 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7681 rpart = builtin_save_expr (rpart);
7682 ipart = builtin_save_expr (ipart);
7684 result = fold_build2 (PLUS_EXPR, type,
7685 fold_build2 (MULT_EXPR, type,
7687 fold_build2 (MULT_EXPR, type,
7690 return build_call_expr (sqrtfn, 1, result);
7697 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7698 Return NULL_TREE if no simplification can be made. */
7701 fold_builtin_sqrt (tree arg, tree type)
7704 enum built_in_function fcode;
7707 if (!validate_arg (arg, REAL_TYPE))
7710 /* Calculate the result when the argument is a constant. */
7711 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7714 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7715 fcode = builtin_mathfn_code (arg);
7716 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7718 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7719 arg = fold_build2 (MULT_EXPR, type,
7720 CALL_EXPR_ARG (arg, 0),
7721 build_real (type, dconsthalf));
7722 return build_call_expr (expfn, 1, arg);
7725 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7726 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7728 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7732 tree arg0 = CALL_EXPR_ARG (arg, 0);
7734 /* The inner root was either sqrt or cbrt. */
7735 /* This was a conditional expression but it triggered a bug
7737 REAL_VALUE_TYPE dconstroot;
7738 if (BUILTIN_SQRT_P (fcode))
7739 dconstroot = dconsthalf;
7741 dconstroot = dconst_third ();
7743 /* Adjust for the outer root. */
7744 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7745 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7746 tree_root = build_real (type, dconstroot);
7747 return build_call_expr (powfn, 2, arg0, tree_root);
7751 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7752 if (flag_unsafe_math_optimizations
7753 && (fcode == BUILT_IN_POW
7754 || fcode == BUILT_IN_POWF
7755 || fcode == BUILT_IN_POWL))
7757 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7758 tree arg0 = CALL_EXPR_ARG (arg, 0);
7759 tree arg1 = CALL_EXPR_ARG (arg, 1);
7761 if (!tree_expr_nonnegative_p (arg0))
7762 arg0 = build1 (ABS_EXPR, type, arg0);
7763 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7764 build_real (type, dconsthalf));
7765 return build_call_expr (powfn, 2, arg0, narg1);
7771 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7772 Return NULL_TREE if no simplification can be made. */
7775 fold_builtin_cbrt (tree arg, tree type)
7777 const enum built_in_function fcode = builtin_mathfn_code (arg);
7780 if (!validate_arg (arg, REAL_TYPE))
7783 /* Calculate the result when the argument is a constant. */
7784 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7787 if (flag_unsafe_math_optimizations)
7789 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7790 if (BUILTIN_EXPONENT_P (fcode))
7792 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7793 const REAL_VALUE_TYPE third_trunc =
7794 real_value_truncate (TYPE_MODE (type), dconst_third ());
7795 arg = fold_build2 (MULT_EXPR, type,
7796 CALL_EXPR_ARG (arg, 0),
7797 build_real (type, third_trunc));
7798 return build_call_expr (expfn, 1, arg);
7801 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7802 if (BUILTIN_SQRT_P (fcode))
7804 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7808 tree arg0 = CALL_EXPR_ARG (arg, 0);
7810 REAL_VALUE_TYPE dconstroot = dconst_third ();
7812 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7813 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7814 tree_root = build_real (type, dconstroot);
7815 return build_call_expr (powfn, 2, arg0, tree_root);
7819 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7820 if (BUILTIN_CBRT_P (fcode))
7822 tree arg0 = CALL_EXPR_ARG (arg, 0);
7823 if (tree_expr_nonnegative_p (arg0))
7825 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7830 REAL_VALUE_TYPE dconstroot;
7832 real_arithmetic (&dconstroot, MULT_EXPR,
7833 dconst_third_ptr (), dconst_third_ptr ());
7834 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7835 tree_root = build_real (type, dconstroot);
7836 return build_call_expr (powfn, 2, arg0, tree_root);
7841 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7842 if (fcode == BUILT_IN_POW
7843 || fcode == BUILT_IN_POWF
7844 || fcode == BUILT_IN_POWL)
7846 tree arg00 = CALL_EXPR_ARG (arg, 0);
7847 tree arg01 = CALL_EXPR_ARG (arg, 1);
7848 if (tree_expr_nonnegative_p (arg00))
7850 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7851 const REAL_VALUE_TYPE dconstroot
7852 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7853 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7854 build_real (type, dconstroot));
7855 return build_call_expr (powfn, 2, arg00, narg01);
7862 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7863 TYPE is the type of the return value. Return NULL_TREE if no
7864 simplification can be made. */
7867 fold_builtin_cos (tree arg, tree type, tree fndecl)
7871 if (!validate_arg (arg, REAL_TYPE))
7874 /* Calculate the result when the argument is a constant. */
7875 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7878 /* Optimize cos(-x) into cos (x). */
7879 if ((narg = fold_strip_sign_ops (arg)))
7880 return build_call_expr (fndecl, 1, narg);
7885 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7886 Return NULL_TREE if no simplification can be made. */
7889 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7891 if (validate_arg (arg, REAL_TYPE))
7895 /* Calculate the result when the argument is a constant. */
7896 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7899 /* Optimize cosh(-x) into cosh (x). */
7900 if ((narg = fold_strip_sign_ops (arg)))
7901 return build_call_expr (fndecl, 1, narg);
7907 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7908 Return NULL_TREE if no simplification can be made. */
7911 fold_builtin_tan (tree arg, tree type)
7913 enum built_in_function fcode;
7916 if (!validate_arg (arg, REAL_TYPE))
7919 /* Calculate the result when the argument is a constant. */
7920 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7923 /* Optimize tan(atan(x)) = x. */
7924 fcode = builtin_mathfn_code (arg);
7925 if (flag_unsafe_math_optimizations
7926 && (fcode == BUILT_IN_ATAN
7927 || fcode == BUILT_IN_ATANF
7928 || fcode == BUILT_IN_ATANL))
7929 return CALL_EXPR_ARG (arg, 0);
7934 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7935 NULL_TREE if no simplification can be made. */
7938 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7943 if (!validate_arg (arg0, REAL_TYPE)
7944 || !validate_arg (arg1, POINTER_TYPE)
7945 || !validate_arg (arg2, POINTER_TYPE))
7948 type = TREE_TYPE (arg0);
7950 /* Calculate the result when the argument is a constant. */
7951 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7954 /* Canonicalize sincos to cexpi. */
7955 if (!TARGET_C99_FUNCTIONS)
7957 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7961 call = build_call_expr (fn, 1, arg0);
7962 call = builtin_save_expr (call);
7964 return build2 (COMPOUND_EXPR, type,
7965 build2 (MODIFY_EXPR, void_type_node,
7966 build_fold_indirect_ref (arg1),
7967 build1 (IMAGPART_EXPR, type, call)),
7968 build2 (MODIFY_EXPR, void_type_node,
7969 build_fold_indirect_ref (arg2),
7970 build1 (REALPART_EXPR, type, call)));
7973 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7974 NULL_TREE if no simplification can be made. */
7977 fold_builtin_cexp (tree arg0, tree type)
7980 tree realp, imagp, ifn;
7982 if (!validate_arg (arg0, COMPLEX_TYPE))
7985 rtype = TREE_TYPE (TREE_TYPE (arg0));
7987 /* In case we can figure out the real part of arg0 and it is constant zero
7989 if (!TARGET_C99_FUNCTIONS)
7991 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7995 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7996 && real_zerop (realp))
7998 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7999 return build_call_expr (ifn, 1, narg);
8002 /* In case we can easily decompose real and imaginary parts split cexp
8003 to exp (r) * cexpi (i). */
8004 if (flag_unsafe_math_optimizations
8007 tree rfn, rcall, icall;
8009 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8013 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
8017 icall = build_call_expr (ifn, 1, imagp);
8018 icall = builtin_save_expr (icall);
8019 rcall = build_call_expr (rfn, 1, realp);
8020 rcall = builtin_save_expr (rcall);
8021 return fold_build2 (COMPLEX_EXPR, type,
8022 fold_build2 (MULT_EXPR, rtype,
8024 fold_build1 (REALPART_EXPR, rtype, icall)),
8025 fold_build2 (MULT_EXPR, rtype,
8027 fold_build1 (IMAGPART_EXPR, rtype, icall)));
8033 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8034 Return NULL_TREE if no simplification can be made. */
8037 fold_builtin_trunc (tree fndecl, tree arg)
8039 if (!validate_arg (arg, REAL_TYPE))
8042 /* Optimize trunc of constant value. */
8043 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8045 REAL_VALUE_TYPE r, x;
8046 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8048 x = TREE_REAL_CST (arg);
8049 real_trunc (&r, TYPE_MODE (type), &x);
8050 return build_real (type, r);
8053 return fold_trunc_transparent_mathfn (fndecl, arg);
8056 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8057 Return NULL_TREE if no simplification can be made. */
8060 fold_builtin_floor (tree fndecl, tree arg)
8062 if (!validate_arg (arg, REAL_TYPE))
8065 /* Optimize floor of constant value. */
8066 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8070 x = TREE_REAL_CST (arg);
8071 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8073 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8076 real_floor (&r, TYPE_MODE (type), &x);
8077 return build_real (type, r);
8081 /* Fold floor (x) where x is nonnegative to trunc (x). */
8082 if (tree_expr_nonnegative_p (arg))
8084 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8086 return build_call_expr (truncfn, 1, arg);
8089 return fold_trunc_transparent_mathfn (fndecl, arg);
8092 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8093 Return NULL_TREE if no simplification can be made. */
8096 fold_builtin_ceil (tree fndecl, tree arg)
8098 if (!validate_arg (arg, REAL_TYPE))
8101 /* Optimize ceil of constant value. */
8102 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8106 x = TREE_REAL_CST (arg);
8107 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8109 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8112 real_ceil (&r, TYPE_MODE (type), &x);
8113 return build_real (type, r);
8117 return fold_trunc_transparent_mathfn (fndecl, arg);
8120 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8121 Return NULL_TREE if no simplification can be made. */
8124 fold_builtin_round (tree fndecl, tree arg)
8126 if (!validate_arg (arg, REAL_TYPE))
8129 /* Optimize round of constant value. */
8130 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8134 x = TREE_REAL_CST (arg);
8135 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8137 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8140 real_round (&r, TYPE_MODE (type), &x);
8141 return build_real (type, r);
8145 return fold_trunc_transparent_mathfn (fndecl, arg);
8148 /* Fold function call to builtin lround, lroundf or lroundl (or the
8149 corresponding long long versions) and other rounding functions. ARG
8150 is the argument to the call. Return NULL_TREE if no simplification
8154 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8156 if (!validate_arg (arg, REAL_TYPE))
8159 /* Optimize lround of constant value. */
8160 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8162 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8164 if (real_isfinite (&x))
8166 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8167 tree ftype = TREE_TYPE (arg);
8168 unsigned HOST_WIDE_INT lo2;
8169 HOST_WIDE_INT hi, lo;
8172 switch (DECL_FUNCTION_CODE (fndecl))
8174 CASE_FLT_FN (BUILT_IN_LFLOOR):
8175 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8176 real_floor (&r, TYPE_MODE (ftype), &x);
8179 CASE_FLT_FN (BUILT_IN_LCEIL):
8180 CASE_FLT_FN (BUILT_IN_LLCEIL):
8181 real_ceil (&r, TYPE_MODE (ftype), &x);
8184 CASE_FLT_FN (BUILT_IN_LROUND):
8185 CASE_FLT_FN (BUILT_IN_LLROUND):
8186 real_round (&r, TYPE_MODE (ftype), &x);
8193 REAL_VALUE_TO_INT (&lo, &hi, r);
8194 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8195 return build_int_cst_wide (itype, lo2, hi);
8199 switch (DECL_FUNCTION_CODE (fndecl))
8201 CASE_FLT_FN (BUILT_IN_LFLOOR):
8202 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8203 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8204 if (tree_expr_nonnegative_p (arg))
8205 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8211 return fold_fixed_mathfn (fndecl, arg);
8214 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8215 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8216 the argument to the call. Return NULL_TREE if no simplification can
8220 fold_builtin_bitop (tree fndecl, tree arg)
8222 if (!validate_arg (arg, INTEGER_TYPE))
8225 /* Optimize for constant argument. */
8226 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8228 HOST_WIDE_INT hi, width, result;
8229 unsigned HOST_WIDE_INT lo;
8232 type = TREE_TYPE (arg);
8233 width = TYPE_PRECISION (type);
8234 lo = TREE_INT_CST_LOW (arg);
8236 /* Clear all the bits that are beyond the type's precision. */
8237 if (width > HOST_BITS_PER_WIDE_INT)
8239 hi = TREE_INT_CST_HIGH (arg);
8240 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8241 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8246 if (width < HOST_BITS_PER_WIDE_INT)
8247 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8250 switch (DECL_FUNCTION_CODE (fndecl))
8252 CASE_INT_FN (BUILT_IN_FFS):
8254 result = exact_log2 (lo & -lo) + 1;
8256 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8261 CASE_INT_FN (BUILT_IN_CLZ):
8263 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8265 result = width - floor_log2 (lo) - 1;
8266 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8270 CASE_INT_FN (BUILT_IN_CTZ):
8272 result = exact_log2 (lo & -lo);
8274 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8275 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8279 CASE_INT_FN (BUILT_IN_POPCOUNT):
8282 result++, lo &= lo - 1;
8284 result++, hi &= hi - 1;
8287 CASE_INT_FN (BUILT_IN_PARITY):
8290 result++, lo &= lo - 1;
8292 result++, hi &= hi - 1;
8300 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8306 /* Fold function call to builtin_bswap and the long and long long
8307 variants. Return NULL_TREE if no simplification can be made. */
8309 fold_builtin_bswap (tree fndecl, tree arg)
8311 if (! validate_arg (arg, INTEGER_TYPE))
8314 /* Optimize constant value. */
8315 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8317 HOST_WIDE_INT hi, width, r_hi = 0;
8318 unsigned HOST_WIDE_INT lo, r_lo = 0;
8321 type = TREE_TYPE (arg);
8322 width = TYPE_PRECISION (type);
8323 lo = TREE_INT_CST_LOW (arg);
8324 hi = TREE_INT_CST_HIGH (arg);
8326 switch (DECL_FUNCTION_CODE (fndecl))
8328 case BUILT_IN_BSWAP32:
8329 case BUILT_IN_BSWAP64:
8333 for (s = 0; s < width; s += 8)
8335 int d = width - s - 8;
8336 unsigned HOST_WIDE_INT byte;
8338 if (s < HOST_BITS_PER_WIDE_INT)
8339 byte = (lo >> s) & 0xff;
8341 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8343 if (d < HOST_BITS_PER_WIDE_INT)
8346 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8356 if (width < HOST_BITS_PER_WIDE_INT)
8357 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8359 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8365 /* Return true if EXPR is the real constant contained in VALUE. */
8368 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8372 return ((TREE_CODE (expr) == REAL_CST
8373 && !TREE_OVERFLOW (expr)
8374 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8375 || (TREE_CODE (expr) == COMPLEX_CST
8376 && real_dconstp (TREE_REALPART (expr), value)
8377 && real_zerop (TREE_IMAGPART (expr))));
8380 /* A subroutine of fold_builtin to fold the various logarithmic
8381 functions. Return NULL_TREE if no simplification can me made.
8382 FUNC is the corresponding MPFR logarithm function. */
8385 fold_builtin_logarithm (tree fndecl, tree arg,
8386 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8388 if (validate_arg (arg, REAL_TYPE))
8390 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8392 const enum built_in_function fcode = builtin_mathfn_code (arg);
8394 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8395 instead we'll look for 'e' truncated to MODE. So only do
8396 this if flag_unsafe_math_optimizations is set. */
8397 if (flag_unsafe_math_optimizations && func == mpfr_log)
8399 const REAL_VALUE_TYPE e_truncated =
8400 real_value_truncate (TYPE_MODE (type), dconst_e ());
8401 if (real_dconstp (arg, &e_truncated))
8402 return build_real (type, dconst1);
8405 /* Calculate the result when the argument is a constant. */
8406 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8409 /* Special case, optimize logN(expN(x)) = x. */
8410 if (flag_unsafe_math_optimizations
8411 && ((func == mpfr_log
8412 && (fcode == BUILT_IN_EXP
8413 || fcode == BUILT_IN_EXPF
8414 || fcode == BUILT_IN_EXPL))
8415 || (func == mpfr_log2
8416 && (fcode == BUILT_IN_EXP2
8417 || fcode == BUILT_IN_EXP2F
8418 || fcode == BUILT_IN_EXP2L))
8419 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8420 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8422 /* Optimize logN(func()) for various exponential functions. We
8423 want to determine the value "x" and the power "exponent" in
8424 order to transform logN(x**exponent) into exponent*logN(x). */
8425 if (flag_unsafe_math_optimizations)
8427 tree exponent = 0, x = 0;
8431 CASE_FLT_FN (BUILT_IN_EXP):
8432 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8433 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8435 exponent = CALL_EXPR_ARG (arg, 0);
8437 CASE_FLT_FN (BUILT_IN_EXP2):
8438 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8439 x = build_real (type, dconst2);
8440 exponent = CALL_EXPR_ARG (arg, 0);
8442 CASE_FLT_FN (BUILT_IN_EXP10):
8443 CASE_FLT_FN (BUILT_IN_POW10):
8444 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8446 REAL_VALUE_TYPE dconst10;
8447 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8448 x = build_real (type, dconst10);
8450 exponent = CALL_EXPR_ARG (arg, 0);
8452 CASE_FLT_FN (BUILT_IN_SQRT):
8453 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8454 x = CALL_EXPR_ARG (arg, 0);
8455 exponent = build_real (type, dconsthalf);
8457 CASE_FLT_FN (BUILT_IN_CBRT):
8458 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8459 x = CALL_EXPR_ARG (arg, 0);
8460 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8463 CASE_FLT_FN (BUILT_IN_POW):
8464 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8465 x = CALL_EXPR_ARG (arg, 0);
8466 exponent = CALL_EXPR_ARG (arg, 1);
8472 /* Now perform the optimization. */
8475 tree logfn = build_call_expr (fndecl, 1, x);
8476 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8484 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8485 NULL_TREE if no simplification can be made. */
8488 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8490 tree res, narg0, narg1;
8492 if (!validate_arg (arg0, REAL_TYPE)
8493 || !validate_arg (arg1, REAL_TYPE))
8496 /* Calculate the result when the argument is a constant. */
8497 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8500 /* If either argument to hypot has a negate or abs, strip that off.
8501 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8502 narg0 = fold_strip_sign_ops (arg0);
8503 narg1 = fold_strip_sign_ops (arg1);
8506 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8507 narg1 ? narg1 : arg1);
8510 /* If either argument is zero, hypot is fabs of the other. */
8511 if (real_zerop (arg0))
8512 return fold_build1 (ABS_EXPR, type, arg1);
8513 else if (real_zerop (arg1))
8514 return fold_build1 (ABS_EXPR, type, arg0);
8516 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8517 if (flag_unsafe_math_optimizations
8518 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8520 const REAL_VALUE_TYPE sqrt2_trunc
8521 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8522 return fold_build2 (MULT_EXPR, type,
8523 fold_build1 (ABS_EXPR, type, arg0),
8524 build_real (type, sqrt2_trunc));
8531 /* Fold a builtin function call to pow, powf, or powl. Return
8532 NULL_TREE if no simplification can be made. */
8534 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8538 if (!validate_arg (arg0, REAL_TYPE)
8539 || !validate_arg (arg1, REAL_TYPE))
8542 /* Calculate the result when the argument is a constant. */
8543 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8546 /* Optimize pow(1.0,y) = 1.0. */
8547 if (real_onep (arg0))
8548 return omit_one_operand (type, build_real (type, dconst1), arg1);
8550 if (TREE_CODE (arg1) == REAL_CST
8551 && !TREE_OVERFLOW (arg1))
8553 REAL_VALUE_TYPE cint;
8557 c = TREE_REAL_CST (arg1);
8559 /* Optimize pow(x,0.0) = 1.0. */
8560 if (REAL_VALUES_EQUAL (c, dconst0))
8561 return omit_one_operand (type, build_real (type, dconst1),
8564 /* Optimize pow(x,1.0) = x. */
8565 if (REAL_VALUES_EQUAL (c, dconst1))
8568 /* Optimize pow(x,-1.0) = 1.0/x. */
8569 if (REAL_VALUES_EQUAL (c, dconstm1))
8570 return fold_build2 (RDIV_EXPR, type,
8571 build_real (type, dconst1), arg0);
8573 /* Optimize pow(x,0.5) = sqrt(x). */
8574 if (flag_unsafe_math_optimizations
8575 && REAL_VALUES_EQUAL (c, dconsthalf))
8577 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8579 if (sqrtfn != NULL_TREE)
8580 return build_call_expr (sqrtfn, 1, arg0);
8583 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8584 if (flag_unsafe_math_optimizations)
8586 const REAL_VALUE_TYPE dconstroot
8587 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8589 if (REAL_VALUES_EQUAL (c, dconstroot))
8591 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8592 if (cbrtfn != NULL_TREE)
8593 return build_call_expr (cbrtfn, 1, arg0);
8597 /* Check for an integer exponent. */
8598 n = real_to_integer (&c);
8599 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8600 if (real_identical (&c, &cint))
8602 /* Attempt to evaluate pow at compile-time, unless this should
8603 raise an exception. */
8604 if (TREE_CODE (arg0) == REAL_CST
8605 && !TREE_OVERFLOW (arg0)
8607 || (!flag_trapping_math && !flag_errno_math)
8608 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8613 x = TREE_REAL_CST (arg0);
8614 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8615 if (flag_unsafe_math_optimizations || !inexact)
8616 return build_real (type, x);
8619 /* Strip sign ops from even integer powers. */
8620 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8622 tree narg0 = fold_strip_sign_ops (arg0);
8624 return build_call_expr (fndecl, 2, narg0, arg1);
8629 if (flag_unsafe_math_optimizations)
8631 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8633 /* Optimize pow(expN(x),y) = expN(x*y). */
8634 if (BUILTIN_EXPONENT_P (fcode))
8636 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8637 tree arg = CALL_EXPR_ARG (arg0, 0);
8638 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8639 return build_call_expr (expfn, 1, arg);
8642 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8643 if (BUILTIN_SQRT_P (fcode))
8645 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8646 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8647 build_real (type, dconsthalf));
8648 return build_call_expr (fndecl, 2, narg0, narg1);
8651 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8652 if (BUILTIN_CBRT_P (fcode))
8654 tree arg = CALL_EXPR_ARG (arg0, 0);
8655 if (tree_expr_nonnegative_p (arg))
8657 const REAL_VALUE_TYPE dconstroot
8658 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8659 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8660 build_real (type, dconstroot));
8661 return build_call_expr (fndecl, 2, arg, narg1);
8665 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8666 if (fcode == BUILT_IN_POW
8667 || fcode == BUILT_IN_POWF
8668 || fcode == BUILT_IN_POWL)
8670 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8671 if (tree_expr_nonnegative_p (arg00))
8673 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8674 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8675 return build_call_expr (fndecl, 2, arg00, narg1);
8683 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8684 Return NULL_TREE if no simplification can be made. */
8686 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8687 tree arg0, tree arg1, tree type)
8689 if (!validate_arg (arg0, REAL_TYPE)
8690 || !validate_arg (arg1, INTEGER_TYPE))
8693 /* Optimize pow(1.0,y) = 1.0. */
8694 if (real_onep (arg0))
8695 return omit_one_operand (type, build_real (type, dconst1), arg1);
8697 if (host_integerp (arg1, 0))
8699 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8701 /* Evaluate powi at compile-time. */
8702 if (TREE_CODE (arg0) == REAL_CST
8703 && !TREE_OVERFLOW (arg0))
8706 x = TREE_REAL_CST (arg0);
8707 real_powi (&x, TYPE_MODE (type), &x, c);
8708 return build_real (type, x);
8711 /* Optimize pow(x,0) = 1.0. */
8713 return omit_one_operand (type, build_real (type, dconst1),
8716 /* Optimize pow(x,1) = x. */
8720 /* Optimize pow(x,-1) = 1.0/x. */
8722 return fold_build2 (RDIV_EXPR, type,
8723 build_real (type, dconst1), arg0);
8729 /* A subroutine of fold_builtin to fold the various exponent
8730 functions. Return NULL_TREE if no simplification can be made.
8731 FUNC is the corresponding MPFR exponent function. */
8734 fold_builtin_exponent (tree fndecl, tree arg,
8735 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8737 if (validate_arg (arg, REAL_TYPE))
8739 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8742 /* Calculate the result when the argument is a constant. */
8743 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8746 /* Optimize expN(logN(x)) = x. */
8747 if (flag_unsafe_math_optimizations)
8749 const enum built_in_function fcode = builtin_mathfn_code (arg);
8751 if ((func == mpfr_exp
8752 && (fcode == BUILT_IN_LOG
8753 || fcode == BUILT_IN_LOGF
8754 || fcode == BUILT_IN_LOGL))
8755 || (func == mpfr_exp2
8756 && (fcode == BUILT_IN_LOG2
8757 || fcode == BUILT_IN_LOG2F
8758 || fcode == BUILT_IN_LOG2L))
8759 || (func == mpfr_exp10
8760 && (fcode == BUILT_IN_LOG10
8761 || fcode == BUILT_IN_LOG10F
8762 || fcode == BUILT_IN_LOG10L)))
8763 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8770 /* Return true if VAR is a VAR_DECL or a component thereof. */
8773 var_decl_component_p (tree var)
8776 while (handled_component_p (inner))
8777 inner = TREE_OPERAND (inner, 0);
8778 return SSA_VAR_P (inner);
8781 /* Fold function call to builtin memset. Return
8782 NULL_TREE if no simplification can be made. */
8785 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8788 unsigned HOST_WIDE_INT length, cval;
8790 if (! validate_arg (dest, POINTER_TYPE)
8791 || ! validate_arg (c, INTEGER_TYPE)
8792 || ! validate_arg (len, INTEGER_TYPE))
8795 if (! host_integerp (len, 1))
8798 /* If the LEN parameter is zero, return DEST. */
8799 if (integer_zerop (len))
8800 return omit_one_operand (type, dest, c);
8802 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8807 if (TREE_CODE (var) != ADDR_EXPR)
8810 var = TREE_OPERAND (var, 0);
8811 if (TREE_THIS_VOLATILE (var))
8814 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8815 && !POINTER_TYPE_P (TREE_TYPE (var)))
8818 if (! var_decl_component_p (var))
8821 length = tree_low_cst (len, 1);
8822 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8823 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8827 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8830 if (integer_zerop (c))
8834 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8837 cval = tree_low_cst (c, 1);
8841 cval |= (cval << 31) << 1;
8844 ret = build_int_cst_type (TREE_TYPE (var), cval);
8845 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8849 return omit_one_operand (type, dest, ret);
8852 /* Fold function call to builtin memset. Return
8853 NULL_TREE if no simplification can be made. */
8856 fold_builtin_bzero (tree dest, tree size, bool ignore)
8858 if (! validate_arg (dest, POINTER_TYPE)
8859 || ! validate_arg (size, INTEGER_TYPE))
8865 /* New argument list transforming bzero(ptr x, int y) to
8866 memset(ptr x, int 0, size_t y). This is done this way
8867 so that if it isn't expanded inline, we fallback to
8868 calling bzero instead of memset. */
8870 return fold_builtin_memset (dest, integer_zero_node,
8871 fold_convert (sizetype, size),
8872 void_type_node, ignore);
8875 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8876 NULL_TREE if no simplification can be made.
8877 If ENDP is 0, return DEST (like memcpy).
8878 If ENDP is 1, return DEST+LEN (like mempcpy).
8879 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8880 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8884 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8886 tree destvar, srcvar, expr;
8888 if (! validate_arg (dest, POINTER_TYPE)
8889 || ! validate_arg (src, POINTER_TYPE)
8890 || ! validate_arg (len, INTEGER_TYPE))
8893 /* If the LEN parameter is zero, return DEST. */
8894 if (integer_zerop (len))
8895 return omit_one_operand (type, dest, src);
8897 /* If SRC and DEST are the same (and not volatile), return
8898 DEST{,+LEN,+LEN-1}. */
8899 if (operand_equal_p (src, dest, 0))
8903 tree srctype, desttype;
8904 int src_align, dest_align;
8908 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8909 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8911 /* Both DEST and SRC must be pointer types.
8912 ??? This is what old code did. Is the testing for pointer types
8915 If either SRC is readonly or length is 1, we can use memcpy. */
8916 if (dest_align && src_align
8917 && (readonly_data_expr (src)
8918 || (host_integerp (len, 1)
8919 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8920 tree_low_cst (len, 1)))))
8922 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8925 return build_call_expr (fn, 3, dest, src, len);
8930 if (!host_integerp (len, 0))
8933 This logic lose for arguments like (type *)malloc (sizeof (type)),
8934 since we strip the casts of up to VOID return value from malloc.
8935 Perhaps we ought to inherit type from non-VOID argument here? */
8938 srctype = TREE_TYPE (TREE_TYPE (src));
8939 desttype = TREE_TYPE (TREE_TYPE (dest));
8940 if (!srctype || !desttype
8941 || !TYPE_SIZE_UNIT (srctype)
8942 || !TYPE_SIZE_UNIT (desttype)
8943 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8944 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8945 || TYPE_VOLATILE (srctype)
8946 || TYPE_VOLATILE (desttype))
8949 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8950 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8951 if (dest_align < (int) TYPE_ALIGN (desttype)
8952 || src_align < (int) TYPE_ALIGN (srctype))
8956 dest = builtin_save_expr (dest);
8959 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8961 srcvar = build_fold_indirect_ref (src);
8962 if (TREE_THIS_VOLATILE (srcvar))
8964 else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8966 /* With memcpy, it is possible to bypass aliasing rules, so without
8967 this check i.e. execute/20060930-2.c would be misoptimized,
8968 because it use conflicting alias set to hold argument for the
8969 memcpy call. This check is probably unnecessary with
8970 -fno-strict-aliasing. Similarly for destvar. See also
8972 else if (!var_decl_component_p (srcvar))
8976 destvar = NULL_TREE;
8977 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8979 destvar = build_fold_indirect_ref (dest);
8980 if (TREE_THIS_VOLATILE (destvar))
8982 else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8983 destvar = NULL_TREE;
8984 else if (!var_decl_component_p (destvar))
8985 destvar = NULL_TREE;
8988 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8991 if (srcvar == NULL_TREE)
8994 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8997 srctype = build_qualified_type (desttype, 0);
8998 if (src_align < (int) TYPE_ALIGN (srctype))
9000 if (AGGREGATE_TYPE_P (srctype)
9001 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
9004 srctype = build_variant_type_copy (srctype);
9005 TYPE_ALIGN (srctype) = src_align;
9006 TYPE_USER_ALIGN (srctype) = 1;
9007 TYPE_PACKED (srctype) = 1;
9009 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
9010 src = fold_convert (srcptype, src);
9011 srcvar = build_fold_indirect_ref (src);
9013 else if (destvar == NULL_TREE)
9016 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
9019 desttype = build_qualified_type (srctype, 0);
9020 if (dest_align < (int) TYPE_ALIGN (desttype))
9022 if (AGGREGATE_TYPE_P (desttype)
9023 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
9026 desttype = build_variant_type_copy (desttype);
9027 TYPE_ALIGN (desttype) = dest_align;
9028 TYPE_USER_ALIGN (desttype) = 1;
9029 TYPE_PACKED (desttype) = 1;
9031 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
9032 dest = fold_convert (destptype, dest);
9033 destvar = build_fold_indirect_ref (dest);
9036 if (srctype == desttype
9037 || (gimple_in_ssa_p (cfun)
9038 && useless_type_conversion_p (desttype, srctype)))
9040 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9041 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9042 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9043 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9044 expr = fold_convert (TREE_TYPE (destvar), srcvar);
9046 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
9047 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9053 if (endp == 0 || endp == 3)
9054 return omit_one_operand (type, dest, expr);
9060 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
9063 len = fold_convert (sizetype, len);
9064 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9065 dest = fold_convert (type, dest);
9067 dest = omit_one_operand (type, dest, expr);
9071 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9072 If LEN is not NULL, it represents the length of the string to be
9073 copied. Return NULL_TREE if no simplification can be made. */
9076 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
9080 if (!validate_arg (dest, POINTER_TYPE)
9081 || !validate_arg (src, POINTER_TYPE))
9084 /* If SRC and DEST are the same (and not volatile), return DEST. */
9085 if (operand_equal_p (src, dest, 0))
9086 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
9088 if (optimize_function_for_size_p (cfun))
9091 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9097 len = c_strlen (src, 1);
9098 if (! len || TREE_SIDE_EFFECTS (len))
9102 len = size_binop (PLUS_EXPR, len, ssize_int (1));
9103 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9104 build_call_expr (fn, 3, dest, src, len));
9107 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9108 If SLEN is not NULL, it represents the length of the source string.
9109 Return NULL_TREE if no simplification can be made. */
9112 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
9116 if (!validate_arg (dest, POINTER_TYPE)
9117 || !validate_arg (src, POINTER_TYPE)
9118 || !validate_arg (len, INTEGER_TYPE))
9121 /* If the LEN parameter is zero, return DEST. */
9122 if (integer_zerop (len))
9123 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9125 /* We can't compare slen with len as constants below if len is not a
9127 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9131 slen = c_strlen (src, 1);
9133 /* Now, we must be passed a constant src ptr parameter. */
9134 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9137 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
9139 /* We do not support simplification of this case, though we do
9140 support it when expanding trees into RTL. */
9141 /* FIXME: generate a call to __builtin_memset. */
9142 if (tree_int_cst_lt (slen, len))
9145 /* OK transform into builtin memcpy. */
9146 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9149 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9150 build_call_expr (fn, 3, dest, src, len));
9153 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9154 arguments to the call, and TYPE is its return type.
9155 Return NULL_TREE if no simplification can be made. */
9158 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
9160 if (!validate_arg (arg1, POINTER_TYPE)
9161 || !validate_arg (arg2, INTEGER_TYPE)
9162 || !validate_arg (len, INTEGER_TYPE))
9168 if (TREE_CODE (arg2) != INTEGER_CST
9169 || !host_integerp (len, 1))
9172 p1 = c_getstr (arg1);
9173 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9179 if (target_char_cast (arg2, &c))
9182 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9185 return build_int_cst (TREE_TYPE (arg1), 0);
9187 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9189 return fold_convert (type, tem);
9195 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9196 Return NULL_TREE if no simplification can be made. */
9199 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9201 const char *p1, *p2;
9203 if (!validate_arg (arg1, POINTER_TYPE)
9204 || !validate_arg (arg2, POINTER_TYPE)
9205 || !validate_arg (len, INTEGER_TYPE))
9208 /* If the LEN parameter is zero, return zero. */
9209 if (integer_zerop (len))
9210 return omit_two_operands (integer_type_node, integer_zero_node,
9213 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9214 if (operand_equal_p (arg1, arg2, 0))
9215 return omit_one_operand (integer_type_node, integer_zero_node, len);
9217 p1 = c_getstr (arg1);
9218 p2 = c_getstr (arg2);
9220 /* If all arguments are constant, and the value of len is not greater
9221 than the lengths of arg1 and arg2, evaluate at compile-time. */
9222 if (host_integerp (len, 1) && p1 && p2
9223 && compare_tree_int (len, strlen (p1) + 1) <= 0
9224 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9226 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9229 return integer_one_node;
9231 return integer_minus_one_node;
9233 return integer_zero_node;
9236 /* If len parameter is one, return an expression corresponding to
9237 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9238 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9240 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9241 tree cst_uchar_ptr_node
9242 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9244 tree ind1 = fold_convert (integer_type_node,
9245 build1 (INDIRECT_REF, cst_uchar_node,
9246 fold_convert (cst_uchar_ptr_node,
9248 tree ind2 = fold_convert (integer_type_node,
9249 build1 (INDIRECT_REF, cst_uchar_node,
9250 fold_convert (cst_uchar_ptr_node,
9252 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9258 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9259 Return NULL_TREE if no simplification can be made. */
9262 fold_builtin_strcmp (tree arg1, tree arg2)
9264 const char *p1, *p2;
9266 if (!validate_arg (arg1, POINTER_TYPE)
9267 || !validate_arg (arg2, POINTER_TYPE))
9270 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9271 if (operand_equal_p (arg1, arg2, 0))
9272 return integer_zero_node;
9274 p1 = c_getstr (arg1);
9275 p2 = c_getstr (arg2);
9279 const int i = strcmp (p1, p2);
9281 return integer_minus_one_node;
9283 return integer_one_node;
9285 return integer_zero_node;
9288 /* If the second arg is "", return *(const unsigned char*)arg1. */
9289 if (p2 && *p2 == '\0')
9291 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9292 tree cst_uchar_ptr_node
9293 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9295 return fold_convert (integer_type_node,
9296 build1 (INDIRECT_REF, cst_uchar_node,
9297 fold_convert (cst_uchar_ptr_node,
9301 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9302 if (p1 && *p1 == '\0')
9304 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9305 tree cst_uchar_ptr_node
9306 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9308 tree temp = fold_convert (integer_type_node,
9309 build1 (INDIRECT_REF, cst_uchar_node,
9310 fold_convert (cst_uchar_ptr_node,
9312 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9318 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9319 Return NULL_TREE if no simplification can be made. */
9322 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9324 const char *p1, *p2;
9326 if (!validate_arg (arg1, POINTER_TYPE)
9327 || !validate_arg (arg2, POINTER_TYPE)
9328 || !validate_arg (len, INTEGER_TYPE))
9331 /* If the LEN parameter is zero, return zero. */
9332 if (integer_zerop (len))
9333 return omit_two_operands (integer_type_node, integer_zero_node,
9336 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9337 if (operand_equal_p (arg1, arg2, 0))
9338 return omit_one_operand (integer_type_node, integer_zero_node, len);
9340 p1 = c_getstr (arg1);
9341 p2 = c_getstr (arg2);
9343 if (host_integerp (len, 1) && p1 && p2)
9345 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9347 return integer_one_node;
9349 return integer_minus_one_node;
9351 return integer_zero_node;
9354 /* If the second arg is "", and the length is greater than zero,
9355 return *(const unsigned char*)arg1. */
9356 if (p2 && *p2 == '\0'
9357 && TREE_CODE (len) == INTEGER_CST
9358 && tree_int_cst_sgn (len) == 1)
9360 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9361 tree cst_uchar_ptr_node
9362 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9364 return fold_convert (integer_type_node,
9365 build1 (INDIRECT_REF, cst_uchar_node,
9366 fold_convert (cst_uchar_ptr_node,
9370 /* If the first arg is "", and the length is greater than zero,
9371 return -*(const unsigned char*)arg2. */
9372 if (p1 && *p1 == '\0'
9373 && TREE_CODE (len) == INTEGER_CST
9374 && tree_int_cst_sgn (len) == 1)
9376 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9377 tree cst_uchar_ptr_node
9378 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9380 tree temp = fold_convert (integer_type_node,
9381 build1 (INDIRECT_REF, cst_uchar_node,
9382 fold_convert (cst_uchar_ptr_node,
9384 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9387 /* If len parameter is one, return an expression corresponding to
9388 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9389 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9391 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9392 tree cst_uchar_ptr_node
9393 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9395 tree ind1 = fold_convert (integer_type_node,
9396 build1 (INDIRECT_REF, cst_uchar_node,
9397 fold_convert (cst_uchar_ptr_node,
9399 tree ind2 = fold_convert (integer_type_node,
9400 build1 (INDIRECT_REF, cst_uchar_node,
9401 fold_convert (cst_uchar_ptr_node,
9403 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9409 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9410 ARG. Return NULL_TREE if no simplification can be made. */
9413 fold_builtin_signbit (tree arg, tree type)
9417 if (!validate_arg (arg, REAL_TYPE))
9420 /* If ARG is a compile-time constant, determine the result. */
9421 if (TREE_CODE (arg) == REAL_CST
9422 && !TREE_OVERFLOW (arg))
9426 c = TREE_REAL_CST (arg);
9427 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9428 return fold_convert (type, temp);
9431 /* If ARG is non-negative, the result is always zero. */
9432 if (tree_expr_nonnegative_p (arg))
9433 return omit_one_operand (type, integer_zero_node, arg);
9435 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9436 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9437 return fold_build2 (LT_EXPR, type, arg,
9438 build_real (TREE_TYPE (arg), dconst0));
9443 /* Fold function call to builtin copysign, copysignf or copysignl with
9444 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9448 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9452 if (!validate_arg (arg1, REAL_TYPE)
9453 || !validate_arg (arg2, REAL_TYPE))
9456 /* copysign(X,X) is X. */
9457 if (operand_equal_p (arg1, arg2, 0))
9458 return fold_convert (type, arg1);
9460 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9461 if (TREE_CODE (arg1) == REAL_CST
9462 && TREE_CODE (arg2) == REAL_CST
9463 && !TREE_OVERFLOW (arg1)
9464 && !TREE_OVERFLOW (arg2))
9466 REAL_VALUE_TYPE c1, c2;
9468 c1 = TREE_REAL_CST (arg1);
9469 c2 = TREE_REAL_CST (arg2);
9470 /* c1.sign := c2.sign. */
9471 real_copysign (&c1, &c2);
9472 return build_real (type, c1);
9475 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9476 Remember to evaluate Y for side-effects. */
9477 if (tree_expr_nonnegative_p (arg2))
9478 return omit_one_operand (type,
9479 fold_build1 (ABS_EXPR, type, arg1),
9482 /* Strip sign changing operations for the first argument. */
9483 tem = fold_strip_sign_ops (arg1);
9485 return build_call_expr (fndecl, 2, tem, arg2);
9490 /* Fold a call to builtin isascii with argument ARG. */
9493 fold_builtin_isascii (tree arg)
9495 if (!validate_arg (arg, INTEGER_TYPE))
9499 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9500 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9501 build_int_cst (NULL_TREE,
9502 ~ (unsigned HOST_WIDE_INT) 0x7f));
9503 return fold_build2 (EQ_EXPR, integer_type_node,
9504 arg, integer_zero_node);
9508 /* Fold a call to builtin toascii with argument ARG. */
9511 fold_builtin_toascii (tree arg)
9513 if (!validate_arg (arg, INTEGER_TYPE))
9516 /* Transform toascii(c) -> (c & 0x7f). */
9517 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9518 build_int_cst (NULL_TREE, 0x7f));
9521 /* Fold a call to builtin isdigit with argument ARG. */
9524 fold_builtin_isdigit (tree arg)
9526 if (!validate_arg (arg, INTEGER_TYPE))
9530 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9531 /* According to the C standard, isdigit is unaffected by locale.
9532 However, it definitely is affected by the target character set. */
9533 unsigned HOST_WIDE_INT target_digit0
9534 = lang_hooks.to_target_charset ('0');
9536 if (target_digit0 == 0)
9539 arg = fold_convert (unsigned_type_node, arg);
9540 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9541 build_int_cst (unsigned_type_node, target_digit0));
9542 return fold_build2 (LE_EXPR, integer_type_node, arg,
9543 build_int_cst (unsigned_type_node, 9));
9547 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9550 fold_builtin_fabs (tree arg, tree type)
9552 if (!validate_arg (arg, REAL_TYPE))
9555 arg = fold_convert (type, arg);
9556 if (TREE_CODE (arg) == REAL_CST)
9557 return fold_abs_const (arg, type);
9558 return fold_build1 (ABS_EXPR, type, arg);
9561 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9564 fold_builtin_abs (tree arg, tree type)
9566 if (!validate_arg (arg, INTEGER_TYPE))
9569 arg = fold_convert (type, arg);
9570 if (TREE_CODE (arg) == INTEGER_CST)
9571 return fold_abs_const (arg, type);
9572 return fold_build1 (ABS_EXPR, type, arg);
9575 /* Fold a call to builtin fmin or fmax. */
9578 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9580 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9582 /* Calculate the result when the argument is a constant. */
9583 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9588 /* If either argument is NaN, return the other one. Avoid the
9589 transformation if we get (and honor) a signalling NaN. Using
9590 omit_one_operand() ensures we create a non-lvalue. */
9591 if (TREE_CODE (arg0) == REAL_CST
9592 && real_isnan (&TREE_REAL_CST (arg0))
9593 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9594 || ! TREE_REAL_CST (arg0).signalling))
9595 return omit_one_operand (type, arg1, arg0);
9596 if (TREE_CODE (arg1) == REAL_CST
9597 && real_isnan (&TREE_REAL_CST (arg1))
9598 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9599 || ! TREE_REAL_CST (arg1).signalling))
9600 return omit_one_operand (type, arg0, arg1);
9602 /* Transform fmin/fmax(x,x) -> x. */
9603 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9604 return omit_one_operand (type, arg0, arg1);
9606 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9607 functions to return the numeric arg if the other one is NaN.
9608 These tree codes don't honor that, so only transform if
9609 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9610 handled, so we don't have to worry about it either. */
9611 if (flag_finite_math_only)
9612 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9613 fold_convert (type, arg0),
9614 fold_convert (type, arg1));
9619 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9622 fold_builtin_carg (tree arg, tree type)
9624 if (validate_arg (arg, COMPLEX_TYPE))
9626 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9630 tree new_arg = builtin_save_expr (arg);
9631 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9632 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9633 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9640 /* Fold a call to builtin logb/ilogb. */
9643 fold_builtin_logb (tree arg, tree rettype)
9645 if (! validate_arg (arg, REAL_TYPE))
9650 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9652 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9658 /* If arg is Inf or NaN and we're logb, return it. */
9659 if (TREE_CODE (rettype) == REAL_TYPE)
9660 return fold_convert (rettype, arg);
9661 /* Fall through... */
9663 /* Zero may set errno and/or raise an exception for logb, also
9664 for ilogb we don't know FP_ILOGB0. */
9667 /* For normal numbers, proceed iff radix == 2. In GCC,
9668 normalized significands are in the range [0.5, 1.0). We
9669 want the exponent as if they were [1.0, 2.0) so get the
9670 exponent and subtract 1. */
9671 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9672 return fold_convert (rettype, build_int_cst (NULL_TREE,
9673 REAL_EXP (value)-1));
9681 /* Fold a call to builtin significand, if radix == 2. */
9684 fold_builtin_significand (tree arg, tree rettype)
9686 if (! validate_arg (arg, REAL_TYPE))
9691 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9693 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9700 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9701 return fold_convert (rettype, arg);
9703 /* For normal numbers, proceed iff radix == 2. */
9704 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9706 REAL_VALUE_TYPE result = *value;
9707 /* In GCC, normalized significands are in the range [0.5,
9708 1.0). We want them to be [1.0, 2.0) so set the
9710 SET_REAL_EXP (&result, 1);
9711 return build_real (rettype, result);
9720 /* Fold a call to builtin frexp, we can assume the base is 2. */
9723 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9725 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9730 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9733 arg1 = build_fold_indirect_ref (arg1);
9735 /* Proceed if a valid pointer type was passed in. */
9736 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9738 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9744 /* For +-0, return (*exp = 0, +-0). */
9745 exp = integer_zero_node;
9750 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9751 return omit_one_operand (rettype, arg0, arg1);
9754 /* Since the frexp function always expects base 2, and in
9755 GCC normalized significands are already in the range
9756 [0.5, 1.0), we have exactly what frexp wants. */
9757 REAL_VALUE_TYPE frac_rvt = *value;
9758 SET_REAL_EXP (&frac_rvt, 0);
9759 frac = build_real (rettype, frac_rvt);
9760 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9767 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9768 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9769 TREE_SIDE_EFFECTS (arg1) = 1;
9770 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9776 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9777 then we can assume the base is two. If it's false, then we have to
9778 check the mode of the TYPE parameter in certain cases. */
9781 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9783 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9788 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9789 if (real_zerop (arg0) || integer_zerop (arg1)
9790 || (TREE_CODE (arg0) == REAL_CST
9791 && !real_isfinite (&TREE_REAL_CST (arg0))))
9792 return omit_one_operand (type, arg0, arg1);
9794 /* If both arguments are constant, then try to evaluate it. */
9795 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9796 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9797 && host_integerp (arg1, 0))
9799 /* Bound the maximum adjustment to twice the range of the
9800 mode's valid exponents. Use abs to ensure the range is
9801 positive as a sanity check. */
9802 const long max_exp_adj = 2 *
9803 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9804 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9806 /* Get the user-requested adjustment. */
9807 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9809 /* The requested adjustment must be inside this range. This
9810 is a preliminary cap to avoid things like overflow, we
9811 may still fail to compute the result for other reasons. */
9812 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9814 REAL_VALUE_TYPE initial_result;
9816 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9818 /* Ensure we didn't overflow. */
9819 if (! real_isinf (&initial_result))
9821 const REAL_VALUE_TYPE trunc_result
9822 = real_value_truncate (TYPE_MODE (type), initial_result);
9824 /* Only proceed if the target mode can hold the
9826 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9827 return build_real (type, trunc_result);
9836 /* Fold a call to builtin modf. */
9839 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9841 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9846 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9849 arg1 = build_fold_indirect_ref (arg1);
9851 /* Proceed if a valid pointer type was passed in. */
9852 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9854 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9855 REAL_VALUE_TYPE trunc, frac;
9861 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9862 trunc = frac = *value;
9865 /* For +-Inf, return (*arg1 = arg0, +-0). */
9867 frac.sign = value->sign;
9871 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9872 real_trunc (&trunc, VOIDmode, value);
9873 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9874 /* If the original number was negative and already
9875 integral, then the fractional part is -0.0. */
9876 if (value->sign && frac.cl == rvc_zero)
9877 frac.sign = value->sign;
9881 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9882 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9883 build_real (rettype, trunc));
9884 TREE_SIDE_EFFECTS (arg1) = 1;
9885 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9886 build_real (rettype, frac));
9892 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9893 ARG is the argument for the call. */
9896 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9898 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9901 if (!validate_arg (arg, REAL_TYPE))
9904 switch (builtin_index)
9906 case BUILT_IN_ISINF:
9907 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9908 return omit_one_operand (type, integer_zero_node, arg);
9910 if (TREE_CODE (arg) == REAL_CST)
9912 r = TREE_REAL_CST (arg);
9913 if (real_isinf (&r))
9914 return real_compare (GT_EXPR, &r, &dconst0)
9915 ? integer_one_node : integer_minus_one_node;
9917 return integer_zero_node;
9922 case BUILT_IN_ISINF_SIGN:
9924 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9925 /* In a boolean context, GCC will fold the inner COND_EXPR to
9926 1. So e.g. "if (isinf_sign(x))" would be folded to just
9927 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9928 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9929 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9930 tree tmp = NULL_TREE;
9932 arg = builtin_save_expr (arg);
9934 if (signbit_fn && isinf_fn)
9936 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9937 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9939 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9940 signbit_call, integer_zero_node);
9941 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9942 isinf_call, integer_zero_node);
9944 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9945 integer_minus_one_node, integer_one_node);
9946 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9953 case BUILT_IN_ISFINITE:
9954 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9955 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9956 return omit_one_operand (type, integer_one_node, arg);
9958 if (TREE_CODE (arg) == REAL_CST)
9960 r = TREE_REAL_CST (arg);
9961 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9966 case BUILT_IN_ISNAN:
9967 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9968 return omit_one_operand (type, integer_zero_node, arg);
9970 if (TREE_CODE (arg) == REAL_CST)
9972 r = TREE_REAL_CST (arg);
9973 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9976 arg = builtin_save_expr (arg);
9977 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9984 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9985 This builtin will generate code to return the appropriate floating
9986 point classification depending on the value of the floating point
9987 number passed in. The possible return values must be supplied as
9988 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9989 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9990 one floating point argument which is "type generic". */
9993 fold_builtin_fpclassify (tree exp)
9995 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9996 arg, type, res, tmp;
9997 enum machine_mode mode;
10001 /* Verify the required arguments in the original call. */
10002 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10003 INTEGER_TYPE, INTEGER_TYPE,
10004 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10007 fp_nan = CALL_EXPR_ARG (exp, 0);
10008 fp_infinite = CALL_EXPR_ARG (exp, 1);
10009 fp_normal = CALL_EXPR_ARG (exp, 2);
10010 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10011 fp_zero = CALL_EXPR_ARG (exp, 4);
10012 arg = CALL_EXPR_ARG (exp, 5);
10013 type = TREE_TYPE (arg);
10014 mode = TYPE_MODE (type);
10015 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
10017 /* fpclassify(x) ->
10018 isnan(x) ? FP_NAN :
10019 (fabs(x) == Inf ? FP_INFINITE :
10020 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10021 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10023 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10024 build_real (type, dconst0));
10025 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
10027 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10028 real_from_string (&r, buf);
10029 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
10030 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
10032 if (HONOR_INFINITIES (mode))
10035 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10036 build_real (type, r));
10037 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
10040 if (HONOR_NANS (mode))
10042 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
10043 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
10049 /* Fold a call to an unordered comparison function such as
10050 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10051 being called and ARG0 and ARG1 are the arguments for the call.
10052 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10053 the opposite of the desired result. UNORDERED_CODE is used
10054 for modes that can hold NaNs and ORDERED_CODE is used for
10058 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
10059 enum tree_code unordered_code,
10060 enum tree_code ordered_code)
10062 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10063 enum tree_code code;
10065 enum tree_code code0, code1;
10066 tree cmp_type = NULL_TREE;
10068 type0 = TREE_TYPE (arg0);
10069 type1 = TREE_TYPE (arg1);
10071 code0 = TREE_CODE (type0);
10072 code1 = TREE_CODE (type1);
10074 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10075 /* Choose the wider of two real types. */
10076 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10078 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10080 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10083 arg0 = fold_convert (cmp_type, arg0);
10084 arg1 = fold_convert (cmp_type, arg1);
10086 if (unordered_code == UNORDERED_EXPR)
10088 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10089 return omit_two_operands (type, integer_zero_node, arg0, arg1);
10090 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
10093 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10095 return fold_build1 (TRUTH_NOT_EXPR, type,
10096 fold_build2 (code, type, arg0, arg1));
10099 /* Fold a call to built-in function FNDECL with 0 arguments.
10100 IGNORE is true if the result of the function call is ignored. This
10101 function returns NULL_TREE if no simplification was possible. */
10104 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10106 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10107 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10110 CASE_FLT_FN (BUILT_IN_INF):
10111 case BUILT_IN_INFD32:
10112 case BUILT_IN_INFD64:
10113 case BUILT_IN_INFD128:
10114 return fold_builtin_inf (type, true);
10116 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10117 return fold_builtin_inf (type, false);
10119 case BUILT_IN_CLASSIFY_TYPE:
10120 return fold_builtin_classify_type (NULL_TREE);
10128 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10129 IGNORE is true if the result of the function call is ignored. This
10130 function returns NULL_TREE if no simplification was possible. */
10133 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
10135 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10136 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10140 case BUILT_IN_CONSTANT_P:
10142 tree val = fold_builtin_constant_p (arg0);
10144 /* Gimplification will pull the CALL_EXPR for the builtin out of
10145 an if condition. When not optimizing, we'll not CSE it back.
10146 To avoid link error types of regressions, return false now. */
10147 if (!val && !optimize)
10148 val = integer_zero_node;
10153 case BUILT_IN_CLASSIFY_TYPE:
10154 return fold_builtin_classify_type (arg0);
10156 case BUILT_IN_STRLEN:
10157 return fold_builtin_strlen (type, arg0);
10159 CASE_FLT_FN (BUILT_IN_FABS):
10160 return fold_builtin_fabs (arg0, type);
10163 case BUILT_IN_LABS:
10164 case BUILT_IN_LLABS:
10165 case BUILT_IN_IMAXABS:
10166 return fold_builtin_abs (arg0, type);
10168 CASE_FLT_FN (BUILT_IN_CONJ):
10169 if (validate_arg (arg0, COMPLEX_TYPE))
10170 return fold_build1 (CONJ_EXPR, type, arg0);
10173 CASE_FLT_FN (BUILT_IN_CREAL):
10174 if (validate_arg (arg0, COMPLEX_TYPE))
10175 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
10178 CASE_FLT_FN (BUILT_IN_CIMAG):
10179 if (validate_arg (arg0, COMPLEX_TYPE))
10180 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10183 CASE_FLT_FN (BUILT_IN_CCOS):
10184 CASE_FLT_FN (BUILT_IN_CCOSH):
10185 /* These functions are "even", i.e. f(x) == f(-x). */
10186 if (validate_arg (arg0, COMPLEX_TYPE))
10188 tree narg = fold_strip_sign_ops (arg0);
10190 return build_call_expr (fndecl, 1, narg);
10194 CASE_FLT_FN (BUILT_IN_CABS):
10195 return fold_builtin_cabs (arg0, type, fndecl);
10197 CASE_FLT_FN (BUILT_IN_CARG):
10198 return fold_builtin_carg (arg0, type);
10200 CASE_FLT_FN (BUILT_IN_SQRT):
10201 return fold_builtin_sqrt (arg0, type);
10203 CASE_FLT_FN (BUILT_IN_CBRT):
10204 return fold_builtin_cbrt (arg0, type);
10206 CASE_FLT_FN (BUILT_IN_ASIN):
10207 if (validate_arg (arg0, REAL_TYPE))
10208 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10209 &dconstm1, &dconst1, true);
10212 CASE_FLT_FN (BUILT_IN_ACOS):
10213 if (validate_arg (arg0, REAL_TYPE))
10214 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10215 &dconstm1, &dconst1, true);
10218 CASE_FLT_FN (BUILT_IN_ATAN):
10219 if (validate_arg (arg0, REAL_TYPE))
10220 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10223 CASE_FLT_FN (BUILT_IN_ASINH):
10224 if (validate_arg (arg0, REAL_TYPE))
10225 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10228 CASE_FLT_FN (BUILT_IN_ACOSH):
10229 if (validate_arg (arg0, REAL_TYPE))
10230 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10231 &dconst1, NULL, true);
10234 CASE_FLT_FN (BUILT_IN_ATANH):
10235 if (validate_arg (arg0, REAL_TYPE))
10236 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10237 &dconstm1, &dconst1, false);
10240 CASE_FLT_FN (BUILT_IN_SIN):
10241 if (validate_arg (arg0, REAL_TYPE))
10242 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10245 CASE_FLT_FN (BUILT_IN_COS):
10246 return fold_builtin_cos (arg0, type, fndecl);
10249 CASE_FLT_FN (BUILT_IN_TAN):
10250 return fold_builtin_tan (arg0, type);
10252 CASE_FLT_FN (BUILT_IN_CEXP):
10253 return fold_builtin_cexp (arg0, type);
10255 CASE_FLT_FN (BUILT_IN_CEXPI):
10256 if (validate_arg (arg0, REAL_TYPE))
10257 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10260 CASE_FLT_FN (BUILT_IN_SINH):
10261 if (validate_arg (arg0, REAL_TYPE))
10262 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10265 CASE_FLT_FN (BUILT_IN_COSH):
10266 return fold_builtin_cosh (arg0, type, fndecl);
10268 CASE_FLT_FN (BUILT_IN_TANH):
10269 if (validate_arg (arg0, REAL_TYPE))
10270 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10273 CASE_FLT_FN (BUILT_IN_ERF):
10274 if (validate_arg (arg0, REAL_TYPE))
10275 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10278 CASE_FLT_FN (BUILT_IN_ERFC):
10279 if (validate_arg (arg0, REAL_TYPE))
10280 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10283 CASE_FLT_FN (BUILT_IN_TGAMMA):
10284 if (validate_arg (arg0, REAL_TYPE))
10285 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10288 CASE_FLT_FN (BUILT_IN_EXP):
10289 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10291 CASE_FLT_FN (BUILT_IN_EXP2):
10292 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10294 CASE_FLT_FN (BUILT_IN_EXP10):
10295 CASE_FLT_FN (BUILT_IN_POW10):
10296 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10298 CASE_FLT_FN (BUILT_IN_EXPM1):
10299 if (validate_arg (arg0, REAL_TYPE))
10300 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10303 CASE_FLT_FN (BUILT_IN_LOG):
10304 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10306 CASE_FLT_FN (BUILT_IN_LOG2):
10307 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10309 CASE_FLT_FN (BUILT_IN_LOG10):
10310 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10312 CASE_FLT_FN (BUILT_IN_LOG1P):
10313 if (validate_arg (arg0, REAL_TYPE))
10314 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10315 &dconstm1, NULL, false);
10318 CASE_FLT_FN (BUILT_IN_J0):
10319 if (validate_arg (arg0, REAL_TYPE))
10320 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10324 CASE_FLT_FN (BUILT_IN_J1):
10325 if (validate_arg (arg0, REAL_TYPE))
10326 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10330 CASE_FLT_FN (BUILT_IN_Y0):
10331 if (validate_arg (arg0, REAL_TYPE))
10332 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10333 &dconst0, NULL, false);
10336 CASE_FLT_FN (BUILT_IN_Y1):
10337 if (validate_arg (arg0, REAL_TYPE))
10338 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10339 &dconst0, NULL, false);
10342 CASE_FLT_FN (BUILT_IN_NAN):
10343 case BUILT_IN_NAND32:
10344 case BUILT_IN_NAND64:
10345 case BUILT_IN_NAND128:
10346 return fold_builtin_nan (arg0, type, true);
10348 CASE_FLT_FN (BUILT_IN_NANS):
10349 return fold_builtin_nan (arg0, type, false);
10351 CASE_FLT_FN (BUILT_IN_FLOOR):
10352 return fold_builtin_floor (fndecl, arg0);
10354 CASE_FLT_FN (BUILT_IN_CEIL):
10355 return fold_builtin_ceil (fndecl, arg0);
10357 CASE_FLT_FN (BUILT_IN_TRUNC):
10358 return fold_builtin_trunc (fndecl, arg0);
10360 CASE_FLT_FN (BUILT_IN_ROUND):
10361 return fold_builtin_round (fndecl, arg0);
10363 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10364 CASE_FLT_FN (BUILT_IN_RINT):
10365 return fold_trunc_transparent_mathfn (fndecl, arg0);
10367 CASE_FLT_FN (BUILT_IN_LCEIL):
10368 CASE_FLT_FN (BUILT_IN_LLCEIL):
10369 CASE_FLT_FN (BUILT_IN_LFLOOR):
10370 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10371 CASE_FLT_FN (BUILT_IN_LROUND):
10372 CASE_FLT_FN (BUILT_IN_LLROUND):
10373 return fold_builtin_int_roundingfn (fndecl, arg0);
10375 CASE_FLT_FN (BUILT_IN_LRINT):
10376 CASE_FLT_FN (BUILT_IN_LLRINT):
10377 return fold_fixed_mathfn (fndecl, arg0);
10379 case BUILT_IN_BSWAP32:
10380 case BUILT_IN_BSWAP64:
10381 return fold_builtin_bswap (fndecl, arg0);
10383 CASE_INT_FN (BUILT_IN_FFS):
10384 CASE_INT_FN (BUILT_IN_CLZ):
10385 CASE_INT_FN (BUILT_IN_CTZ):
10386 CASE_INT_FN (BUILT_IN_POPCOUNT):
10387 CASE_INT_FN (BUILT_IN_PARITY):
10388 return fold_builtin_bitop (fndecl, arg0);
10390 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10391 return fold_builtin_signbit (arg0, type);
10393 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10394 return fold_builtin_significand (arg0, type);
10396 CASE_FLT_FN (BUILT_IN_ILOGB):
10397 CASE_FLT_FN (BUILT_IN_LOGB):
10398 return fold_builtin_logb (arg0, type);
10400 case BUILT_IN_ISASCII:
10401 return fold_builtin_isascii (arg0);
10403 case BUILT_IN_TOASCII:
10404 return fold_builtin_toascii (arg0);
10406 case BUILT_IN_ISDIGIT:
10407 return fold_builtin_isdigit (arg0);
10409 CASE_FLT_FN (BUILT_IN_FINITE):
10410 case BUILT_IN_FINITED32:
10411 case BUILT_IN_FINITED64:
10412 case BUILT_IN_FINITED128:
10413 case BUILT_IN_ISFINITE:
10414 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10416 CASE_FLT_FN (BUILT_IN_ISINF):
10417 case BUILT_IN_ISINFD32:
10418 case BUILT_IN_ISINFD64:
10419 case BUILT_IN_ISINFD128:
10420 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10422 case BUILT_IN_ISINF_SIGN:
10423 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10425 CASE_FLT_FN (BUILT_IN_ISNAN):
10426 case BUILT_IN_ISNAND32:
10427 case BUILT_IN_ISNAND64:
10428 case BUILT_IN_ISNAND128:
10429 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10431 case BUILT_IN_PRINTF:
10432 case BUILT_IN_PRINTF_UNLOCKED:
10433 case BUILT_IN_VPRINTF:
10434 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10444 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10445 IGNORE is true if the result of the function call is ignored. This
10446 function returns NULL_TREE if no simplification was possible. */
10449 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10451 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10452 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10456 CASE_FLT_FN (BUILT_IN_JN):
10457 if (validate_arg (arg0, INTEGER_TYPE)
10458 && validate_arg (arg1, REAL_TYPE))
10459 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10462 CASE_FLT_FN (BUILT_IN_YN):
10463 if (validate_arg (arg0, INTEGER_TYPE)
10464 && validate_arg (arg1, REAL_TYPE))
10465 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10469 CASE_FLT_FN (BUILT_IN_DREM):
10470 CASE_FLT_FN (BUILT_IN_REMAINDER):
10471 if (validate_arg (arg0, REAL_TYPE)
10472 && validate_arg(arg1, REAL_TYPE))
10473 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10476 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10477 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10478 if (validate_arg (arg0, REAL_TYPE)
10479 && validate_arg(arg1, POINTER_TYPE))
10480 return do_mpfr_lgamma_r (arg0, arg1, type);
10483 CASE_FLT_FN (BUILT_IN_ATAN2):
10484 if (validate_arg (arg0, REAL_TYPE)
10485 && validate_arg(arg1, REAL_TYPE))
10486 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10489 CASE_FLT_FN (BUILT_IN_FDIM):
10490 if (validate_arg (arg0, REAL_TYPE)
10491 && validate_arg(arg1, REAL_TYPE))
10492 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10495 CASE_FLT_FN (BUILT_IN_HYPOT):
10496 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10498 CASE_FLT_FN (BUILT_IN_LDEXP):
10499 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10500 CASE_FLT_FN (BUILT_IN_SCALBN):
10501 CASE_FLT_FN (BUILT_IN_SCALBLN):
10502 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10504 CASE_FLT_FN (BUILT_IN_FREXP):
10505 return fold_builtin_frexp (arg0, arg1, type);
10507 CASE_FLT_FN (BUILT_IN_MODF):
10508 return fold_builtin_modf (arg0, arg1, type);
10510 case BUILT_IN_BZERO:
10511 return fold_builtin_bzero (arg0, arg1, ignore);
10513 case BUILT_IN_FPUTS:
10514 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10516 case BUILT_IN_FPUTS_UNLOCKED:
10517 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10519 case BUILT_IN_STRSTR:
10520 return fold_builtin_strstr (arg0, arg1, type);
10522 case BUILT_IN_STRCAT:
10523 return fold_builtin_strcat (arg0, arg1);
10525 case BUILT_IN_STRSPN:
10526 return fold_builtin_strspn (arg0, arg1);
10528 case BUILT_IN_STRCSPN:
10529 return fold_builtin_strcspn (arg0, arg1);
10531 case BUILT_IN_STRCHR:
10532 case BUILT_IN_INDEX:
10533 return fold_builtin_strchr (arg0, arg1, type);
10535 case BUILT_IN_STRRCHR:
10536 case BUILT_IN_RINDEX:
10537 return fold_builtin_strrchr (arg0, arg1, type);
10539 case BUILT_IN_STRCPY:
10540 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10542 case BUILT_IN_STPCPY:
10545 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10549 return build_call_expr (fn, 2, arg0, arg1);
10553 case BUILT_IN_STRCMP:
10554 return fold_builtin_strcmp (arg0, arg1);
10556 case BUILT_IN_STRPBRK:
10557 return fold_builtin_strpbrk (arg0, arg1, type);
10559 case BUILT_IN_EXPECT:
10560 return fold_builtin_expect (arg0, arg1);
10562 CASE_FLT_FN (BUILT_IN_POW):
10563 return fold_builtin_pow (fndecl, arg0, arg1, type);
10565 CASE_FLT_FN (BUILT_IN_POWI):
10566 return fold_builtin_powi (fndecl, arg0, arg1, type);
10568 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10569 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10571 CASE_FLT_FN (BUILT_IN_FMIN):
10572 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10574 CASE_FLT_FN (BUILT_IN_FMAX):
10575 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10577 case BUILT_IN_ISGREATER:
10578 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10579 case BUILT_IN_ISGREATEREQUAL:
10580 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10581 case BUILT_IN_ISLESS:
10582 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10583 case BUILT_IN_ISLESSEQUAL:
10584 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10585 case BUILT_IN_ISLESSGREATER:
10586 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10587 case BUILT_IN_ISUNORDERED:
10588 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10591 /* We do the folding for va_start in the expander. */
10592 case BUILT_IN_VA_START:
10595 case BUILT_IN_SPRINTF:
10596 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10598 case BUILT_IN_OBJECT_SIZE:
10599 return fold_builtin_object_size (arg0, arg1);
10601 case BUILT_IN_PRINTF:
10602 case BUILT_IN_PRINTF_UNLOCKED:
10603 case BUILT_IN_VPRINTF:
10604 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10606 case BUILT_IN_PRINTF_CHK:
10607 case BUILT_IN_VPRINTF_CHK:
10608 if (!validate_arg (arg0, INTEGER_TYPE)
10609 || TREE_SIDE_EFFECTS (arg0))
10612 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10615 case BUILT_IN_FPRINTF:
10616 case BUILT_IN_FPRINTF_UNLOCKED:
10617 case BUILT_IN_VFPRINTF:
10618 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10627 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10628 and ARG2. IGNORE is true if the result of the function call is ignored.
10629 This function returns NULL_TREE if no simplification was possible. */
10632 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10634 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10635 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10639 CASE_FLT_FN (BUILT_IN_SINCOS):
10640 return fold_builtin_sincos (arg0, arg1, arg2);
10642 CASE_FLT_FN (BUILT_IN_FMA):
10643 if (validate_arg (arg0, REAL_TYPE)
10644 && validate_arg(arg1, REAL_TYPE)
10645 && validate_arg(arg2, REAL_TYPE))
10646 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10649 CASE_FLT_FN (BUILT_IN_REMQUO):
10650 if (validate_arg (arg0, REAL_TYPE)
10651 && validate_arg(arg1, REAL_TYPE)
10652 && validate_arg(arg2, POINTER_TYPE))
10653 return do_mpfr_remquo (arg0, arg1, arg2);
10656 case BUILT_IN_MEMSET:
10657 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10659 case BUILT_IN_BCOPY:
10660 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10662 case BUILT_IN_MEMCPY:
10663 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10665 case BUILT_IN_MEMPCPY:
10666 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10668 case BUILT_IN_MEMMOVE:
10669 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10671 case BUILT_IN_STRNCAT:
10672 return fold_builtin_strncat (arg0, arg1, arg2);
10674 case BUILT_IN_STRNCPY:
10675 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10677 case BUILT_IN_STRNCMP:
10678 return fold_builtin_strncmp (arg0, arg1, arg2);
10680 case BUILT_IN_MEMCHR:
10681 return fold_builtin_memchr (arg0, arg1, arg2, type);
10683 case BUILT_IN_BCMP:
10684 case BUILT_IN_MEMCMP:
10685 return fold_builtin_memcmp (arg0, arg1, arg2);;
10687 case BUILT_IN_SPRINTF:
10688 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10690 case BUILT_IN_STRCPY_CHK:
10691 case BUILT_IN_STPCPY_CHK:
10692 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10695 case BUILT_IN_STRCAT_CHK:
10696 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10698 case BUILT_IN_PRINTF_CHK:
10699 case BUILT_IN_VPRINTF_CHK:
10700 if (!validate_arg (arg0, INTEGER_TYPE)
10701 || TREE_SIDE_EFFECTS (arg0))
10704 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10707 case BUILT_IN_FPRINTF:
10708 case BUILT_IN_FPRINTF_UNLOCKED:
10709 case BUILT_IN_VFPRINTF:
10710 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10712 case BUILT_IN_FPRINTF_CHK:
10713 case BUILT_IN_VFPRINTF_CHK:
10714 if (!validate_arg (arg1, INTEGER_TYPE)
10715 || TREE_SIDE_EFFECTS (arg1))
10718 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10727 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10728 ARG2, and ARG3. IGNORE is true if the result of the function call is
10729 ignored. This function returns NULL_TREE if no simplification was
10733 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10736 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10740 case BUILT_IN_MEMCPY_CHK:
10741 case BUILT_IN_MEMPCPY_CHK:
10742 case BUILT_IN_MEMMOVE_CHK:
10743 case BUILT_IN_MEMSET_CHK:
10744 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10746 DECL_FUNCTION_CODE (fndecl));
10748 case BUILT_IN_STRNCPY_CHK:
10749 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10751 case BUILT_IN_STRNCAT_CHK:
10752 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10754 case BUILT_IN_FPRINTF_CHK:
10755 case BUILT_IN_VFPRINTF_CHK:
10756 if (!validate_arg (arg1, INTEGER_TYPE)
10757 || TREE_SIDE_EFFECTS (arg1))
10760 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10770 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10771 arguments, where NARGS <= 4. IGNORE is true if the result of the
10772 function call is ignored. This function returns NULL_TREE if no
10773 simplification was possible. Note that this only folds builtins with
10774 fixed argument patterns. Foldings that do varargs-to-varargs
10775 transformations, or that match calls with more than 4 arguments,
10776 need to be handled with fold_builtin_varargs instead. */
10778 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10781 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10783 tree ret = NULL_TREE;
10788 ret = fold_builtin_0 (fndecl, ignore);
10791 ret = fold_builtin_1 (fndecl, args[0], ignore);
10794 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10797 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10800 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10808 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10809 TREE_NO_WARNING (ret) = 1;
10815 /* Builtins with folding operations that operate on "..." arguments
10816 need special handling; we need to store the arguments in a convenient
10817 data structure before attempting any folding. Fortunately there are
10818 only a few builtins that fall into this category. FNDECL is the
10819 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10820 result of the function call is ignored. */
10823 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10825 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10826 tree ret = NULL_TREE;
10830 case BUILT_IN_SPRINTF_CHK:
10831 case BUILT_IN_VSPRINTF_CHK:
10832 ret = fold_builtin_sprintf_chk (exp, fcode);
10835 case BUILT_IN_SNPRINTF_CHK:
10836 case BUILT_IN_VSNPRINTF_CHK:
10837 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10840 case BUILT_IN_FPCLASSIFY:
10841 ret = fold_builtin_fpclassify (exp);
10849 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10850 TREE_NO_WARNING (ret) = 1;
10856 /* Return true if FNDECL shouldn't be folded right now.
10857 If a built-in function has an inline attribute always_inline
10858 wrapper, defer folding it after always_inline functions have
10859 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10860 might not be performed. */
10863 avoid_folding_inline_builtin (tree fndecl)
10865 return (DECL_DECLARED_INLINE_P (fndecl)
10866 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10868 && !cfun->always_inline_functions_inlined
10869 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10872 /* A wrapper function for builtin folding that prevents warnings for
10873 "statement without effect" and the like, caused by removing the
10874 call node earlier than the warning is generated. */
10877 fold_call_expr (tree exp, bool ignore)
10879 tree ret = NULL_TREE;
10880 tree fndecl = get_callee_fndecl (exp);
10882 && TREE_CODE (fndecl) == FUNCTION_DECL
10883 && DECL_BUILT_IN (fndecl)
10884 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10885 yet. Defer folding until we see all the arguments
10886 (after inlining). */
10887 && !CALL_EXPR_VA_ARG_PACK (exp))
10889 int nargs = call_expr_nargs (exp);
10891 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10892 instead last argument is __builtin_va_arg_pack (). Defer folding
10893 even in that case, until arguments are finalized. */
10894 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10896 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10898 && TREE_CODE (fndecl2) == FUNCTION_DECL
10899 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10900 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10904 if (avoid_folding_inline_builtin (fndecl))
10907 /* FIXME: Don't use a list in this interface. */
10908 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10909 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10912 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10914 tree *args = CALL_EXPR_ARGP (exp);
10915 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10918 ret = fold_builtin_varargs (fndecl, exp, ignore);
10921 /* Propagate location information from original call to
10922 expansion of builtin. Otherwise things like
10923 maybe_emit_chk_warning, that operate on the expansion
10924 of a builtin, will use the wrong location information. */
10925 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10927 tree realret = ret;
10928 if (TREE_CODE (ret) == NOP_EXPR)
10929 realret = TREE_OPERAND (ret, 0);
10930 if (CAN_HAVE_LOCATION_P (realret)
10931 && !EXPR_HAS_LOCATION (realret))
10932 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10942 /* Conveniently construct a function call expression. FNDECL names the
10943 function to be called and ARGLIST is a TREE_LIST of arguments. */
10946 build_function_call_expr (tree fndecl, tree arglist)
10948 tree fntype = TREE_TYPE (fndecl);
10949 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10950 int n = list_length (arglist);
10951 tree *argarray = (tree *) alloca (n * sizeof (tree));
10954 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10955 argarray[i] = TREE_VALUE (arglist);
10956 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10959 /* Conveniently construct a function call expression. FNDECL names the
10960 function to be called, N is the number of arguments, and the "..."
10961 parameters are the argument expressions. */
10964 build_call_expr (tree fndecl, int n, ...)
10967 tree fntype = TREE_TYPE (fndecl);
10968 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10969 tree *argarray = (tree *) alloca (n * sizeof (tree));
10973 for (i = 0; i < n; i++)
10974 argarray[i] = va_arg (ap, tree);
10976 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10979 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10980 N arguments are passed in the array ARGARRAY. */
10983 fold_builtin_call_array (tree type,
10988 tree ret = NULL_TREE;
10992 if (TREE_CODE (fn) == ADDR_EXPR)
10994 tree fndecl = TREE_OPERAND (fn, 0);
10995 if (TREE_CODE (fndecl) == FUNCTION_DECL
10996 && DECL_BUILT_IN (fndecl))
10998 /* If last argument is __builtin_va_arg_pack (), arguments to this
10999 function are not finalized yet. Defer folding until they are. */
11000 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11002 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11004 && TREE_CODE (fndecl2) == FUNCTION_DECL
11005 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11006 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11007 return build_call_array (type, fn, n, argarray);
11009 if (avoid_folding_inline_builtin (fndecl))
11010 return build_call_array (type, fn, n, argarray);
11011 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11013 tree arglist = NULL_TREE;
11014 for (i = n - 1; i >= 0; i--)
11015 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
11016 ret = targetm.fold_builtin (fndecl, arglist, false);
11019 return build_call_array (type, fn, n, argarray);
11021 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11023 /* First try the transformations that don't require consing up
11025 ret = fold_builtin_n (fndecl, argarray, n, false);
11030 /* If we got this far, we need to build an exp. */
11031 exp = build_call_array (type, fn, n, argarray);
11032 ret = fold_builtin_varargs (fndecl, exp, false);
11033 return ret ? ret : exp;
11037 return build_call_array (type, fn, n, argarray);
11040 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11041 along with N new arguments specified as the "..." parameters. SKIP
11042 is the number of arguments in EXP to be omitted. This function is used
11043 to do varargs-to-varargs transformations. */
11046 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
11048 int oldnargs = call_expr_nargs (exp);
11049 int nargs = oldnargs - skip + n;
11050 tree fntype = TREE_TYPE (fndecl);
11051 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11059 buffer = XALLOCAVEC (tree, nargs);
11061 for (i = 0; i < n; i++)
11062 buffer[i] = va_arg (ap, tree);
11064 for (j = skip; j < oldnargs; j++, i++)
11065 buffer[i] = CALL_EXPR_ARG (exp, j);
11068 buffer = CALL_EXPR_ARGP (exp) + skip;
11070 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
11073 /* Validate a single argument ARG against a tree code CODE representing
11077 validate_arg (const_tree arg, enum tree_code code)
11081 else if (code == POINTER_TYPE)
11082 return POINTER_TYPE_P (TREE_TYPE (arg));
11083 else if (code == INTEGER_TYPE)
11084 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11085 return code == TREE_CODE (TREE_TYPE (arg));
11088 /* This function validates the types of a function call argument list
11089 against a specified list of tree_codes. If the last specifier is a 0,
11090 that represents an ellipses, otherwise the last specifier must be a
11093 This is the GIMPLE version of validate_arglist. Eventually we want to
11094 completely convert builtins.c to work from GIMPLEs and the tree based
11095 validate_arglist will then be removed. */
11098 validate_gimple_arglist (const_gimple call, ...)
11100 enum tree_code code;
11106 va_start (ap, call);
11111 code = va_arg (ap, enum tree_code);
11115 /* This signifies an ellipses, any further arguments are all ok. */
11119 /* This signifies an endlink, if no arguments remain, return
11120 true, otherwise return false. */
11121 res = (i == gimple_call_num_args (call));
11124 /* If no parameters remain or the parameter's code does not
11125 match the specified code, return false. Otherwise continue
11126 checking any remaining arguments. */
11127 arg = gimple_call_arg (call, i++);
11128 if (!validate_arg (arg, code))
11135 /* We need gotos here since we can only have one VA_CLOSE in a
11143 /* This function validates the types of a function call argument list
11144 against a specified list of tree_codes. If the last specifier is a 0,
11145 that represents an ellipses, otherwise the last specifier must be a
11149 validate_arglist (const_tree callexpr, ...)
11151 enum tree_code code;
11154 const_call_expr_arg_iterator iter;
11157 va_start (ap, callexpr);
11158 init_const_call_expr_arg_iterator (callexpr, &iter);
11162 code = va_arg (ap, enum tree_code);
11166 /* This signifies an ellipses, any further arguments are all ok. */
11170 /* This signifies an endlink, if no arguments remain, return
11171 true, otherwise return false. */
11172 res = !more_const_call_expr_args_p (&iter);
11175 /* If no parameters remain or the parameter's code does not
11176 match the specified code, return false. Otherwise continue
11177 checking any remaining arguments. */
11178 arg = next_const_call_expr_arg (&iter);
11179 if (!validate_arg (arg, code))
11186 /* We need gotos here since we can only have one VA_CLOSE in a
11194 /* Default target-specific builtin expander that does nothing. */
11197 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11198 rtx target ATTRIBUTE_UNUSED,
11199 rtx subtarget ATTRIBUTE_UNUSED,
11200 enum machine_mode mode ATTRIBUTE_UNUSED,
11201 int ignore ATTRIBUTE_UNUSED)
11206 /* Returns true is EXP represents data that would potentially reside
11207 in a readonly section. */
11210 readonly_data_expr (tree exp)
11214 if (TREE_CODE (exp) != ADDR_EXPR)
11217 exp = get_base_address (TREE_OPERAND (exp, 0));
11221 /* Make sure we call decl_readonly_section only for trees it
11222 can handle (since it returns true for everything it doesn't
11224 if (TREE_CODE (exp) == STRING_CST
11225 || TREE_CODE (exp) == CONSTRUCTOR
11226 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11227 return decl_readonly_section (exp, 0);
11232 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11233 to the call, and TYPE is its return type.
11235 Return NULL_TREE if no simplification was possible, otherwise return the
11236 simplified form of the call as a tree.
11238 The simplified form may be a constant or other expression which
11239 computes the same value, but in a more efficient manner (including
11240 calls to other builtin functions).
11242 The call may contain arguments which need to be evaluated, but
11243 which are not useful to determine the result of the call. In
11244 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11245 COMPOUND_EXPR will be an argument which must be evaluated.
11246 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11247 COMPOUND_EXPR in the chain will contain the tree for the simplified
11248 form of the builtin function call. */
11251 fold_builtin_strstr (tree s1, tree s2, tree type)
11253 if (!validate_arg (s1, POINTER_TYPE)
11254 || !validate_arg (s2, POINTER_TYPE))
11259 const char *p1, *p2;
11261 p2 = c_getstr (s2);
11265 p1 = c_getstr (s1);
11268 const char *r = strstr (p1, p2);
11272 return build_int_cst (TREE_TYPE (s1), 0);
11274 /* Return an offset into the constant string argument. */
11275 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11276 s1, size_int (r - p1));
11277 return fold_convert (type, tem);
11280 /* The argument is const char *, and the result is char *, so we need
11281 a type conversion here to avoid a warning. */
11283 return fold_convert (type, s1);
11288 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11292 /* New argument list transforming strstr(s1, s2) to
11293 strchr(s1, s2[0]). */
11294 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11298 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11299 the call, and TYPE is its return type.
11301 Return NULL_TREE if no simplification was possible, otherwise return the
11302 simplified form of the call as a tree.
11304 The simplified form may be a constant or other expression which
11305 computes the same value, but in a more efficient manner (including
11306 calls to other builtin functions).
11308 The call may contain arguments which need to be evaluated, but
11309 which are not useful to determine the result of the call. In
11310 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11311 COMPOUND_EXPR will be an argument which must be evaluated.
11312 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11313 COMPOUND_EXPR in the chain will contain the tree for the simplified
11314 form of the builtin function call. */
11317 fold_builtin_strchr (tree s1, tree s2, tree type)
11319 if (!validate_arg (s1, POINTER_TYPE)
11320 || !validate_arg (s2, INTEGER_TYPE))
11326 if (TREE_CODE (s2) != INTEGER_CST)
11329 p1 = c_getstr (s1);
11336 if (target_char_cast (s2, &c))
11339 r = strchr (p1, c);
11342 return build_int_cst (TREE_TYPE (s1), 0);
11344 /* Return an offset into the constant string argument. */
11345 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11346 s1, size_int (r - p1));
11347 return fold_convert (type, tem);
11353 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11354 the call, and TYPE is its return type.
11356 Return NULL_TREE if no simplification was possible, otherwise return the
11357 simplified form of the call as a tree.
11359 The simplified form may be a constant or other expression which
11360 computes the same value, but in a more efficient manner (including
11361 calls to other builtin functions).
11363 The call may contain arguments which need to be evaluated, but
11364 which are not useful to determine the result of the call. In
11365 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11366 COMPOUND_EXPR will be an argument which must be evaluated.
11367 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11368 COMPOUND_EXPR in the chain will contain the tree for the simplified
11369 form of the builtin function call. */
11372 fold_builtin_strrchr (tree s1, tree s2, tree type)
11374 if (!validate_arg (s1, POINTER_TYPE)
11375 || !validate_arg (s2, INTEGER_TYPE))
11382 if (TREE_CODE (s2) != INTEGER_CST)
11385 p1 = c_getstr (s1);
11392 if (target_char_cast (s2, &c))
11395 r = strrchr (p1, c);
11398 return build_int_cst (TREE_TYPE (s1), 0);
11400 /* Return an offset into the constant string argument. */
11401 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11402 s1, size_int (r - p1));
11403 return fold_convert (type, tem);
11406 if (! integer_zerop (s2))
11409 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11413 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11414 return build_call_expr (fn, 2, s1, s2);
11418 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11419 to the call, and TYPE is its return type.
11421 Return NULL_TREE if no simplification was possible, otherwise return the
11422 simplified form of the call as a tree.
11424 The simplified form may be a constant or other expression which
11425 computes the same value, but in a more efficient manner (including
11426 calls to other builtin functions).
11428 The call may contain arguments which need to be evaluated, but
11429 which are not useful to determine the result of the call. In
11430 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11431 COMPOUND_EXPR will be an argument which must be evaluated.
11432 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11433 COMPOUND_EXPR in the chain will contain the tree for the simplified
11434 form of the builtin function call. */
11437 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11439 if (!validate_arg (s1, POINTER_TYPE)
11440 || !validate_arg (s2, POINTER_TYPE))
11445 const char *p1, *p2;
11447 p2 = c_getstr (s2);
11451 p1 = c_getstr (s1);
11454 const char *r = strpbrk (p1, p2);
11458 return build_int_cst (TREE_TYPE (s1), 0);
11460 /* Return an offset into the constant string argument. */
11461 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11462 s1, size_int (r - p1));
11463 return fold_convert (type, tem);
11467 /* strpbrk(x, "") == NULL.
11468 Evaluate and ignore s1 in case it had side-effects. */
11469 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11472 return NULL_TREE; /* Really call strpbrk. */
11474 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11478 /* New argument list transforming strpbrk(s1, s2) to
11479 strchr(s1, s2[0]). */
11480 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11484 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11487 Return NULL_TREE if no simplification was possible, otherwise return the
11488 simplified form of the call as a tree.
11490 The simplified form may be a constant or other expression which
11491 computes the same value, but in a more efficient manner (including
11492 calls to other builtin functions).
11494 The call may contain arguments which need to be evaluated, but
11495 which are not useful to determine the result of the call. In
11496 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11497 COMPOUND_EXPR will be an argument which must be evaluated.
11498 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11499 COMPOUND_EXPR in the chain will contain the tree for the simplified
11500 form of the builtin function call. */
11503 fold_builtin_strcat (tree dst, tree src)
11505 if (!validate_arg (dst, POINTER_TYPE)
11506 || !validate_arg (src, POINTER_TYPE))
11510 const char *p = c_getstr (src);
11512 /* If the string length is zero, return the dst parameter. */
11513 if (p && *p == '\0')
11520 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11521 arguments to the call.
11523 Return NULL_TREE if no simplification was possible, otherwise return the
11524 simplified form of the call as a tree.
11526 The simplified form may be a constant or other expression which
11527 computes the same value, but in a more efficient manner (including
11528 calls to other builtin functions).
11530 The call may contain arguments which need to be evaluated, but
11531 which are not useful to determine the result of the call. In
11532 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11533 COMPOUND_EXPR will be an argument which must be evaluated.
11534 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11535 COMPOUND_EXPR in the chain will contain the tree for the simplified
11536 form of the builtin function call. */
11539 fold_builtin_strncat (tree dst, tree src, tree len)
11541 if (!validate_arg (dst, POINTER_TYPE)
11542 || !validate_arg (src, POINTER_TYPE)
11543 || !validate_arg (len, INTEGER_TYPE))
11547 const char *p = c_getstr (src);
11549 /* If the requested length is zero, or the src parameter string
11550 length is zero, return the dst parameter. */
11551 if (integer_zerop (len) || (p && *p == '\0'))
11552 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11554 /* If the requested len is greater than or equal to the string
11555 length, call strcat. */
11556 if (TREE_CODE (len) == INTEGER_CST && p
11557 && compare_tree_int (len, strlen (p)) >= 0)
11559 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11561 /* If the replacement _DECL isn't initialized, don't do the
11566 return build_call_expr (fn, 2, dst, src);
11572 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11575 Return NULL_TREE if no simplification was possible, otherwise return the
11576 simplified form of the call as a tree.
11578 The simplified form may be a constant or other expression which
11579 computes the same value, but in a more efficient manner (including
11580 calls to other builtin functions).
11582 The call may contain arguments which need to be evaluated, but
11583 which are not useful to determine the result of the call. In
11584 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11585 COMPOUND_EXPR will be an argument which must be evaluated.
11586 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11587 COMPOUND_EXPR in the chain will contain the tree for the simplified
11588 form of the builtin function call. */
11591 fold_builtin_strspn (tree s1, tree s2)
11593 if (!validate_arg (s1, POINTER_TYPE)
11594 || !validate_arg (s2, POINTER_TYPE))
11598 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11600 /* If both arguments are constants, evaluate at compile-time. */
11603 const size_t r = strspn (p1, p2);
11604 return size_int (r);
11607 /* If either argument is "", return NULL_TREE. */
11608 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11609 /* Evaluate and ignore both arguments in case either one has
11611 return omit_two_operands (size_type_node, size_zero_node,
11617 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11620 Return NULL_TREE if no simplification was possible, otherwise return the
11621 simplified form of the call as a tree.
11623 The simplified form may be a constant or other expression which
11624 computes the same value, but in a more efficient manner (including
11625 calls to other builtin functions).
11627 The call may contain arguments which need to be evaluated, but
11628 which are not useful to determine the result of the call. In
11629 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11630 COMPOUND_EXPR will be an argument which must be evaluated.
11631 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11632 COMPOUND_EXPR in the chain will contain the tree for the simplified
11633 form of the builtin function call. */
11636 fold_builtin_strcspn (tree s1, tree s2)
11638 if (!validate_arg (s1, POINTER_TYPE)
11639 || !validate_arg (s2, POINTER_TYPE))
11643 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11645 /* If both arguments are constants, evaluate at compile-time. */
11648 const size_t r = strcspn (p1, p2);
11649 return size_int (r);
11652 /* If the first argument is "", return NULL_TREE. */
11653 if (p1 && *p1 == '\0')
11655 /* Evaluate and ignore argument s2 in case it has
11657 return omit_one_operand (size_type_node,
11658 size_zero_node, s2);
11661 /* If the second argument is "", return __builtin_strlen(s1). */
11662 if (p2 && *p2 == '\0')
11664 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11666 /* If the replacement _DECL isn't initialized, don't do the
11671 return build_call_expr (fn, 1, s1);
11677 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11678 to the call. IGNORE is true if the value returned
11679 by the builtin will be ignored. UNLOCKED is true is true if this
11680 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11681 the known length of the string. Return NULL_TREE if no simplification
11685 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11687 /* If we're using an unlocked function, assume the other unlocked
11688 functions exist explicitly. */
11689 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11690 : implicit_built_in_decls[BUILT_IN_FPUTC];
11691 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11692 : implicit_built_in_decls[BUILT_IN_FWRITE];
11694 /* If the return value is used, don't do the transformation. */
11698 /* Verify the arguments in the original call. */
11699 if (!validate_arg (arg0, POINTER_TYPE)
11700 || !validate_arg (arg1, POINTER_TYPE))
11704 len = c_strlen (arg0, 0);
11706 /* Get the length of the string passed to fputs. If the length
11707 can't be determined, punt. */
11709 || TREE_CODE (len) != INTEGER_CST)
11712 switch (compare_tree_int (len, 1))
11714 case -1: /* length is 0, delete the call entirely . */
11715 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11717 case 0: /* length is 1, call fputc. */
11719 const char *p = c_getstr (arg0);
11724 return build_call_expr (fn_fputc, 2,
11725 build_int_cst (NULL_TREE, p[0]), arg1);
11731 case 1: /* length is greater than 1, call fwrite. */
11733 /* If optimizing for size keep fputs. */
11734 if (optimize_function_for_size_p (cfun))
11736 /* New argument list transforming fputs(string, stream) to
11737 fwrite(string, 1, len, stream). */
11739 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11744 gcc_unreachable ();
11749 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11750 produced. False otherwise. This is done so that we don't output the error
11751 or warning twice or three times. */
11754 fold_builtin_next_arg (tree exp, bool va_start_p)
11756 tree fntype = TREE_TYPE (current_function_decl);
11757 int nargs = call_expr_nargs (exp);
11760 if (TYPE_ARG_TYPES (fntype) == 0
11761 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11762 == void_type_node))
11764 error ("%<va_start%> used in function with fixed args");
11770 if (va_start_p && (nargs != 2))
11772 error ("wrong number of arguments to function %<va_start%>");
11775 arg = CALL_EXPR_ARG (exp, 1);
11777 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11778 when we checked the arguments and if needed issued a warning. */
11783 /* Evidently an out of date version of <stdarg.h>; can't validate
11784 va_start's second argument, but can still work as intended. */
11785 warning (0, "%<__builtin_next_arg%> called without an argument");
11788 else if (nargs > 1)
11790 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11793 arg = CALL_EXPR_ARG (exp, 0);
11796 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11797 or __builtin_next_arg (0) the first time we see it, after checking
11798 the arguments and if needed issuing a warning. */
11799 if (!integer_zerop (arg))
11801 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11803 /* Strip off all nops for the sake of the comparison. This
11804 is not quite the same as STRIP_NOPS. It does more.
11805 We must also strip off INDIRECT_EXPR for C++ reference
11807 while (CONVERT_EXPR_P (arg)
11808 || TREE_CODE (arg) == INDIRECT_REF)
11809 arg = TREE_OPERAND (arg, 0);
11810 if (arg != last_parm)
11812 /* FIXME: Sometimes with the tree optimizers we can get the
11813 not the last argument even though the user used the last
11814 argument. We just warn and set the arg to be the last
11815 argument so that we will get wrong-code because of
11817 warning (0, "second parameter of %<va_start%> not last named argument");
11820 /* Undefined by C99 7.15.1.4p4 (va_start):
11821 "If the parameter parmN is declared with the register storage
11822 class, with a function or array type, or with a type that is
11823 not compatible with the type that results after application of
11824 the default argument promotions, the behavior is undefined."
11826 else if (DECL_REGISTER (arg))
11827 warning (0, "undefined behaviour when second parameter of "
11828 "%<va_start%> is declared with %<register%> storage");
11830 /* We want to verify the second parameter just once before the tree
11831 optimizers are run and then avoid keeping it in the tree,
11832 as otherwise we could warn even for correct code like:
11833 void foo (int i, ...)
11834 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11836 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11838 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11844 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11845 ORIG may be null if this is a 2-argument call. We don't attempt to
11846 simplify calls with more than 3 arguments.
11848 Return NULL_TREE if no simplification was possible, otherwise return the
11849 simplified form of the call as a tree. If IGNORED is true, it means that
11850 the caller does not use the returned value of the function. */
11853 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11856 const char *fmt_str = NULL;
11858 /* Verify the required arguments in the original call. We deal with two
11859 types of sprintf() calls: 'sprintf (str, fmt)' and
11860 'sprintf (dest, "%s", orig)'. */
11861 if (!validate_arg (dest, POINTER_TYPE)
11862 || !validate_arg (fmt, POINTER_TYPE))
11864 if (orig && !validate_arg (orig, POINTER_TYPE))
11867 /* Check whether the format is a literal string constant. */
11868 fmt_str = c_getstr (fmt);
11869 if (fmt_str == NULL)
11873 retval = NULL_TREE;
11875 if (!init_target_chars ())
11878 /* If the format doesn't contain % args or %%, use strcpy. */
11879 if (strchr (fmt_str, target_percent) == NULL)
11881 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11886 /* Don't optimize sprintf (buf, "abc", ptr++). */
11890 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11891 'format' is known to contain no % formats. */
11892 call = build_call_expr (fn, 2, dest, fmt);
11894 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11897 /* If the format is "%s", use strcpy if the result isn't used. */
11898 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11901 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11906 /* Don't crash on sprintf (str1, "%s"). */
11910 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11913 retval = c_strlen (orig, 1);
11914 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11917 call = build_call_expr (fn, 2, dest, orig);
11920 if (call && retval)
11922 retval = fold_convert
11923 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11925 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11931 /* Expand a call EXP to __builtin_object_size. */
11934 expand_builtin_object_size (tree exp)
11937 int object_size_type;
11938 tree fndecl = get_callee_fndecl (exp);
11940 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11942 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11944 expand_builtin_trap ();
11948 ost = CALL_EXPR_ARG (exp, 1);
11951 if (TREE_CODE (ost) != INTEGER_CST
11952 || tree_int_cst_sgn (ost) < 0
11953 || compare_tree_int (ost, 3) > 0)
11955 error ("%Klast argument of %D is not integer constant between 0 and 3",
11957 expand_builtin_trap ();
11961 object_size_type = tree_low_cst (ost, 0);
11963 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11966 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11967 FCODE is the BUILT_IN_* to use.
11968 Return NULL_RTX if we failed; the caller should emit a normal call,
11969 otherwise try to get the result in TARGET, if convenient (and in
11970 mode MODE if that's convenient). */
11973 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11974 enum built_in_function fcode)
11976 tree dest, src, len, size;
11978 if (!validate_arglist (exp,
11980 fcode == BUILT_IN_MEMSET_CHK
11981 ? INTEGER_TYPE : POINTER_TYPE,
11982 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11985 dest = CALL_EXPR_ARG (exp, 0);
11986 src = CALL_EXPR_ARG (exp, 1);
11987 len = CALL_EXPR_ARG (exp, 2);
11988 size = CALL_EXPR_ARG (exp, 3);
11990 if (! host_integerp (size, 1))
11993 if (host_integerp (len, 1) || integer_all_onesp (size))
11997 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11999 warning_at (tree_nonartificial_location (exp),
12000 0, "%Kcall to %D will always overflow destination buffer",
12001 exp, get_callee_fndecl (exp));
12006 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12007 mem{cpy,pcpy,move,set} is available. */
12010 case BUILT_IN_MEMCPY_CHK:
12011 fn = built_in_decls[BUILT_IN_MEMCPY];
12013 case BUILT_IN_MEMPCPY_CHK:
12014 fn = built_in_decls[BUILT_IN_MEMPCPY];
12016 case BUILT_IN_MEMMOVE_CHK:
12017 fn = built_in_decls[BUILT_IN_MEMMOVE];
12019 case BUILT_IN_MEMSET_CHK:
12020 fn = built_in_decls[BUILT_IN_MEMSET];
12029 fn = build_call_expr (fn, 3, dest, src, len);
12030 STRIP_TYPE_NOPS (fn);
12031 while (TREE_CODE (fn) == COMPOUND_EXPR)
12033 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12035 fn = TREE_OPERAND (fn, 1);
12037 if (TREE_CODE (fn) == CALL_EXPR)
12038 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12039 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12041 else if (fcode == BUILT_IN_MEMSET_CHK)
12045 unsigned int dest_align
12046 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12048 /* If DEST is not a pointer type, call the normal function. */
12049 if (dest_align == 0)
12052 /* If SRC and DEST are the same (and not volatile), do nothing. */
12053 if (operand_equal_p (src, dest, 0))
12057 if (fcode != BUILT_IN_MEMPCPY_CHK)
12059 /* Evaluate and ignore LEN in case it has side-effects. */
12060 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12061 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12064 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12065 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12068 /* __memmove_chk special case. */
12069 if (fcode == BUILT_IN_MEMMOVE_CHK)
12071 unsigned int src_align
12072 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12074 if (src_align == 0)
12077 /* If src is categorized for a readonly section we can use
12078 normal __memcpy_chk. */
12079 if (readonly_data_expr (src))
12081 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12084 fn = build_call_expr (fn, 4, dest, src, len, size);
12085 STRIP_TYPE_NOPS (fn);
12086 while (TREE_CODE (fn) == COMPOUND_EXPR)
12088 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12090 fn = TREE_OPERAND (fn, 1);
12092 if (TREE_CODE (fn) == CALL_EXPR)
12093 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12094 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12101 /* Emit warning if a buffer overflow is detected at compile time. */
12104 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12108 location_t loc = tree_nonartificial_location (exp);
12112 case BUILT_IN_STRCPY_CHK:
12113 case BUILT_IN_STPCPY_CHK:
12114 /* For __strcat_chk the warning will be emitted only if overflowing
12115 by at least strlen (dest) + 1 bytes. */
12116 case BUILT_IN_STRCAT_CHK:
12117 len = CALL_EXPR_ARG (exp, 1);
12118 size = CALL_EXPR_ARG (exp, 2);
12121 case BUILT_IN_STRNCAT_CHK:
12122 case BUILT_IN_STRNCPY_CHK:
12123 len = CALL_EXPR_ARG (exp, 2);
12124 size = CALL_EXPR_ARG (exp, 3);
12126 case BUILT_IN_SNPRINTF_CHK:
12127 case BUILT_IN_VSNPRINTF_CHK:
12128 len = CALL_EXPR_ARG (exp, 1);
12129 size = CALL_EXPR_ARG (exp, 3);
12132 gcc_unreachable ();
12138 if (! host_integerp (size, 1) || integer_all_onesp (size))
12143 len = c_strlen (len, 1);
12144 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12147 else if (fcode == BUILT_IN_STRNCAT_CHK)
12149 tree src = CALL_EXPR_ARG (exp, 1);
12150 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12152 src = c_strlen (src, 1);
12153 if (! src || ! host_integerp (src, 1))
12155 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12156 exp, get_callee_fndecl (exp));
12159 else if (tree_int_cst_lt (src, size))
12162 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12165 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12166 exp, get_callee_fndecl (exp));
12169 /* Emit warning if a buffer overflow is detected at compile time
12170 in __sprintf_chk/__vsprintf_chk calls. */
12173 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12175 tree dest, size, len, fmt, flag;
12176 const char *fmt_str;
12177 int nargs = call_expr_nargs (exp);
12179 /* Verify the required arguments in the original call. */
12183 dest = CALL_EXPR_ARG (exp, 0);
12184 flag = CALL_EXPR_ARG (exp, 1);
12185 size = CALL_EXPR_ARG (exp, 2);
12186 fmt = CALL_EXPR_ARG (exp, 3);
12188 if (! host_integerp (size, 1) || integer_all_onesp (size))
12191 /* Check whether the format is a literal string constant. */
12192 fmt_str = c_getstr (fmt);
12193 if (fmt_str == NULL)
12196 if (!init_target_chars ())
12199 /* If the format doesn't contain % args or %%, we know its size. */
12200 if (strchr (fmt_str, target_percent) == 0)
12201 len = build_int_cstu (size_type_node, strlen (fmt_str));
12202 /* If the format is "%s" and first ... argument is a string literal,
12204 else if (fcode == BUILT_IN_SPRINTF_CHK
12205 && strcmp (fmt_str, target_percent_s) == 0)
12211 arg = CALL_EXPR_ARG (exp, 4);
12212 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12215 len = c_strlen (arg, 1);
12216 if (!len || ! host_integerp (len, 1))
12222 if (! tree_int_cst_lt (len, size))
12223 warning_at (tree_nonartificial_location (exp),
12224 0, "%Kcall to %D will always overflow destination buffer",
12225 exp, get_callee_fndecl (exp));
12228 /* Emit warning if a free is called with address of a variable. */
12231 maybe_emit_free_warning (tree exp)
12233 tree arg = CALL_EXPR_ARG (exp, 0);
12236 if (TREE_CODE (arg) != ADDR_EXPR)
12239 arg = get_base_address (TREE_OPERAND (arg, 0));
12240 if (arg == NULL || INDIRECT_REF_P (arg))
12243 if (SSA_VAR_P (arg))
12244 warning_at (tree_nonartificial_location (exp),
12245 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12247 warning_at (tree_nonartificial_location (exp),
12248 0, "%Kattempt to free a non-heap object", exp);
12251 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12255 fold_builtin_object_size (tree ptr, tree ost)
12257 tree ret = NULL_TREE;
12258 int object_size_type;
12260 if (!validate_arg (ptr, POINTER_TYPE)
12261 || !validate_arg (ost, INTEGER_TYPE))
12266 if (TREE_CODE (ost) != INTEGER_CST
12267 || tree_int_cst_sgn (ost) < 0
12268 || compare_tree_int (ost, 3) > 0)
12271 object_size_type = tree_low_cst (ost, 0);
12273 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12274 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12275 and (size_t) 0 for types 2 and 3. */
12276 if (TREE_SIDE_EFFECTS (ptr))
12277 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12279 if (TREE_CODE (ptr) == ADDR_EXPR)
12280 ret = build_int_cstu (size_type_node,
12281 compute_builtin_object_size (ptr, object_size_type));
12283 else if (TREE_CODE (ptr) == SSA_NAME)
12285 unsigned HOST_WIDE_INT bytes;
12287 /* If object size is not known yet, delay folding until
12288 later. Maybe subsequent passes will help determining
12290 bytes = compute_builtin_object_size (ptr, object_size_type);
12291 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12293 ret = build_int_cstu (size_type_node, bytes);
12298 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12299 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12300 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12307 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12308 DEST, SRC, LEN, and SIZE are the arguments to the call.
12309 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12310 code of the builtin. If MAXLEN is not NULL, it is maximum length
12311 passed as third argument. */
12314 fold_builtin_memory_chk (tree fndecl,
12315 tree dest, tree src, tree len, tree size,
12316 tree maxlen, bool ignore,
12317 enum built_in_function fcode)
12321 if (!validate_arg (dest, POINTER_TYPE)
12322 || !validate_arg (src,
12323 (fcode == BUILT_IN_MEMSET_CHK
12324 ? INTEGER_TYPE : POINTER_TYPE))
12325 || !validate_arg (len, INTEGER_TYPE)
12326 || !validate_arg (size, INTEGER_TYPE))
12329 /* If SRC and DEST are the same (and not volatile), return DEST
12330 (resp. DEST+LEN for __mempcpy_chk). */
12331 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12333 if (fcode != BUILT_IN_MEMPCPY_CHK)
12334 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12337 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12338 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12342 if (! host_integerp (size, 1))
12345 if (! integer_all_onesp (size))
12347 if (! host_integerp (len, 1))
12349 /* If LEN is not constant, try MAXLEN too.
12350 For MAXLEN only allow optimizing into non-_ocs function
12351 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12352 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12354 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12356 /* (void) __mempcpy_chk () can be optimized into
12357 (void) __memcpy_chk (). */
12358 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12362 return build_call_expr (fn, 4, dest, src, len, size);
12370 if (tree_int_cst_lt (size, maxlen))
12375 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12376 mem{cpy,pcpy,move,set} is available. */
12379 case BUILT_IN_MEMCPY_CHK:
12380 fn = built_in_decls[BUILT_IN_MEMCPY];
12382 case BUILT_IN_MEMPCPY_CHK:
12383 fn = built_in_decls[BUILT_IN_MEMPCPY];
12385 case BUILT_IN_MEMMOVE_CHK:
12386 fn = built_in_decls[BUILT_IN_MEMMOVE];
12388 case BUILT_IN_MEMSET_CHK:
12389 fn = built_in_decls[BUILT_IN_MEMSET];
12398 return build_call_expr (fn, 3, dest, src, len);
12401 /* Fold a call to the __st[rp]cpy_chk builtin.
12402 DEST, SRC, and SIZE are the arguments to the call.
12403 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12404 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12405 strings passed as second argument. */
12408 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12409 tree maxlen, bool ignore,
12410 enum built_in_function fcode)
12414 if (!validate_arg (dest, POINTER_TYPE)
12415 || !validate_arg (src, POINTER_TYPE)
12416 || !validate_arg (size, INTEGER_TYPE))
12419 /* If SRC and DEST are the same (and not volatile), return DEST. */
12420 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12421 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12423 if (! host_integerp (size, 1))
12426 if (! integer_all_onesp (size))
12428 len = c_strlen (src, 1);
12429 if (! len || ! host_integerp (len, 1))
12431 /* If LEN is not constant, try MAXLEN too.
12432 For MAXLEN only allow optimizing into non-_ocs function
12433 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12434 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12436 if (fcode == BUILT_IN_STPCPY_CHK)
12441 /* If return value of __stpcpy_chk is ignored,
12442 optimize into __strcpy_chk. */
12443 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12447 return build_call_expr (fn, 3, dest, src, size);
12450 if (! len || TREE_SIDE_EFFECTS (len))
12453 /* If c_strlen returned something, but not a constant,
12454 transform __strcpy_chk into __memcpy_chk. */
12455 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12459 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12460 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12461 build_call_expr (fn, 4,
12462 dest, src, len, size));
12468 if (! tree_int_cst_lt (maxlen, size))
12472 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12473 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12474 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12478 return build_call_expr (fn, 2, dest, src);
12481 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12482 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12483 length passed as third argument. */
12486 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12491 if (!validate_arg (dest, POINTER_TYPE)
12492 || !validate_arg (src, POINTER_TYPE)
12493 || !validate_arg (len, INTEGER_TYPE)
12494 || !validate_arg (size, INTEGER_TYPE))
12497 if (! host_integerp (size, 1))
12500 if (! integer_all_onesp (size))
12502 if (! host_integerp (len, 1))
12504 /* If LEN is not constant, try MAXLEN too.
12505 For MAXLEN only allow optimizing into non-_ocs function
12506 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12507 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12513 if (tree_int_cst_lt (size, maxlen))
12517 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12518 fn = built_in_decls[BUILT_IN_STRNCPY];
12522 return build_call_expr (fn, 3, dest, src, len);
12525 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12526 are the arguments to the call. */
12529 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12534 if (!validate_arg (dest, POINTER_TYPE)
12535 || !validate_arg (src, POINTER_TYPE)
12536 || !validate_arg (size, INTEGER_TYPE))
12539 p = c_getstr (src);
12540 /* If the SRC parameter is "", return DEST. */
12541 if (p && *p == '\0')
12542 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12544 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12547 /* If __builtin_strcat_chk is used, assume strcat is available. */
12548 fn = built_in_decls[BUILT_IN_STRCAT];
12552 return build_call_expr (fn, 2, dest, src);
12555 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12559 fold_builtin_strncat_chk (tree fndecl,
12560 tree dest, tree src, tree len, tree size)
12565 if (!validate_arg (dest, POINTER_TYPE)
12566 || !validate_arg (src, POINTER_TYPE)
12567 || !validate_arg (size, INTEGER_TYPE)
12568 || !validate_arg (size, INTEGER_TYPE))
12571 p = c_getstr (src);
12572 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12573 if (p && *p == '\0')
12574 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12575 else if (integer_zerop (len))
12576 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12578 if (! host_integerp (size, 1))
12581 if (! integer_all_onesp (size))
12583 tree src_len = c_strlen (src, 1);
12585 && host_integerp (src_len, 1)
12586 && host_integerp (len, 1)
12587 && ! tree_int_cst_lt (len, src_len))
12589 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12590 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12594 return build_call_expr (fn, 3, dest, src, size);
12599 /* If __builtin_strncat_chk is used, assume strncat is available. */
12600 fn = built_in_decls[BUILT_IN_STRNCAT];
12604 return build_call_expr (fn, 3, dest, src, len);
12607 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12608 a normal call should be emitted rather than expanding the function
12609 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12612 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12614 tree dest, size, len, fn, fmt, flag;
12615 const char *fmt_str;
12616 int nargs = call_expr_nargs (exp);
12618 /* Verify the required arguments in the original call. */
12621 dest = CALL_EXPR_ARG (exp, 0);
12622 if (!validate_arg (dest, POINTER_TYPE))
12624 flag = CALL_EXPR_ARG (exp, 1);
12625 if (!validate_arg (flag, INTEGER_TYPE))
12627 size = CALL_EXPR_ARG (exp, 2);
12628 if (!validate_arg (size, INTEGER_TYPE))
12630 fmt = CALL_EXPR_ARG (exp, 3);
12631 if (!validate_arg (fmt, POINTER_TYPE))
12634 if (! host_integerp (size, 1))
12639 if (!init_target_chars ())
12642 /* Check whether the format is a literal string constant. */
12643 fmt_str = c_getstr (fmt);
12644 if (fmt_str != NULL)
12646 /* If the format doesn't contain % args or %%, we know the size. */
12647 if (strchr (fmt_str, target_percent) == 0)
12649 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12650 len = build_int_cstu (size_type_node, strlen (fmt_str));
12652 /* If the format is "%s" and first ... argument is a string literal,
12653 we know the size too. */
12654 else if (fcode == BUILT_IN_SPRINTF_CHK
12655 && strcmp (fmt_str, target_percent_s) == 0)
12661 arg = CALL_EXPR_ARG (exp, 4);
12662 if (validate_arg (arg, POINTER_TYPE))
12664 len = c_strlen (arg, 1);
12665 if (! len || ! host_integerp (len, 1))
12672 if (! integer_all_onesp (size))
12674 if (! len || ! tree_int_cst_lt (len, size))
12678 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12679 or if format doesn't contain % chars or is "%s". */
12680 if (! integer_zerop (flag))
12682 if (fmt_str == NULL)
12684 if (strchr (fmt_str, target_percent) != NULL
12685 && strcmp (fmt_str, target_percent_s))
12689 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12690 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12691 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12695 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12698 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12699 a normal call should be emitted rather than expanding the function
12700 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12701 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12702 passed as second argument. */
12705 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12706 enum built_in_function fcode)
12708 tree dest, size, len, fn, fmt, flag;
12709 const char *fmt_str;
12711 /* Verify the required arguments in the original call. */
12712 if (call_expr_nargs (exp) < 5)
12714 dest = CALL_EXPR_ARG (exp, 0);
12715 if (!validate_arg (dest, POINTER_TYPE))
12717 len = CALL_EXPR_ARG (exp, 1);
12718 if (!validate_arg (len, INTEGER_TYPE))
12720 flag = CALL_EXPR_ARG (exp, 2);
12721 if (!validate_arg (flag, INTEGER_TYPE))
12723 size = CALL_EXPR_ARG (exp, 3);
12724 if (!validate_arg (size, INTEGER_TYPE))
12726 fmt = CALL_EXPR_ARG (exp, 4);
12727 if (!validate_arg (fmt, POINTER_TYPE))
12730 if (! host_integerp (size, 1))
12733 if (! integer_all_onesp (size))
12735 if (! host_integerp (len, 1))
12737 /* If LEN is not constant, try MAXLEN too.
12738 For MAXLEN only allow optimizing into non-_ocs function
12739 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12740 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12746 if (tree_int_cst_lt (size, maxlen))
12750 if (!init_target_chars ())
12753 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12754 or if format doesn't contain % chars or is "%s". */
12755 if (! integer_zerop (flag))
12757 fmt_str = c_getstr (fmt);
12758 if (fmt_str == NULL)
12760 if (strchr (fmt_str, target_percent) != NULL
12761 && strcmp (fmt_str, target_percent_s))
12765 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12767 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12768 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12772 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12775 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12776 FMT and ARG are the arguments to the call; we don't fold cases with
12777 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12779 Return NULL_TREE if no simplification was possible, otherwise return the
12780 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12781 code of the function to be simplified. */
12784 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12785 enum built_in_function fcode)
12787 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12788 const char *fmt_str = NULL;
12790 /* If the return value is used, don't do the transformation. */
12794 /* Verify the required arguments in the original call. */
12795 if (!validate_arg (fmt, POINTER_TYPE))
12798 /* Check whether the format is a literal string constant. */
12799 fmt_str = c_getstr (fmt);
12800 if (fmt_str == NULL)
12803 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12805 /* If we're using an unlocked function, assume the other
12806 unlocked functions exist explicitly. */
12807 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12808 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12812 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12813 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12816 if (!init_target_chars ())
12819 if (strcmp (fmt_str, target_percent_s) == 0
12820 || strchr (fmt_str, target_percent) == NULL)
12824 if (strcmp (fmt_str, target_percent_s) == 0)
12826 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12829 if (!arg || !validate_arg (arg, POINTER_TYPE))
12832 str = c_getstr (arg);
12838 /* The format specifier doesn't contain any '%' characters. */
12839 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12845 /* If the string was "", printf does nothing. */
12846 if (str[0] == '\0')
12847 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12849 /* If the string has length of 1, call putchar. */
12850 if (str[1] == '\0')
12852 /* Given printf("c"), (where c is any one character,)
12853 convert "c"[0] to an int and pass that to the replacement
12855 newarg = build_int_cst (NULL_TREE, str[0]);
12857 call = build_call_expr (fn_putchar, 1, newarg);
12861 /* If the string was "string\n", call puts("string"). */
12862 size_t len = strlen (str);
12863 if ((unsigned char)str[len - 1] == target_newline
12864 && (size_t) (int) len == len
12868 tree offset_node, string_cst;
12870 /* Create a NUL-terminated string that's one char shorter
12871 than the original, stripping off the trailing '\n'. */
12872 newarg = build_string_literal (len, str);
12873 string_cst = string_constant (newarg, &offset_node);
12874 #ifdef ENABLE_CHECKING
12875 gcc_assert (string_cst
12876 && (TREE_STRING_LENGTH (string_cst)
12878 && integer_zerop (offset_node)
12880 TREE_STRING_POINTER (string_cst)[len - 1]
12881 == target_newline);
12883 /* build_string_literal creates a new STRING_CST,
12884 modify it in place to avoid double copying. */
12885 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
12886 newstr[len - 1] = '\0';
12888 call = build_call_expr (fn_puts, 1, newarg);
12891 /* We'd like to arrange to call fputs(string,stdout) here,
12892 but we need stdout and don't have a way to get it yet. */
12897 /* The other optimizations can be done only on the non-va_list variants. */
12898 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12901 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12902 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12904 if (!arg || !validate_arg (arg, POINTER_TYPE))
12907 call = build_call_expr (fn_puts, 1, arg);
12910 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12911 else if (strcmp (fmt_str, target_percent_c) == 0)
12913 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12916 call = build_call_expr (fn_putchar, 1, arg);
12922 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12925 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12926 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12927 more than 3 arguments, and ARG may be null in the 2-argument case.
12929 Return NULL_TREE if no simplification was possible, otherwise return the
12930 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12931 code of the function to be simplified. */
12934 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12935 enum built_in_function fcode)
12937 tree fn_fputc, fn_fputs, call = NULL_TREE;
12938 const char *fmt_str = NULL;
12940 /* If the return value is used, don't do the transformation. */
12944 /* Verify the required arguments in the original call. */
12945 if (!validate_arg (fp, POINTER_TYPE))
12947 if (!validate_arg (fmt, POINTER_TYPE))
12950 /* Check whether the format is a literal string constant. */
12951 fmt_str = c_getstr (fmt);
12952 if (fmt_str == NULL)
12955 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12957 /* If we're using an unlocked function, assume the other
12958 unlocked functions exist explicitly. */
12959 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12960 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12964 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12965 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12968 if (!init_target_chars ())
12971 /* If the format doesn't contain % args or %%, use strcpy. */
12972 if (strchr (fmt_str, target_percent) == NULL)
12974 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12978 /* If the format specifier was "", fprintf does nothing. */
12979 if (fmt_str[0] == '\0')
12981 /* If FP has side-effects, just wait until gimplification is
12983 if (TREE_SIDE_EFFECTS (fp))
12986 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12989 /* When "string" doesn't contain %, replace all cases of
12990 fprintf (fp, string) with fputs (string, fp). The fputs
12991 builtin will take care of special cases like length == 1. */
12993 call = build_call_expr (fn_fputs, 2, fmt, fp);
12996 /* The other optimizations can be done only on the non-va_list variants. */
12997 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13000 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13001 else if (strcmp (fmt_str, target_percent_s) == 0)
13003 if (!arg || !validate_arg (arg, POINTER_TYPE))
13006 call = build_call_expr (fn_fputs, 2, arg, fp);
13009 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13010 else if (strcmp (fmt_str, target_percent_c) == 0)
13012 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13015 call = build_call_expr (fn_fputc, 2, arg, fp);
13020 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
13023 /* Initialize format string characters in the target charset. */
13026 init_target_chars (void)
13031 target_newline = lang_hooks.to_target_charset ('\n');
13032 target_percent = lang_hooks.to_target_charset ('%');
13033 target_c = lang_hooks.to_target_charset ('c');
13034 target_s = lang_hooks.to_target_charset ('s');
13035 if (target_newline == 0 || target_percent == 0 || target_c == 0
13039 target_percent_c[0] = target_percent;
13040 target_percent_c[1] = target_c;
13041 target_percent_c[2] = '\0';
13043 target_percent_s[0] = target_percent;
13044 target_percent_s[1] = target_s;
13045 target_percent_s[2] = '\0';
13047 target_percent_s_newline[0] = target_percent;
13048 target_percent_s_newline[1] = target_s;
13049 target_percent_s_newline[2] = target_newline;
13050 target_percent_s_newline[3] = '\0';
13057 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13058 and no overflow/underflow occurred. INEXACT is true if M was not
13059 exactly calculated. TYPE is the tree type for the result. This
13060 function assumes that you cleared the MPFR flags and then
13061 calculated M to see if anything subsequently set a flag prior to
13062 entering this function. Return NULL_TREE if any checks fail. */
13065 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13067 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13068 overflow/underflow occurred. If -frounding-math, proceed iff the
13069 result of calling FUNC was exact. */
13070 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13071 && (!flag_rounding_math || !inexact))
13073 REAL_VALUE_TYPE rr;
13075 real_from_mpfr (&rr, m, type, GMP_RNDN);
13076 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13077 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13078 but the mpft_t is not, then we underflowed in the
13080 if (real_isfinite (&rr)
13081 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13083 REAL_VALUE_TYPE rmode;
13085 real_convert (&rmode, TYPE_MODE (type), &rr);
13086 /* Proceed iff the specified mode can hold the value. */
13087 if (real_identical (&rmode, &rr))
13088 return build_real (type, rmode);
13094 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13095 FUNC on it and return the resulting value as a tree with type TYPE.
13096 If MIN and/or MAX are not NULL, then the supplied ARG must be
13097 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13098 acceptable values, otherwise they are not. The mpfr precision is
13099 set to the precision of TYPE. We assume that function FUNC returns
13100 zero if the result could be calculated exactly within the requested
13104 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13105 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13108 tree result = NULL_TREE;
13112 /* To proceed, MPFR must exactly represent the target floating point
13113 format, which only happens when the target base equals two. */
13114 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13115 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13117 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13119 if (real_isfinite (ra)
13120 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13121 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13123 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13124 const int prec = fmt->p;
13125 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13129 mpfr_init2 (m, prec);
13130 mpfr_from_real (m, ra, GMP_RNDN);
13131 mpfr_clear_flags ();
13132 inexact = func (m, m, rnd);
13133 result = do_mpfr_ckconv (m, type, inexact);
13141 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13142 FUNC on it and return the resulting value as a tree with type TYPE.
13143 The mpfr precision is set to the precision of TYPE. We assume that
13144 function FUNC returns zero if the result could be calculated
13145 exactly within the requested precision. */
13148 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13149 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13151 tree result = NULL_TREE;
13156 /* To proceed, MPFR must exactly represent the target floating point
13157 format, which only happens when the target base equals two. */
13158 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13159 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13160 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13162 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13163 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13165 if (real_isfinite (ra1) && real_isfinite (ra2))
13167 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13168 const int prec = fmt->p;
13169 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13173 mpfr_inits2 (prec, m1, m2, NULL);
13174 mpfr_from_real (m1, ra1, GMP_RNDN);
13175 mpfr_from_real (m2, ra2, GMP_RNDN);
13176 mpfr_clear_flags ();
13177 inexact = func (m1, m1, m2, rnd);
13178 result = do_mpfr_ckconv (m1, type, inexact);
13179 mpfr_clears (m1, m2, NULL);
13186 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13187 FUNC on it and return the resulting value as a tree with type TYPE.
13188 The mpfr precision is set to the precision of TYPE. We assume that
13189 function FUNC returns zero if the result could be calculated
13190 exactly within the requested precision. */
13193 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13194 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13196 tree result = NULL_TREE;
13202 /* To proceed, MPFR must exactly represent the target floating point
13203 format, which only happens when the target base equals two. */
13204 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13205 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13206 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13207 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13209 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13210 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13211 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13213 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13215 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13216 const int prec = fmt->p;
13217 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13221 mpfr_inits2 (prec, m1, m2, m3, NULL);
13222 mpfr_from_real (m1, ra1, GMP_RNDN);
13223 mpfr_from_real (m2, ra2, GMP_RNDN);
13224 mpfr_from_real (m3, ra3, GMP_RNDN);
13225 mpfr_clear_flags ();
13226 inexact = func (m1, m1, m2, m3, rnd);
13227 result = do_mpfr_ckconv (m1, type, inexact);
13228 mpfr_clears (m1, m2, m3, NULL);
13235 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13236 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13237 If ARG_SINP and ARG_COSP are NULL then the result is returned
13238 as a complex value.
13239 The type is taken from the type of ARG and is used for setting the
13240 precision of the calculation and results. */
13243 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13245 tree const type = TREE_TYPE (arg);
13246 tree result = NULL_TREE;
13250 /* To proceed, MPFR must exactly represent the target floating point
13251 format, which only happens when the target base equals two. */
13252 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13253 && TREE_CODE (arg) == REAL_CST
13254 && !TREE_OVERFLOW (arg))
13256 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13258 if (real_isfinite (ra))
13260 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13261 const int prec = fmt->p;
13262 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13263 tree result_s, result_c;
13267 mpfr_inits2 (prec, m, ms, mc, NULL);
13268 mpfr_from_real (m, ra, GMP_RNDN);
13269 mpfr_clear_flags ();
13270 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13271 result_s = do_mpfr_ckconv (ms, type, inexact);
13272 result_c = do_mpfr_ckconv (mc, type, inexact);
13273 mpfr_clears (m, ms, mc, NULL);
13274 if (result_s && result_c)
13276 /* If we are to return in a complex value do so. */
13277 if (!arg_sinp && !arg_cosp)
13278 return build_complex (build_complex_type (type),
13279 result_c, result_s);
13281 /* Dereference the sin/cos pointer arguments. */
13282 arg_sinp = build_fold_indirect_ref (arg_sinp);
13283 arg_cosp = build_fold_indirect_ref (arg_cosp);
13284 /* Proceed if valid pointer type were passed in. */
13285 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13286 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13288 /* Set the values. */
13289 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13291 TREE_SIDE_EFFECTS (result_s) = 1;
13292 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13294 TREE_SIDE_EFFECTS (result_c) = 1;
13295 /* Combine the assignments into a compound expr. */
13296 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13297 result_s, result_c));
13305 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13306 two-argument mpfr order N Bessel function FUNC on them and return
13307 the resulting value as a tree with type TYPE. The mpfr precision
13308 is set to the precision of TYPE. We assume that function FUNC
13309 returns zero if the result could be calculated exactly within the
13310 requested precision. */
13312 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13313 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13314 const REAL_VALUE_TYPE *min, bool inclusive)
13316 tree result = NULL_TREE;
13321 /* To proceed, MPFR must exactly represent the target floating point
13322 format, which only happens when the target base equals two. */
13323 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13324 && host_integerp (arg1, 0)
13325 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13327 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13328 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13331 && real_isfinite (ra)
13332 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13334 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13335 const int prec = fmt->p;
13336 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13340 mpfr_init2 (m, prec);
13341 mpfr_from_real (m, ra, GMP_RNDN);
13342 mpfr_clear_flags ();
13343 inexact = func (m, n, m, rnd);
13344 result = do_mpfr_ckconv (m, type, inexact);
13352 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13353 the pointer *(ARG_QUO) and return the result. The type is taken
13354 from the type of ARG0 and is used for setting the precision of the
13355 calculation and results. */
13358 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13360 tree const type = TREE_TYPE (arg0);
13361 tree result = NULL_TREE;
13366 /* To proceed, MPFR must exactly represent the target floating point
13367 format, which only happens when the target base equals two. */
13368 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13369 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13370 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13372 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13373 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13375 if (real_isfinite (ra0) && real_isfinite (ra1))
13377 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13378 const int prec = fmt->p;
13379 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13384 mpfr_inits2 (prec, m0, m1, NULL);
13385 mpfr_from_real (m0, ra0, GMP_RNDN);
13386 mpfr_from_real (m1, ra1, GMP_RNDN);
13387 mpfr_clear_flags ();
13388 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13389 /* Remquo is independent of the rounding mode, so pass
13390 inexact=0 to do_mpfr_ckconv(). */
13391 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13392 mpfr_clears (m0, m1, NULL);
13395 /* MPFR calculates quo in the host's long so it may
13396 return more bits in quo than the target int can hold
13397 if sizeof(host long) > sizeof(target int). This can
13398 happen even for native compilers in LP64 mode. In
13399 these cases, modulo the quo value with the largest
13400 number that the target int can hold while leaving one
13401 bit for the sign. */
13402 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13403 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13405 /* Dereference the quo pointer argument. */
13406 arg_quo = build_fold_indirect_ref (arg_quo);
13407 /* Proceed iff a valid pointer type was passed in. */
13408 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13410 /* Set the value. */
13411 tree result_quo = fold_build2 (MODIFY_EXPR,
13412 TREE_TYPE (arg_quo), arg_quo,
13413 build_int_cst (NULL, integer_quo));
13414 TREE_SIDE_EFFECTS (result_quo) = 1;
13415 /* Combine the quo assignment with the rem. */
13416 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13417 result_quo, result_rem));
13425 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13426 resulting value as a tree with type TYPE. The mpfr precision is
13427 set to the precision of TYPE. We assume that this mpfr function
13428 returns zero if the result could be calculated exactly within the
13429 requested precision. In addition, the integer pointer represented
13430 by ARG_SG will be dereferenced and set to the appropriate signgam
13434 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13436 tree result = NULL_TREE;
13440 /* To proceed, MPFR must exactly represent the target floating point
13441 format, which only happens when the target base equals two. Also
13442 verify ARG is a constant and that ARG_SG is an int pointer. */
13443 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13444 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13445 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13446 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13448 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13450 /* In addition to NaN and Inf, the argument cannot be zero or a
13451 negative integer. */
13452 if (real_isfinite (ra)
13453 && ra->cl != rvc_zero
13454 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13456 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13457 const int prec = fmt->p;
13458 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13463 mpfr_init2 (m, prec);
13464 mpfr_from_real (m, ra, GMP_RNDN);
13465 mpfr_clear_flags ();
13466 inexact = mpfr_lgamma (m, &sg, m, rnd);
13467 result_lg = do_mpfr_ckconv (m, type, inexact);
13473 /* Dereference the arg_sg pointer argument. */
13474 arg_sg = build_fold_indirect_ref (arg_sg);
13475 /* Assign the signgam value into *arg_sg. */
13476 result_sg = fold_build2 (MODIFY_EXPR,
13477 TREE_TYPE (arg_sg), arg_sg,
13478 build_int_cst (NULL, sg));
13479 TREE_SIDE_EFFECTS (result_sg) = 1;
13480 /* Combine the signgam assignment with the lgamma result. */
13481 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13482 result_sg, result_lg));
13491 The functions below provide an alternate interface for folding
13492 builtin function calls presented as GIMPLE_CALL statements rather
13493 than as CALL_EXPRs. The folded result is still expressed as a
13494 tree. There is too much code duplication in the handling of
13495 varargs functions, and a more intrusive re-factoring would permit
13496 better sharing of code between the tree and statement-based
13497 versions of these functions. */
13499 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13500 along with N new arguments specified as the "..." parameters. SKIP
13501 is the number of arguments in STMT to be omitted. This function is used
13502 to do varargs-to-varargs transformations. */
13505 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13507 int oldnargs = gimple_call_num_args (stmt);
13508 int nargs = oldnargs - skip + n;
13509 tree fntype = TREE_TYPE (fndecl);
13510 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13515 buffer = XALLOCAVEC (tree, nargs);
13517 for (i = 0; i < n; i++)
13518 buffer[i] = va_arg (ap, tree);
13520 for (j = skip; j < oldnargs; j++, i++)
13521 buffer[i] = gimple_call_arg (stmt, j);
13523 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
13526 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13527 a normal call should be emitted rather than expanding the function
13528 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13531 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13533 tree dest, size, len, fn, fmt, flag;
13534 const char *fmt_str;
13535 int nargs = gimple_call_num_args (stmt);
13537 /* Verify the required arguments in the original call. */
13540 dest = gimple_call_arg (stmt, 0);
13541 if (!validate_arg (dest, POINTER_TYPE))
13543 flag = gimple_call_arg (stmt, 1);
13544 if (!validate_arg (flag, INTEGER_TYPE))
13546 size = gimple_call_arg (stmt, 2);
13547 if (!validate_arg (size, INTEGER_TYPE))
13549 fmt = gimple_call_arg (stmt, 3);
13550 if (!validate_arg (fmt, POINTER_TYPE))
13553 if (! host_integerp (size, 1))
13558 if (!init_target_chars ())
13561 /* Check whether the format is a literal string constant. */
13562 fmt_str = c_getstr (fmt);
13563 if (fmt_str != NULL)
13565 /* If the format doesn't contain % args or %%, we know the size. */
13566 if (strchr (fmt_str, target_percent) == 0)
13568 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13569 len = build_int_cstu (size_type_node, strlen (fmt_str));
13571 /* If the format is "%s" and first ... argument is a string literal,
13572 we know the size too. */
13573 else if (fcode == BUILT_IN_SPRINTF_CHK
13574 && strcmp (fmt_str, target_percent_s) == 0)
13580 arg = gimple_call_arg (stmt, 4);
13581 if (validate_arg (arg, POINTER_TYPE))
13583 len = c_strlen (arg, 1);
13584 if (! len || ! host_integerp (len, 1))
13591 if (! integer_all_onesp (size))
13593 if (! len || ! tree_int_cst_lt (len, size))
13597 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13598 or if format doesn't contain % chars or is "%s". */
13599 if (! integer_zerop (flag))
13601 if (fmt_str == NULL)
13603 if (strchr (fmt_str, target_percent) != NULL
13604 && strcmp (fmt_str, target_percent_s))
13608 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13609 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13610 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13614 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13617 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13618 a normal call should be emitted rather than expanding the function
13619 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13620 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13621 passed as second argument. */
13624 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13625 enum built_in_function fcode)
13627 tree dest, size, len, fn, fmt, flag;
13628 const char *fmt_str;
13630 /* Verify the required arguments in the original call. */
13631 if (gimple_call_num_args (stmt) < 5)
13633 dest = gimple_call_arg (stmt, 0);
13634 if (!validate_arg (dest, POINTER_TYPE))
13636 len = gimple_call_arg (stmt, 1);
13637 if (!validate_arg (len, INTEGER_TYPE))
13639 flag = gimple_call_arg (stmt, 2);
13640 if (!validate_arg (flag, INTEGER_TYPE))
13642 size = gimple_call_arg (stmt, 3);
13643 if (!validate_arg (size, INTEGER_TYPE))
13645 fmt = gimple_call_arg (stmt, 4);
13646 if (!validate_arg (fmt, POINTER_TYPE))
13649 if (! host_integerp (size, 1))
13652 if (! integer_all_onesp (size))
13654 if (! host_integerp (len, 1))
13656 /* If LEN is not constant, try MAXLEN too.
13657 For MAXLEN only allow optimizing into non-_ocs function
13658 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13659 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13665 if (tree_int_cst_lt (size, maxlen))
13669 if (!init_target_chars ())
13672 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13673 or if format doesn't contain % chars or is "%s". */
13674 if (! integer_zerop (flag))
13676 fmt_str = c_getstr (fmt);
13677 if (fmt_str == NULL)
13679 if (strchr (fmt_str, target_percent) != NULL
13680 && strcmp (fmt_str, target_percent_s))
13684 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13686 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13687 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13691 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13694 /* Builtins with folding operations that operate on "..." arguments
13695 need special handling; we need to store the arguments in a convenient
13696 data structure before attempting any folding. Fortunately there are
13697 only a few builtins that fall into this category. FNDECL is the
13698 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13699 result of the function call is ignored. */
13702 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13704 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13705 tree ret = NULL_TREE;
13709 case BUILT_IN_SPRINTF_CHK:
13710 case BUILT_IN_VSPRINTF_CHK:
13711 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13714 case BUILT_IN_SNPRINTF_CHK:
13715 case BUILT_IN_VSNPRINTF_CHK:
13716 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13723 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13724 TREE_NO_WARNING (ret) = 1;
13730 /* A wrapper function for builtin folding that prevents warnings for
13731 "statement without effect" and the like, caused by removing the
13732 call node earlier than the warning is generated. */
13735 fold_call_stmt (gimple stmt, bool ignore)
13737 tree ret = NULL_TREE;
13738 tree fndecl = gimple_call_fndecl (stmt);
13740 && TREE_CODE (fndecl) == FUNCTION_DECL
13741 && DECL_BUILT_IN (fndecl)
13742 && !gimple_call_va_arg_pack_p (stmt))
13744 int nargs = gimple_call_num_args (stmt);
13746 if (avoid_folding_inline_builtin (fndecl))
13748 /* FIXME: Don't use a list in this interface. */
13749 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13751 tree arglist = NULL_TREE;
13753 for (i = nargs - 1; i >= 0; i--)
13754 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13755 return targetm.fold_builtin (fndecl, arglist, ignore);
13759 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13761 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13763 for (i = 0; i < nargs; i++)
13764 args[i] = gimple_call_arg (stmt, i);
13765 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13768 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13771 /* Propagate location information from original call to
13772 expansion of builtin. Otherwise things like
13773 maybe_emit_chk_warning, that operate on the expansion
13774 of a builtin, will use the wrong location information. */
13775 if (gimple_has_location (stmt))
13777 tree realret = ret;
13778 if (TREE_CODE (ret) == NOP_EXPR)
13779 realret = TREE_OPERAND (ret, 0);
13780 if (CAN_HAVE_LOCATION_P (realret)
13781 && !EXPR_HAS_LOCATION (realret))
13782 SET_EXPR_LOCATION (realret, gimple_location (stmt));