1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
62 /* Define the names of the builtin function types and codes. */
63 const char *const built_in_class_names[4]
64 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
67 const char * built_in_names[(int) END_BUILTINS] =
69 #include "builtins.def"
73 /* Setup an array of _DECL trees, make sure each element is
74 initialized to NULL_TREE. */
75 tree built_in_decls[(int) END_BUILTINS];
76 /* Declarations used when constructing the builtin implicitly in the compiler.
77 It may be NULL_TREE when this is invalid (for instance runtime is not
78 required to implement the function call in all cases). */
79 tree implicit_built_in_decls[(int) END_BUILTINS];
81 static const char *c_getstr (tree);
82 static rtx c_readstr (const char *, enum machine_mode);
83 static int target_char_cast (tree, char *);
84 static rtx get_memory_rtx (tree, tree);
85 static int apply_args_size (void);
86 static int apply_result_size (void);
87 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
88 static rtx result_vector (int, rtx);
90 static void expand_builtin_update_setjmp_buf (rtx);
91 static void expand_builtin_prefetch (tree);
92 static rtx expand_builtin_apply_args (void);
93 static rtx expand_builtin_apply_args_1 (void);
94 static rtx expand_builtin_apply (rtx, rtx, rtx);
95 static void expand_builtin_return (rtx);
96 static enum type_class type_to_class (tree);
97 static rtx expand_builtin_classify_type (tree);
98 static void expand_errno_check (tree, rtx);
99 static rtx expand_builtin_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_args_info (tree);
108 static rtx expand_builtin_next_arg (void);
109 static rtx expand_builtin_va_start (tree);
110 static rtx expand_builtin_va_end (tree);
111 static rtx expand_builtin_va_copy (tree);
112 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
124 enum machine_mode, int);
125 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
126 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_bcopy (tree, int);
129 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
131 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
133 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
134 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_alloca (tree, rtx);
143 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
144 static rtx expand_builtin_frame_address (tree, tree);
145 static rtx expand_builtin_fputs (tree, rtx, bool);
146 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
147 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
148 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
149 static tree stabilize_va_list (tree, int);
150 static rtx expand_builtin_expect (tree, rtx);
151 static tree fold_builtin_constant_p (tree);
152 static tree fold_builtin_expect (tree, tree);
153 static tree fold_builtin_classify_type (tree);
154 static tree fold_builtin_strlen (tree, tree);
155 static tree fold_builtin_inf (tree, int);
156 static tree fold_builtin_nan (tree, tree, int);
157 static tree rewrite_call_expr (tree, int, tree, int, ...);
158 static bool validate_arg (const_tree, enum tree_code code);
159 static bool integer_valued_real_p (tree);
160 static tree fold_trunc_transparent_mathfn (tree, tree);
161 static bool readonly_data_expr (tree);
162 static rtx expand_builtin_fabs (tree, rtx, rtx);
163 static rtx expand_builtin_signbit (tree, rtx);
164 static tree fold_builtin_sqrt (tree, tree);
165 static tree fold_builtin_cbrt (tree, tree);
166 static tree fold_builtin_pow (tree, tree, tree, tree);
167 static tree fold_builtin_powi (tree, tree, tree, tree);
168 static tree fold_builtin_cos (tree, tree, tree);
169 static tree fold_builtin_cosh (tree, tree, tree);
170 static tree fold_builtin_tan (tree, tree);
171 static tree fold_builtin_trunc (tree, tree);
172 static tree fold_builtin_floor (tree, tree);
173 static tree fold_builtin_ceil (tree, tree);
174 static tree fold_builtin_round (tree, tree);
175 static tree fold_builtin_int_roundingfn (tree, tree);
176 static tree fold_builtin_bitop (tree, tree);
177 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
178 static tree fold_builtin_strchr (tree, tree, tree);
179 static tree fold_builtin_memchr (tree, tree, tree, tree);
180 static tree fold_builtin_memcmp (tree, tree, tree);
181 static tree fold_builtin_strcmp (tree, tree);
182 static tree fold_builtin_strncmp (tree, tree, tree);
183 static tree fold_builtin_signbit (tree, tree);
184 static tree fold_builtin_copysign (tree, tree, tree, tree);
185 static tree fold_builtin_isascii (tree);
186 static tree fold_builtin_toascii (tree);
187 static tree fold_builtin_isdigit (tree);
188 static tree fold_builtin_fabs (tree, tree);
189 static tree fold_builtin_abs (tree, tree);
190 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
192 static tree fold_builtin_n (tree, tree *, int, bool);
193 static tree fold_builtin_0 (tree, bool);
194 static tree fold_builtin_1 (tree, tree, bool);
195 static tree fold_builtin_2 (tree, tree, tree, bool);
196 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
197 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
198 static tree fold_builtin_varargs (tree, tree, bool);
200 static tree fold_builtin_strpbrk (tree, tree, tree);
201 static tree fold_builtin_strstr (tree, tree, tree);
202 static tree fold_builtin_strrchr (tree, tree, tree);
203 static tree fold_builtin_strcat (tree, tree);
204 static tree fold_builtin_strncat (tree, tree, tree);
205 static tree fold_builtin_strspn (tree, tree);
206 static tree fold_builtin_strcspn (tree, tree);
207 static tree fold_builtin_sprintf (tree, tree, tree, int);
209 static rtx expand_builtin_object_size (tree);
210 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
211 enum built_in_function);
212 static void maybe_emit_chk_warning (tree, enum built_in_function);
213 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
214 static void maybe_emit_free_warning (tree);
215 static tree fold_builtin_object_size (tree, tree);
216 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
217 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
218 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
219 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
220 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
221 enum built_in_function);
222 static bool init_target_chars (void);
224 static unsigned HOST_WIDE_INT target_newline;
225 static unsigned HOST_WIDE_INT target_percent;
226 static unsigned HOST_WIDE_INT target_c;
227 static unsigned HOST_WIDE_INT target_s;
228 static char target_percent_c[3];
229 static char target_percent_s[3];
230 static char target_percent_s_newline[4];
231 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
232 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
233 static tree do_mpfr_arg2 (tree, tree, tree,
234 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
235 static tree do_mpfr_arg3 (tree, tree, tree, tree,
236 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
237 static tree do_mpfr_sincos (tree, tree, tree);
238 static tree do_mpfr_bessel_n (tree, tree, tree,
239 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
240 const REAL_VALUE_TYPE *, bool);
241 static tree do_mpfr_remquo (tree, tree, tree);
242 static tree do_mpfr_lgamma_r (tree, tree, tree);
244 /* Return true if NODE should be considered for inline expansion regardless
245 of the optimization level. This means whenever a function is invoked with
246 its "internal" name, which normally contains the prefix "__builtin". */
248 static bool called_as_built_in (tree node)
250 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
251 if (strncmp (name, "__builtin_", 10) == 0)
253 if (strncmp (name, "__sync_", 7) == 0)
258 /* Return the alignment in bits of EXP, an object.
259 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
260 guessed alignment e.g. from type alignment. */
263 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
268 if (handled_component_p (exp))
270 HOST_WIDE_INT bitsize, bitpos;
272 enum machine_mode mode;
273 int unsignedp, volatilep;
275 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
276 &mode, &unsignedp, &volatilep, true);
278 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
283 if (TREE_CODE (offset) == PLUS_EXPR)
285 next_offset = TREE_OPERAND (offset, 0);
286 offset = TREE_OPERAND (offset, 1);
290 if (host_integerp (offset, 1))
292 /* Any overflow in calculating offset_bits won't change
295 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
298 inner = MIN (inner, (offset_bits & -offset_bits));
300 else if (TREE_CODE (offset) == MULT_EXPR
301 && host_integerp (TREE_OPERAND (offset, 1), 1))
303 /* Any overflow in calculating offset_factor won't change
305 unsigned offset_factor
306 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
310 inner = MIN (inner, (offset_factor & -offset_factor));
314 inner = MIN (inner, BITS_PER_UNIT);
317 offset = next_offset;
321 align = MIN (inner, DECL_ALIGN (exp));
322 #ifdef CONSTANT_ALIGNMENT
323 else if (CONSTANT_CLASS_P (exp))
324 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
326 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
327 || TREE_CODE (exp) == INDIRECT_REF)
328 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
330 align = MIN (align, inner);
331 return MIN (align, max_align);
334 /* Return the alignment in bits of EXP, a pointer valued expression.
335 But don't return more than MAX_ALIGN no matter what.
336 The alignment returned is, by default, the alignment of the thing that
337 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
339 Otherwise, look at the expression to see if we can do better, i.e., if the
340 expression is actually pointing at an object whose alignment is tighter. */
343 get_pointer_alignment (tree exp, unsigned int max_align)
345 unsigned int align, inner;
347 /* We rely on TER to compute accurate alignment information. */
348 if (!(optimize && flag_tree_ter))
351 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
354 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
355 align = MIN (align, max_align);
359 switch (TREE_CODE (exp))
362 exp = TREE_OPERAND (exp, 0);
363 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
366 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
367 align = MIN (inner, max_align);
370 case POINTER_PLUS_EXPR:
371 /* If sum of pointer + int, restrict our maximum alignment to that
372 imposed by the integer. If not, we can't do any better than
374 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
377 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
378 & (max_align / BITS_PER_UNIT - 1))
382 exp = TREE_OPERAND (exp, 0);
386 /* See what we are pointing at and look at its alignment. */
387 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
395 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
396 way, because it could contain a zero byte in the middle.
397 TREE_STRING_LENGTH is the size of the character array, not the string.
399 ONLY_VALUE should be nonzero if the result is not going to be emitted
400 into the instruction stream and zero if it is going to be expanded.
401 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
402 is returned, otherwise NULL, since
403 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
404 evaluate the side-effects.
406 The value returned is of type `ssizetype'.
408 Unfortunately, string_constant can't access the values of const char
409 arrays with initializers, so neither can we do so here. */
412 c_strlen (tree src, int only_value)
415 HOST_WIDE_INT offset;
420 if (TREE_CODE (src) == COND_EXPR
421 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
425 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
426 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
427 if (tree_int_cst_equal (len1, len2))
431 if (TREE_CODE (src) == COMPOUND_EXPR
432 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
433 return c_strlen (TREE_OPERAND (src, 1), only_value);
435 src = string_constant (src, &offset_node);
439 max = TREE_STRING_LENGTH (src) - 1;
440 ptr = TREE_STRING_POINTER (src);
442 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
444 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
445 compute the offset to the following null if we don't know where to
446 start searching for it. */
449 for (i = 0; i < max; i++)
453 /* We don't know the starting offset, but we do know that the string
454 has no internal zero bytes. We can assume that the offset falls
455 within the bounds of the string; otherwise, the programmer deserves
456 what he gets. Subtract the offset from the length of the string,
457 and return that. This would perhaps not be valid if we were dealing
458 with named arrays in addition to literal string constants. */
460 return size_diffop (size_int (max), offset_node);
463 /* We have a known offset into the string. Start searching there for
464 a null character if we can represent it as a single HOST_WIDE_INT. */
465 if (offset_node == 0)
467 else if (! host_integerp (offset_node, 0))
470 offset = tree_low_cst (offset_node, 0);
472 /* If the offset is known to be out of bounds, warn, and call strlen at
474 if (offset < 0 || offset > max)
476 /* Suppress multiple warnings for propagated constant strings. */
477 if (! TREE_NO_WARNING (src))
479 warning (0, "offset outside bounds of constant string");
480 TREE_NO_WARNING (src) = 1;
485 /* Use strlen to search for the first zero byte. Since any strings
486 constructed with build_string will have nulls appended, we win even
487 if we get handed something like (char[4])"abcd".
489 Since OFFSET is our starting index into the string, no further
490 calculation is needed. */
491 return ssize_int (strlen (ptr + offset));
494 /* Return a char pointer for a C string if it is a string constant
495 or sum of string constant and integer constant. */
502 src = string_constant (src, &offset_node);
506 if (offset_node == 0)
507 return TREE_STRING_POINTER (src);
508 else if (!host_integerp (offset_node, 1)
509 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
512 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
515 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
516 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
519 c_readstr (const char *str, enum machine_mode mode)
525 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
530 for (i = 0; i < GET_MODE_SIZE (mode); i++)
533 if (WORDS_BIG_ENDIAN)
534 j = GET_MODE_SIZE (mode) - i - 1;
535 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
536 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
537 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
539 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
542 ch = (unsigned char) str[i];
543 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
545 return immed_double_const (c[0], c[1], mode);
548 /* Cast a target constant CST to target CHAR and if that value fits into
549 host char type, return zero and put that value into variable pointed to by
553 target_char_cast (tree cst, char *p)
555 unsigned HOST_WIDE_INT val, hostval;
557 if (!host_integerp (cst, 1)
558 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
561 val = tree_low_cst (cst, 1);
562 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
563 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
566 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
567 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
576 /* Similar to save_expr, but assumes that arbitrary code is not executed
577 in between the multiple evaluations. In particular, we assume that a
578 non-addressable local variable will not be modified. */
581 builtin_save_expr (tree exp)
583 if (TREE_ADDRESSABLE (exp) == 0
584 && (TREE_CODE (exp) == PARM_DECL
585 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
588 return save_expr (exp);
591 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
592 times to get the address of either a higher stack frame, or a return
593 address located within it (depending on FNDECL_CODE). */
596 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
600 #ifdef INITIAL_FRAME_ADDRESS_RTX
601 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
605 /* For a zero count with __builtin_return_address, we don't care what
606 frame address we return, because target-specific definitions will
607 override us. Therefore frame pointer elimination is OK, and using
608 the soft frame pointer is OK.
610 For a nonzero count, or a zero count with __builtin_frame_address,
611 we require a stable offset from the current frame pointer to the
612 previous one, so we must use the hard frame pointer, and
613 we must disable frame pointer elimination. */
614 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
615 tem = frame_pointer_rtx;
618 tem = hard_frame_pointer_rtx;
620 /* Tell reload not to eliminate the frame pointer. */
621 crtl->accesses_prior_frames = 1;
625 /* Some machines need special handling before we can access
626 arbitrary frames. For example, on the SPARC, we must first flush
627 all register windows to the stack. */
628 #ifdef SETUP_FRAME_ADDRESSES
630 SETUP_FRAME_ADDRESSES ();
633 /* On the SPARC, the return address is not in the frame, it is in a
634 register. There is no way to access it off of the current frame
635 pointer, but it can be accessed off the previous frame pointer by
636 reading the value from the register window save area. */
637 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
638 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
642 /* Scan back COUNT frames to the specified frame. */
643 for (i = 0; i < count; i++)
645 /* Assume the dynamic chain pointer is in the word that the
646 frame address points to, unless otherwise specified. */
647 #ifdef DYNAMIC_CHAIN_ADDRESS
648 tem = DYNAMIC_CHAIN_ADDRESS (tem);
650 tem = memory_address (Pmode, tem);
651 tem = gen_frame_mem (Pmode, tem);
652 tem = copy_to_reg (tem);
655 /* For __builtin_frame_address, return what we've got. But, on
656 the SPARC for example, we may have to add a bias. */
657 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
658 #ifdef FRAME_ADDR_RTX
659 return FRAME_ADDR_RTX (tem);
664 /* For __builtin_return_address, get the return address from that frame. */
665 #ifdef RETURN_ADDR_RTX
666 tem = RETURN_ADDR_RTX (count, tem);
668 tem = memory_address (Pmode,
669 plus_constant (tem, GET_MODE_SIZE (Pmode)));
670 tem = gen_frame_mem (Pmode, tem);
675 /* Alias set used for setjmp buffer. */
676 static alias_set_type setjmp_alias_set = -1;
678 /* Construct the leading half of a __builtin_setjmp call. Control will
679 return to RECEIVER_LABEL. This is also called directly by the SJLJ
680 exception handling code. */
683 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
685 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
689 if (setjmp_alias_set == -1)
690 setjmp_alias_set = new_alias_set ();
692 buf_addr = convert_memory_address (Pmode, buf_addr);
694 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
696 /* We store the frame pointer and the address of receiver_label in
697 the buffer and use the rest of it for the stack save area, which
698 is machine-dependent. */
700 mem = gen_rtx_MEM (Pmode, buf_addr);
701 set_mem_alias_set (mem, setjmp_alias_set);
702 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
704 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
705 set_mem_alias_set (mem, setjmp_alias_set);
707 emit_move_insn (validize_mem (mem),
708 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
710 stack_save = gen_rtx_MEM (sa_mode,
711 plus_constant (buf_addr,
712 2 * GET_MODE_SIZE (Pmode)));
713 set_mem_alias_set (stack_save, setjmp_alias_set);
714 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
716 /* If there is further processing to do, do it. */
717 #ifdef HAVE_builtin_setjmp_setup
718 if (HAVE_builtin_setjmp_setup)
719 emit_insn (gen_builtin_setjmp_setup (buf_addr));
722 /* Tell optimize_save_area_alloca that extra work is going to
723 need to go on during alloca. */
724 cfun->calls_setjmp = 1;
726 /* We have a nonlocal label. */
727 cfun->has_nonlocal_label = 1;
730 /* Construct the trailing part of a __builtin_setjmp call. This is
731 also called directly by the SJLJ exception handling code. */
734 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
736 /* Clobber the FP when we get here, so we have to make sure it's
737 marked as used by this function. */
738 emit_use (hard_frame_pointer_rtx);
740 /* Mark the static chain as clobbered here so life information
741 doesn't get messed up for it. */
742 emit_clobber (static_chain_rtx);
744 /* Now put in the code to restore the frame pointer, and argument
745 pointer, if needed. */
746 #ifdef HAVE_nonlocal_goto
747 if (! HAVE_nonlocal_goto)
750 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
751 /* This might change the hard frame pointer in ways that aren't
752 apparent to early optimization passes, so force a clobber. */
753 emit_clobber (hard_frame_pointer_rtx);
756 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
757 if (fixed_regs[ARG_POINTER_REGNUM])
759 #ifdef ELIMINABLE_REGS
761 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
763 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
764 if (elim_regs[i].from == ARG_POINTER_REGNUM
765 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
768 if (i == ARRAY_SIZE (elim_regs))
771 /* Now restore our arg pointer from the address at which it
772 was saved in our stack frame. */
773 emit_move_insn (crtl->args.internal_arg_pointer,
774 copy_to_reg (get_arg_pointer_save_area ()));
779 #ifdef HAVE_builtin_setjmp_receiver
780 if (HAVE_builtin_setjmp_receiver)
781 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
784 #ifdef HAVE_nonlocal_goto_receiver
785 if (HAVE_nonlocal_goto_receiver)
786 emit_insn (gen_nonlocal_goto_receiver ());
791 /* We must not allow the code we just generated to be reordered by
792 scheduling. Specifically, the update of the frame pointer must
793 happen immediately, not later. */
794 emit_insn (gen_blockage ());
797 /* __builtin_longjmp is passed a pointer to an array of five words (not
798 all will be used on all machines). It operates similarly to the C
799 library function of the same name, but is more efficient. Much of
800 the code below is copied from the handling of non-local gotos. */
803 expand_builtin_longjmp (rtx buf_addr, rtx value)
805 rtx fp, lab, stack, insn, last;
806 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
808 /* DRAP is needed for stack realign if longjmp is expanded to current
810 if (SUPPORTS_STACK_ALIGNMENT)
811 crtl->need_drap = true;
813 if (setjmp_alias_set == -1)
814 setjmp_alias_set = new_alias_set ();
816 buf_addr = convert_memory_address (Pmode, buf_addr);
818 buf_addr = force_reg (Pmode, buf_addr);
820 /* We used to store value in static_chain_rtx, but that fails if pointers
821 are smaller than integers. We instead require that the user must pass
822 a second argument of 1, because that is what builtin_setjmp will
823 return. This also makes EH slightly more efficient, since we are no
824 longer copying around a value that we don't care about. */
825 gcc_assert (value == const1_rtx);
827 last = get_last_insn ();
828 #ifdef HAVE_builtin_longjmp
829 if (HAVE_builtin_longjmp)
830 emit_insn (gen_builtin_longjmp (buf_addr));
834 fp = gen_rtx_MEM (Pmode, buf_addr);
835 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
836 GET_MODE_SIZE (Pmode)));
838 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
839 2 * GET_MODE_SIZE (Pmode)));
840 set_mem_alias_set (fp, setjmp_alias_set);
841 set_mem_alias_set (lab, setjmp_alias_set);
842 set_mem_alias_set (stack, setjmp_alias_set);
844 /* Pick up FP, label, and SP from the block and jump. This code is
845 from expand_goto in stmt.c; see there for detailed comments. */
846 #ifdef HAVE_nonlocal_goto
847 if (HAVE_nonlocal_goto)
848 /* We have to pass a value to the nonlocal_goto pattern that will
849 get copied into the static_chain pointer, but it does not matter
850 what that value is, because builtin_setjmp does not use it. */
851 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
855 lab = copy_to_reg (lab);
857 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
858 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
860 emit_move_insn (hard_frame_pointer_rtx, fp);
861 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
863 emit_use (hard_frame_pointer_rtx);
864 emit_use (stack_pointer_rtx);
865 emit_indirect_jump (lab);
869 /* Search backwards and mark the jump insn as a non-local goto.
870 Note that this precludes the use of __builtin_longjmp to a
871 __builtin_setjmp target in the same function. However, we've
872 already cautioned the user that these functions are for
873 internal exception handling use only. */
874 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
876 gcc_assert (insn != last);
880 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
883 else if (CALL_P (insn))
888 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
889 and the address of the save area. */
892 expand_builtin_nonlocal_goto (tree exp)
894 tree t_label, t_save_area;
895 rtx r_label, r_save_area, r_fp, r_sp, insn;
897 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
900 t_label = CALL_EXPR_ARG (exp, 0);
901 t_save_area = CALL_EXPR_ARG (exp, 1);
903 r_label = expand_normal (t_label);
904 r_label = convert_memory_address (Pmode, r_label);
905 r_save_area = expand_normal (t_save_area);
906 r_save_area = convert_memory_address (Pmode, r_save_area);
907 /* Copy the address of the save location to a register just in case it was based
908 on the frame pointer. */
909 r_save_area = copy_to_reg (r_save_area);
910 r_fp = gen_rtx_MEM (Pmode, r_save_area);
911 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
912 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
914 crtl->has_nonlocal_goto = 1;
916 #ifdef HAVE_nonlocal_goto
917 /* ??? We no longer need to pass the static chain value, afaik. */
918 if (HAVE_nonlocal_goto)
919 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
923 r_label = copy_to_reg (r_label);
925 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
926 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
928 /* Restore frame pointer for containing function.
929 This sets the actual hard register used for the frame pointer
930 to the location of the function's incoming static chain info.
931 The non-local goto handler will then adjust it to contain the
932 proper value and reload the argument pointer, if needed. */
933 emit_move_insn (hard_frame_pointer_rtx, r_fp);
934 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
936 /* USE of hard_frame_pointer_rtx added for consistency;
937 not clear if really needed. */
938 emit_use (hard_frame_pointer_rtx);
939 emit_use (stack_pointer_rtx);
941 /* If the architecture is using a GP register, we must
942 conservatively assume that the target function makes use of it.
943 The prologue of functions with nonlocal gotos must therefore
944 initialize the GP register to the appropriate value, and we
945 must then make sure that this value is live at the point
946 of the jump. (Note that this doesn't necessarily apply
947 to targets with a nonlocal_goto pattern; they are free
948 to implement it in their own way. Note also that this is
949 a no-op if the GP register is a global invariant.) */
950 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
951 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
952 emit_use (pic_offset_table_rtx);
954 emit_indirect_jump (r_label);
957 /* Search backwards to the jump insn and mark it as a
959 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
963 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
966 else if (CALL_P (insn))
973 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
974 (not all will be used on all machines) that was passed to __builtin_setjmp.
975 It updates the stack pointer in that block to correspond to the current
979 expand_builtin_update_setjmp_buf (rtx buf_addr)
981 enum machine_mode sa_mode = Pmode;
985 #ifdef HAVE_save_stack_nonlocal
986 if (HAVE_save_stack_nonlocal)
987 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
989 #ifdef STACK_SAVEAREA_MODE
990 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
994 = gen_rtx_MEM (sa_mode,
997 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1001 emit_insn (gen_setjmp ());
1004 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1007 /* Expand a call to __builtin_prefetch. For a target that does not support
1008 data prefetch, evaluate the memory address argument in case it has side
1012 expand_builtin_prefetch (tree exp)
1014 tree arg0, arg1, arg2;
1018 if (!validate_arglist (exp, POINTER_TYPE, 0))
1021 arg0 = CALL_EXPR_ARG (exp, 0);
1023 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1024 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1026 nargs = call_expr_nargs (exp);
1028 arg1 = CALL_EXPR_ARG (exp, 1);
1030 arg1 = integer_zero_node;
1032 arg2 = CALL_EXPR_ARG (exp, 2);
1034 arg2 = build_int_cst (NULL_TREE, 3);
1036 /* Argument 0 is an address. */
1037 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1039 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1040 if (TREE_CODE (arg1) != INTEGER_CST)
1042 error ("second argument to %<__builtin_prefetch%> must be a constant");
1043 arg1 = integer_zero_node;
1045 op1 = expand_normal (arg1);
1046 /* Argument 1 must be either zero or one. */
1047 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1049 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1054 /* Argument 2 (locality) must be a compile-time constant int. */
1055 if (TREE_CODE (arg2) != INTEGER_CST)
1057 error ("third argument to %<__builtin_prefetch%> must be a constant");
1058 arg2 = integer_zero_node;
1060 op2 = expand_normal (arg2);
1061 /* Argument 2 must be 0, 1, 2, or 3. */
1062 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1064 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1068 #ifdef HAVE_prefetch
1071 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1073 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1074 || (GET_MODE (op0) != Pmode))
1076 op0 = convert_memory_address (Pmode, op0);
1077 op0 = force_reg (Pmode, op0);
1079 emit_insn (gen_prefetch (op0, op1, op2));
1083 /* Don't do anything with direct references to volatile memory, but
1084 generate code to handle other side effects. */
1085 if (!MEM_P (op0) && side_effects_p (op0))
1089 /* Get a MEM rtx for expression EXP which is the address of an operand
1090 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1091 the maximum length of the block of memory that might be accessed or
1095 get_memory_rtx (tree exp, tree len)
1097 tree orig_exp = exp;
1101 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1102 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1103 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1104 exp = TREE_OPERAND (exp, 0);
1106 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1107 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1109 /* Get an expression we can use to find the attributes to assign to MEM.
1110 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1111 we can. First remove any nops. */
1112 while (CONVERT_EXPR_P (exp)
1113 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1114 exp = TREE_OPERAND (exp, 0);
1117 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1118 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1119 && host_integerp (TREE_OPERAND (exp, 1), 0)
1120 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1121 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1122 else if (TREE_CODE (exp) == ADDR_EXPR)
1123 exp = TREE_OPERAND (exp, 0);
1124 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1125 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1129 /* Honor attributes derived from exp, except for the alias set
1130 (as builtin stringops may alias with anything) and the size
1131 (as stringops may access multiple array elements). */
1134 set_mem_attributes (mem, exp, 0);
1137 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1139 /* Allow the string and memory builtins to overflow from one
1140 field into another, see http://gcc.gnu.org/PR23561.
1141 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1142 memory accessed by the string or memory builtin will fit
1143 within the field. */
1144 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1146 tree mem_expr = MEM_EXPR (mem);
1147 HOST_WIDE_INT offset = -1, length = -1;
1150 while (TREE_CODE (inner) == ARRAY_REF
1151 || CONVERT_EXPR_P (inner)
1152 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1153 || TREE_CODE (inner) == SAVE_EXPR)
1154 inner = TREE_OPERAND (inner, 0);
1156 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1158 if (MEM_OFFSET (mem)
1159 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1160 offset = INTVAL (MEM_OFFSET (mem));
1162 if (offset >= 0 && len && host_integerp (len, 0))
1163 length = tree_low_cst (len, 0);
1165 while (TREE_CODE (inner) == COMPONENT_REF)
1167 tree field = TREE_OPERAND (inner, 1);
1168 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1169 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1171 /* Bitfields are generally not byte-addressable. */
1172 gcc_assert (!DECL_BIT_FIELD (field)
1173 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1174 % BITS_PER_UNIT) == 0
1175 && host_integerp (DECL_SIZE (field), 0)
1176 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1177 % BITS_PER_UNIT) == 0));
1179 /* If we can prove that the memory starting at XEXP (mem, 0) and
1180 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1181 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1182 fields without DECL_SIZE_UNIT like flexible array members. */
1184 && DECL_SIZE_UNIT (field)
1185 && host_integerp (DECL_SIZE_UNIT (field), 0))
1188 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1191 && offset + length <= size)
1196 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1197 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1198 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1206 mem_expr = TREE_OPERAND (mem_expr, 0);
1207 inner = TREE_OPERAND (inner, 0);
1210 if (mem_expr == NULL)
1212 if (mem_expr != MEM_EXPR (mem))
1214 set_mem_expr (mem, mem_expr);
1215 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1218 set_mem_alias_set (mem, 0);
1219 set_mem_size (mem, NULL_RTX);
1225 /* Built-in functions to perform an untyped call and return. */
1227 /* For each register that may be used for calling a function, this
1228 gives a mode used to copy the register's value. VOIDmode indicates
1229 the register is not used for calling a function. If the machine
1230 has register windows, this gives only the outbound registers.
1231 INCOMING_REGNO gives the corresponding inbound register. */
1232 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1234 /* For each register that may be used for returning values, this gives
1235 a mode used to copy the register's value. VOIDmode indicates the
1236 register is not used for returning values. If the machine has
1237 register windows, this gives only the outbound registers.
1238 INCOMING_REGNO gives the corresponding inbound register. */
1239 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1241 /* For each register that may be used for calling a function, this
1242 gives the offset of that register into the block returned by
1243 __builtin_apply_args. 0 indicates that the register is not
1244 used for calling a function. */
1245 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1247 /* Return the size required for the block returned by __builtin_apply_args,
1248 and initialize apply_args_mode. */
1251 apply_args_size (void)
1253 static int size = -1;
1256 enum machine_mode mode;
1258 /* The values computed by this function never change. */
1261 /* The first value is the incoming arg-pointer. */
1262 size = GET_MODE_SIZE (Pmode);
1264 /* The second value is the structure value address unless this is
1265 passed as an "invisible" first argument. */
1266 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1267 size += GET_MODE_SIZE (Pmode);
1269 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1270 if (FUNCTION_ARG_REGNO_P (regno))
1272 mode = reg_raw_mode[regno];
1274 gcc_assert (mode != VOIDmode);
1276 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1277 if (size % align != 0)
1278 size = CEIL (size, align) * align;
1279 apply_args_reg_offset[regno] = size;
1280 size += GET_MODE_SIZE (mode);
1281 apply_args_mode[regno] = mode;
1285 apply_args_mode[regno] = VOIDmode;
1286 apply_args_reg_offset[regno] = 0;
1292 /* Return the size required for the block returned by __builtin_apply,
1293 and initialize apply_result_mode. */
1296 apply_result_size (void)
1298 static int size = -1;
1300 enum machine_mode mode;
1302 /* The values computed by this function never change. */
1307 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1308 if (FUNCTION_VALUE_REGNO_P (regno))
1310 mode = reg_raw_mode[regno];
1312 gcc_assert (mode != VOIDmode);
1314 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1315 if (size % align != 0)
1316 size = CEIL (size, align) * align;
1317 size += GET_MODE_SIZE (mode);
1318 apply_result_mode[regno] = mode;
1321 apply_result_mode[regno] = VOIDmode;
1323 /* Allow targets that use untyped_call and untyped_return to override
1324 the size so that machine-specific information can be stored here. */
1325 #ifdef APPLY_RESULT_SIZE
1326 size = APPLY_RESULT_SIZE;
1332 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1333 /* Create a vector describing the result block RESULT. If SAVEP is true,
1334 the result block is used to save the values; otherwise it is used to
1335 restore the values. */
1338 result_vector (int savep, rtx result)
1340 int regno, size, align, nelts;
1341 enum machine_mode mode;
1343 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1346 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1347 if ((mode = apply_result_mode[regno]) != VOIDmode)
1349 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1350 if (size % align != 0)
1351 size = CEIL (size, align) * align;
1352 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1353 mem = adjust_address (result, mode, size);
1354 savevec[nelts++] = (savep
1355 ? gen_rtx_SET (VOIDmode, mem, reg)
1356 : gen_rtx_SET (VOIDmode, reg, mem));
1357 size += GET_MODE_SIZE (mode);
1359 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1361 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1363 /* Save the state required to perform an untyped call with the same
1364 arguments as were passed to the current function. */
1367 expand_builtin_apply_args_1 (void)
1370 int size, align, regno;
1371 enum machine_mode mode;
1372 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1374 /* Create a block where the arg-pointer, structure value address,
1375 and argument registers can be saved. */
1376 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1378 /* Walk past the arg-pointer and structure value address. */
1379 size = GET_MODE_SIZE (Pmode);
1380 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1381 size += GET_MODE_SIZE (Pmode);
1383 /* Save each register used in calling a function to the block. */
1384 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1385 if ((mode = apply_args_mode[regno]) != VOIDmode)
1387 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1388 if (size % align != 0)
1389 size = CEIL (size, align) * align;
1391 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1393 emit_move_insn (adjust_address (registers, mode, size), tem);
1394 size += GET_MODE_SIZE (mode);
1397 /* Save the arg pointer to the block. */
1398 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1399 #ifdef STACK_GROWS_DOWNWARD
1400 /* We need the pointer as the caller actually passed them to us, not
1401 as we might have pretended they were passed. Make sure it's a valid
1402 operand, as emit_move_insn isn't expected to handle a PLUS. */
1404 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1407 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1409 size = GET_MODE_SIZE (Pmode);
1411 /* Save the structure value address unless this is passed as an
1412 "invisible" first argument. */
1413 if (struct_incoming_value)
1415 emit_move_insn (adjust_address (registers, Pmode, size),
1416 copy_to_reg (struct_incoming_value));
1417 size += GET_MODE_SIZE (Pmode);
1420 /* Return the address of the block. */
1421 return copy_addr_to_reg (XEXP (registers, 0));
1424 /* __builtin_apply_args returns block of memory allocated on
1425 the stack into which is stored the arg pointer, structure
1426 value address, static chain, and all the registers that might
1427 possibly be used in performing a function call. The code is
1428 moved to the start of the function so the incoming values are
1432 expand_builtin_apply_args (void)
1434 /* Don't do __builtin_apply_args more than once in a function.
1435 Save the result of the first call and reuse it. */
1436 if (apply_args_value != 0)
1437 return apply_args_value;
1439 /* When this function is called, it means that registers must be
1440 saved on entry to this function. So we migrate the
1441 call to the first insn of this function. */
1446 temp = expand_builtin_apply_args_1 ();
1450 apply_args_value = temp;
1452 /* Put the insns after the NOTE that starts the function.
1453 If this is inside a start_sequence, make the outer-level insn
1454 chain current, so the code is placed at the start of the
1455 function. If internal_arg_pointer is a non-virtual pseudo,
1456 it needs to be placed after the function that initializes
1458 push_topmost_sequence ();
1459 if (REG_P (crtl->args.internal_arg_pointer)
1460 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1461 emit_insn_before (seq, parm_birth_insn);
1463 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1464 pop_topmost_sequence ();
1469 /* Perform an untyped call and save the state required to perform an
1470 untyped return of whatever value was returned by the given function. */
1473 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1475 int size, align, regno;
1476 enum machine_mode mode;
1477 rtx incoming_args, result, reg, dest, src, call_insn;
1478 rtx old_stack_level = 0;
1479 rtx call_fusage = 0;
1480 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1482 arguments = convert_memory_address (Pmode, arguments);
1484 /* Create a block where the return registers can be saved. */
1485 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1487 /* Fetch the arg pointer from the ARGUMENTS block. */
1488 incoming_args = gen_reg_rtx (Pmode);
1489 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1490 #ifndef STACK_GROWS_DOWNWARD
1491 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1492 incoming_args, 0, OPTAB_LIB_WIDEN);
1495 /* Push a new argument block and copy the arguments. Do not allow
1496 the (potential) memcpy call below to interfere with our stack
1498 do_pending_stack_adjust ();
1501 /* Save the stack with nonlocal if available. */
1502 #ifdef HAVE_save_stack_nonlocal
1503 if (HAVE_save_stack_nonlocal)
1504 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1507 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1509 /* Allocate a block of memory onto the stack and copy the memory
1510 arguments to the outgoing arguments address. */
1511 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1513 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1514 may have already set current_function_calls_alloca to true.
1515 current_function_calls_alloca won't be set if argsize is zero,
1516 so we have to guarantee need_drap is true here. */
1517 if (SUPPORTS_STACK_ALIGNMENT)
1518 crtl->need_drap = true;
1520 dest = virtual_outgoing_args_rtx;
1521 #ifndef STACK_GROWS_DOWNWARD
1522 if (GET_CODE (argsize) == CONST_INT)
1523 dest = plus_constant (dest, -INTVAL (argsize));
1525 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1527 dest = gen_rtx_MEM (BLKmode, dest);
1528 set_mem_align (dest, PARM_BOUNDARY);
1529 src = gen_rtx_MEM (BLKmode, incoming_args);
1530 set_mem_align (src, PARM_BOUNDARY);
1531 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1533 /* Refer to the argument block. */
1535 arguments = gen_rtx_MEM (BLKmode, arguments);
1536 set_mem_align (arguments, PARM_BOUNDARY);
1538 /* Walk past the arg-pointer and structure value address. */
1539 size = GET_MODE_SIZE (Pmode);
1541 size += GET_MODE_SIZE (Pmode);
1543 /* Restore each of the registers previously saved. Make USE insns
1544 for each of these registers for use in making the call. */
1545 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1546 if ((mode = apply_args_mode[regno]) != VOIDmode)
1548 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1549 if (size % align != 0)
1550 size = CEIL (size, align) * align;
1551 reg = gen_rtx_REG (mode, regno);
1552 emit_move_insn (reg, adjust_address (arguments, mode, size));
1553 use_reg (&call_fusage, reg);
1554 size += GET_MODE_SIZE (mode);
1557 /* Restore the structure value address unless this is passed as an
1558 "invisible" first argument. */
1559 size = GET_MODE_SIZE (Pmode);
1562 rtx value = gen_reg_rtx (Pmode);
1563 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1564 emit_move_insn (struct_value, value);
1565 if (REG_P (struct_value))
1566 use_reg (&call_fusage, struct_value);
1567 size += GET_MODE_SIZE (Pmode);
1570 /* All arguments and registers used for the call are set up by now! */
1571 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1573 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1574 and we don't want to load it into a register as an optimization,
1575 because prepare_call_address already did it if it should be done. */
1576 if (GET_CODE (function) != SYMBOL_REF)
1577 function = memory_address (FUNCTION_MODE, function);
1579 /* Generate the actual call instruction and save the return value. */
1580 #ifdef HAVE_untyped_call
1581 if (HAVE_untyped_call)
1582 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1583 result, result_vector (1, result)));
1586 #ifdef HAVE_call_value
1587 if (HAVE_call_value)
1591 /* Locate the unique return register. It is not possible to
1592 express a call that sets more than one return register using
1593 call_value; use untyped_call for that. In fact, untyped_call
1594 only needs to save the return registers in the given block. */
1595 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1596 if ((mode = apply_result_mode[regno]) != VOIDmode)
1598 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1600 valreg = gen_rtx_REG (mode, regno);
1603 emit_call_insn (GEN_CALL_VALUE (valreg,
1604 gen_rtx_MEM (FUNCTION_MODE, function),
1605 const0_rtx, NULL_RTX, const0_rtx));
1607 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1613 /* Find the CALL insn we just emitted, and attach the register usage
1615 call_insn = last_call_insn ();
1616 add_function_usage_to (call_insn, call_fusage);
1618 /* Restore the stack. */
1619 #ifdef HAVE_save_stack_nonlocal
1620 if (HAVE_save_stack_nonlocal)
1621 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1624 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1628 /* Return the address of the result block. */
1629 result = copy_addr_to_reg (XEXP (result, 0));
1630 return convert_memory_address (ptr_mode, result);
1633 /* Perform an untyped return. */
1636 expand_builtin_return (rtx result)
1638 int size, align, regno;
1639 enum machine_mode mode;
1641 rtx call_fusage = 0;
1643 result = convert_memory_address (Pmode, result);
1645 apply_result_size ();
1646 result = gen_rtx_MEM (BLKmode, result);
1648 #ifdef HAVE_untyped_return
1649 if (HAVE_untyped_return)
1651 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1657 /* Restore the return value and note that each value is used. */
1659 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1660 if ((mode = apply_result_mode[regno]) != VOIDmode)
1662 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1663 if (size % align != 0)
1664 size = CEIL (size, align) * align;
1665 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1666 emit_move_insn (reg, adjust_address (result, mode, size));
1668 push_to_sequence (call_fusage);
1670 call_fusage = get_insns ();
1672 size += GET_MODE_SIZE (mode);
1675 /* Put the USE insns before the return. */
1676 emit_insn (call_fusage);
1678 /* Return whatever values was restored by jumping directly to the end
1680 expand_naked_return ();
1683 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1685 static enum type_class
1686 type_to_class (tree type)
1688 switch (TREE_CODE (type))
1690 case VOID_TYPE: return void_type_class;
1691 case INTEGER_TYPE: return integer_type_class;
1692 case ENUMERAL_TYPE: return enumeral_type_class;
1693 case BOOLEAN_TYPE: return boolean_type_class;
1694 case POINTER_TYPE: return pointer_type_class;
1695 case REFERENCE_TYPE: return reference_type_class;
1696 case OFFSET_TYPE: return offset_type_class;
1697 case REAL_TYPE: return real_type_class;
1698 case COMPLEX_TYPE: return complex_type_class;
1699 case FUNCTION_TYPE: return function_type_class;
1700 case METHOD_TYPE: return method_type_class;
1701 case RECORD_TYPE: return record_type_class;
1703 case QUAL_UNION_TYPE: return union_type_class;
1704 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1705 ? string_type_class : array_type_class);
1706 case LANG_TYPE: return lang_type_class;
1707 default: return no_type_class;
1711 /* Expand a call EXP to __builtin_classify_type. */
1714 expand_builtin_classify_type (tree exp)
1716 if (call_expr_nargs (exp))
1717 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1718 return GEN_INT (no_type_class);
1721 /* This helper macro, meant to be used in mathfn_built_in below,
1722 determines which among a set of three builtin math functions is
1723 appropriate for a given type mode. The `F' and `L' cases are
1724 automatically generated from the `double' case. */
1725 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1726 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1727 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1728 fcodel = BUILT_IN_MATHFN##L ; break;
1729 /* Similar to above, but appends _R after any F/L suffix. */
1730 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1731 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1732 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1733 fcodel = BUILT_IN_MATHFN##L_R ; break;
1735 /* Return mathematic function equivalent to FN but operating directly
1736 on TYPE, if available. If IMPLICIT is true find the function in
1737 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1738 can't do the conversion, return zero. */
1741 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1743 tree const *const fn_arr
1744 = implicit ? implicit_built_in_decls : built_in_decls;
1745 enum built_in_function fcode, fcodef, fcodel;
1749 CASE_MATHFN (BUILT_IN_ACOS)
1750 CASE_MATHFN (BUILT_IN_ACOSH)
1751 CASE_MATHFN (BUILT_IN_ASIN)
1752 CASE_MATHFN (BUILT_IN_ASINH)
1753 CASE_MATHFN (BUILT_IN_ATAN)
1754 CASE_MATHFN (BUILT_IN_ATAN2)
1755 CASE_MATHFN (BUILT_IN_ATANH)
1756 CASE_MATHFN (BUILT_IN_CBRT)
1757 CASE_MATHFN (BUILT_IN_CEIL)
1758 CASE_MATHFN (BUILT_IN_CEXPI)
1759 CASE_MATHFN (BUILT_IN_COPYSIGN)
1760 CASE_MATHFN (BUILT_IN_COS)
1761 CASE_MATHFN (BUILT_IN_COSH)
1762 CASE_MATHFN (BUILT_IN_DREM)
1763 CASE_MATHFN (BUILT_IN_ERF)
1764 CASE_MATHFN (BUILT_IN_ERFC)
1765 CASE_MATHFN (BUILT_IN_EXP)
1766 CASE_MATHFN (BUILT_IN_EXP10)
1767 CASE_MATHFN (BUILT_IN_EXP2)
1768 CASE_MATHFN (BUILT_IN_EXPM1)
1769 CASE_MATHFN (BUILT_IN_FABS)
1770 CASE_MATHFN (BUILT_IN_FDIM)
1771 CASE_MATHFN (BUILT_IN_FLOOR)
1772 CASE_MATHFN (BUILT_IN_FMA)
1773 CASE_MATHFN (BUILT_IN_FMAX)
1774 CASE_MATHFN (BUILT_IN_FMIN)
1775 CASE_MATHFN (BUILT_IN_FMOD)
1776 CASE_MATHFN (BUILT_IN_FREXP)
1777 CASE_MATHFN (BUILT_IN_GAMMA)
1778 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1779 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1780 CASE_MATHFN (BUILT_IN_HYPOT)
1781 CASE_MATHFN (BUILT_IN_ILOGB)
1782 CASE_MATHFN (BUILT_IN_INF)
1783 CASE_MATHFN (BUILT_IN_ISINF)
1784 CASE_MATHFN (BUILT_IN_J0)
1785 CASE_MATHFN (BUILT_IN_J1)
1786 CASE_MATHFN (BUILT_IN_JN)
1787 CASE_MATHFN (BUILT_IN_LCEIL)
1788 CASE_MATHFN (BUILT_IN_LDEXP)
1789 CASE_MATHFN (BUILT_IN_LFLOOR)
1790 CASE_MATHFN (BUILT_IN_LGAMMA)
1791 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1792 CASE_MATHFN (BUILT_IN_LLCEIL)
1793 CASE_MATHFN (BUILT_IN_LLFLOOR)
1794 CASE_MATHFN (BUILT_IN_LLRINT)
1795 CASE_MATHFN (BUILT_IN_LLROUND)
1796 CASE_MATHFN (BUILT_IN_LOG)
1797 CASE_MATHFN (BUILT_IN_LOG10)
1798 CASE_MATHFN (BUILT_IN_LOG1P)
1799 CASE_MATHFN (BUILT_IN_LOG2)
1800 CASE_MATHFN (BUILT_IN_LOGB)
1801 CASE_MATHFN (BUILT_IN_LRINT)
1802 CASE_MATHFN (BUILT_IN_LROUND)
1803 CASE_MATHFN (BUILT_IN_MODF)
1804 CASE_MATHFN (BUILT_IN_NAN)
1805 CASE_MATHFN (BUILT_IN_NANS)
1806 CASE_MATHFN (BUILT_IN_NEARBYINT)
1807 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1808 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1809 CASE_MATHFN (BUILT_IN_POW)
1810 CASE_MATHFN (BUILT_IN_POWI)
1811 CASE_MATHFN (BUILT_IN_POW10)
1812 CASE_MATHFN (BUILT_IN_REMAINDER)
1813 CASE_MATHFN (BUILT_IN_REMQUO)
1814 CASE_MATHFN (BUILT_IN_RINT)
1815 CASE_MATHFN (BUILT_IN_ROUND)
1816 CASE_MATHFN (BUILT_IN_SCALB)
1817 CASE_MATHFN (BUILT_IN_SCALBLN)
1818 CASE_MATHFN (BUILT_IN_SCALBN)
1819 CASE_MATHFN (BUILT_IN_SIGNBIT)
1820 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1821 CASE_MATHFN (BUILT_IN_SIN)
1822 CASE_MATHFN (BUILT_IN_SINCOS)
1823 CASE_MATHFN (BUILT_IN_SINH)
1824 CASE_MATHFN (BUILT_IN_SQRT)
1825 CASE_MATHFN (BUILT_IN_TAN)
1826 CASE_MATHFN (BUILT_IN_TANH)
1827 CASE_MATHFN (BUILT_IN_TGAMMA)
1828 CASE_MATHFN (BUILT_IN_TRUNC)
1829 CASE_MATHFN (BUILT_IN_Y0)
1830 CASE_MATHFN (BUILT_IN_Y1)
1831 CASE_MATHFN (BUILT_IN_YN)
1837 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1838 return fn_arr[fcode];
1839 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1840 return fn_arr[fcodef];
1841 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1842 return fn_arr[fcodel];
1847 /* Like mathfn_built_in_1(), but always use the implicit array. */
1850 mathfn_built_in (tree type, enum built_in_function fn)
1852 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1855 /* If errno must be maintained, expand the RTL to check if the result,
1856 TARGET, of a built-in function call, EXP, is NaN, and if so set
1860 expand_errno_check (tree exp, rtx target)
1862 rtx lab = gen_label_rtx ();
1864 /* Test the result; if it is NaN, set errno=EDOM because
1865 the argument was not in the domain. */
1866 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1870 /* If this built-in doesn't throw an exception, set errno directly. */
1871 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1873 #ifdef GEN_ERRNO_RTX
1874 rtx errno_rtx = GEN_ERRNO_RTX;
1877 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1879 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1885 /* Make sure the library call isn't expanded as a tail call. */
1886 CALL_EXPR_TAILCALL (exp) = 0;
1888 /* We can't set errno=EDOM directly; let the library call do it.
1889 Pop the arguments right away in case the call gets deleted. */
1891 expand_call (exp, target, 0);
1896 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1897 Return NULL_RTX if a normal call should be emitted rather than expanding
1898 the function in-line. EXP is the expression that is a call to the builtin
1899 function; if convenient, the result should be placed in TARGET.
1900 SUBTARGET may be used as the target for computing one of EXP's operands. */
1903 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1905 optab builtin_optab;
1906 rtx op0, insns, before_call;
1907 tree fndecl = get_callee_fndecl (exp);
1908 enum machine_mode mode;
1909 bool errno_set = false;
1912 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1915 arg = CALL_EXPR_ARG (exp, 0);
1917 switch (DECL_FUNCTION_CODE (fndecl))
1919 CASE_FLT_FN (BUILT_IN_SQRT):
1920 errno_set = ! tree_expr_nonnegative_p (arg);
1921 builtin_optab = sqrt_optab;
1923 CASE_FLT_FN (BUILT_IN_EXP):
1924 errno_set = true; builtin_optab = exp_optab; break;
1925 CASE_FLT_FN (BUILT_IN_EXP10):
1926 CASE_FLT_FN (BUILT_IN_POW10):
1927 errno_set = true; builtin_optab = exp10_optab; break;
1928 CASE_FLT_FN (BUILT_IN_EXP2):
1929 errno_set = true; builtin_optab = exp2_optab; break;
1930 CASE_FLT_FN (BUILT_IN_EXPM1):
1931 errno_set = true; builtin_optab = expm1_optab; break;
1932 CASE_FLT_FN (BUILT_IN_LOGB):
1933 errno_set = true; builtin_optab = logb_optab; break;
1934 CASE_FLT_FN (BUILT_IN_LOG):
1935 errno_set = true; builtin_optab = log_optab; break;
1936 CASE_FLT_FN (BUILT_IN_LOG10):
1937 errno_set = true; builtin_optab = log10_optab; break;
1938 CASE_FLT_FN (BUILT_IN_LOG2):
1939 errno_set = true; builtin_optab = log2_optab; break;
1940 CASE_FLT_FN (BUILT_IN_LOG1P):
1941 errno_set = true; builtin_optab = log1p_optab; break;
1942 CASE_FLT_FN (BUILT_IN_ASIN):
1943 builtin_optab = asin_optab; break;
1944 CASE_FLT_FN (BUILT_IN_ACOS):
1945 builtin_optab = acos_optab; break;
1946 CASE_FLT_FN (BUILT_IN_TAN):
1947 builtin_optab = tan_optab; break;
1948 CASE_FLT_FN (BUILT_IN_ATAN):
1949 builtin_optab = atan_optab; break;
1950 CASE_FLT_FN (BUILT_IN_FLOOR):
1951 builtin_optab = floor_optab; break;
1952 CASE_FLT_FN (BUILT_IN_CEIL):
1953 builtin_optab = ceil_optab; break;
1954 CASE_FLT_FN (BUILT_IN_TRUNC):
1955 builtin_optab = btrunc_optab; break;
1956 CASE_FLT_FN (BUILT_IN_ROUND):
1957 builtin_optab = round_optab; break;
1958 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1959 builtin_optab = nearbyint_optab;
1960 if (flag_trapping_math)
1962 /* Else fallthrough and expand as rint. */
1963 CASE_FLT_FN (BUILT_IN_RINT):
1964 builtin_optab = rint_optab; break;
1969 /* Make a suitable register to place result in. */
1970 mode = TYPE_MODE (TREE_TYPE (exp));
1972 if (! flag_errno_math || ! HONOR_NANS (mode))
1975 /* Before working hard, check whether the instruction is available. */
1976 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1978 target = gen_reg_rtx (mode);
1980 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1981 need to expand the argument again. This way, we will not perform
1982 side-effects more the once. */
1983 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1985 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1989 /* Compute into TARGET.
1990 Set TARGET to wherever the result comes back. */
1991 target = expand_unop (mode, builtin_optab, op0, target, 0);
1996 expand_errno_check (exp, target);
1998 /* Output the entire sequence. */
1999 insns = get_insns ();
2005 /* If we were unable to expand via the builtin, stop the sequence
2006 (without outputting the insns) and call to the library function
2007 with the stabilized argument list. */
2011 before_call = get_last_insn ();
2013 return expand_call (exp, target, target == const0_rtx);
2016 /* Expand a call to the builtin binary math functions (pow and atan2).
2017 Return NULL_RTX if a normal call should be emitted rather than expanding the
2018 function in-line. EXP is the expression that is a call to the builtin
2019 function; if convenient, the result should be placed in TARGET.
2020 SUBTARGET may be used as the target for computing one of EXP's
2024 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2026 optab builtin_optab;
2027 rtx op0, op1, insns;
2028 int op1_type = REAL_TYPE;
2029 tree fndecl = get_callee_fndecl (exp);
2031 enum machine_mode mode;
2032 bool errno_set = true;
2034 switch (DECL_FUNCTION_CODE (fndecl))
2036 CASE_FLT_FN (BUILT_IN_SCALBN):
2037 CASE_FLT_FN (BUILT_IN_SCALBLN):
2038 CASE_FLT_FN (BUILT_IN_LDEXP):
2039 op1_type = INTEGER_TYPE;
2044 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2047 arg0 = CALL_EXPR_ARG (exp, 0);
2048 arg1 = CALL_EXPR_ARG (exp, 1);
2050 switch (DECL_FUNCTION_CODE (fndecl))
2052 CASE_FLT_FN (BUILT_IN_POW):
2053 builtin_optab = pow_optab; break;
2054 CASE_FLT_FN (BUILT_IN_ATAN2):
2055 builtin_optab = atan2_optab; break;
2056 CASE_FLT_FN (BUILT_IN_SCALB):
2057 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2059 builtin_optab = scalb_optab; break;
2060 CASE_FLT_FN (BUILT_IN_SCALBN):
2061 CASE_FLT_FN (BUILT_IN_SCALBLN):
2062 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2064 /* Fall through... */
2065 CASE_FLT_FN (BUILT_IN_LDEXP):
2066 builtin_optab = ldexp_optab; break;
2067 CASE_FLT_FN (BUILT_IN_FMOD):
2068 builtin_optab = fmod_optab; break;
2069 CASE_FLT_FN (BUILT_IN_REMAINDER):
2070 CASE_FLT_FN (BUILT_IN_DREM):
2071 builtin_optab = remainder_optab; break;
2076 /* Make a suitable register to place result in. */
2077 mode = TYPE_MODE (TREE_TYPE (exp));
2079 /* Before working hard, check whether the instruction is available. */
2080 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2083 target = gen_reg_rtx (mode);
2085 if (! flag_errno_math || ! HONOR_NANS (mode))
2088 /* Always stabilize the argument list. */
2089 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2090 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2092 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2093 op1 = expand_normal (arg1);
2097 /* Compute into TARGET.
2098 Set TARGET to wherever the result comes back. */
2099 target = expand_binop (mode, builtin_optab, op0, op1,
2100 target, 0, OPTAB_DIRECT);
2102 /* If we were unable to expand via the builtin, stop the sequence
2103 (without outputting the insns) and call to the library function
2104 with the stabilized argument list. */
2108 return expand_call (exp, target, target == const0_rtx);
2112 expand_errno_check (exp, target);
2114 /* Output the entire sequence. */
2115 insns = get_insns ();
2122 /* Expand a call to the builtin sin and cos math functions.
2123 Return NULL_RTX if a normal call should be emitted rather than expanding the
2124 function in-line. EXP is the expression that is a call to the builtin
2125 function; if convenient, the result should be placed in TARGET.
2126 SUBTARGET may be used as the target for computing one of EXP's
2130 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2132 optab builtin_optab;
2134 tree fndecl = get_callee_fndecl (exp);
2135 enum machine_mode mode;
2138 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2141 arg = CALL_EXPR_ARG (exp, 0);
2143 switch (DECL_FUNCTION_CODE (fndecl))
2145 CASE_FLT_FN (BUILT_IN_SIN):
2146 CASE_FLT_FN (BUILT_IN_COS):
2147 builtin_optab = sincos_optab; break;
2152 /* Make a suitable register to place result in. */
2153 mode = TYPE_MODE (TREE_TYPE (exp));
2155 /* Check if sincos insn is available, otherwise fallback
2156 to sin or cos insn. */
2157 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2158 switch (DECL_FUNCTION_CODE (fndecl))
2160 CASE_FLT_FN (BUILT_IN_SIN):
2161 builtin_optab = sin_optab; break;
2162 CASE_FLT_FN (BUILT_IN_COS):
2163 builtin_optab = cos_optab; break;
2168 /* Before working hard, check whether the instruction is available. */
2169 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2171 target = gen_reg_rtx (mode);
2173 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2174 need to expand the argument again. This way, we will not perform
2175 side-effects more the once. */
2176 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2178 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2182 /* Compute into TARGET.
2183 Set TARGET to wherever the result comes back. */
2184 if (builtin_optab == sincos_optab)
2188 switch (DECL_FUNCTION_CODE (fndecl))
2190 CASE_FLT_FN (BUILT_IN_SIN):
2191 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2193 CASE_FLT_FN (BUILT_IN_COS):
2194 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2199 gcc_assert (result);
2203 target = expand_unop (mode, builtin_optab, op0, target, 0);
2208 /* Output the entire sequence. */
2209 insns = get_insns ();
2215 /* If we were unable to expand via the builtin, stop the sequence
2216 (without outputting the insns) and call to the library function
2217 with the stabilized argument list. */
2221 target = expand_call (exp, target, target == const0_rtx);
2226 /* Expand a call to one of the builtin math functions that operate on
2227 floating point argument and output an integer result (ilogb, isinf,
2229 Return 0 if a normal call should be emitted rather than expanding the
2230 function in-line. EXP is the expression that is a call to the builtin
2231 function; if convenient, the result should be placed in TARGET.
2232 SUBTARGET may be used as the target for computing one of EXP's operands. */
2235 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2237 optab builtin_optab = 0;
2238 enum insn_code icode = CODE_FOR_nothing;
2240 tree fndecl = get_callee_fndecl (exp);
2241 enum machine_mode mode;
2242 bool errno_set = false;
2245 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2248 arg = CALL_EXPR_ARG (exp, 0);
2250 switch (DECL_FUNCTION_CODE (fndecl))
2252 CASE_FLT_FN (BUILT_IN_ILOGB):
2253 errno_set = true; builtin_optab = ilogb_optab; break;
2254 CASE_FLT_FN (BUILT_IN_ISINF):
2255 builtin_optab = isinf_optab; break;
2256 case BUILT_IN_ISNORMAL:
2257 case BUILT_IN_ISFINITE:
2258 CASE_FLT_FN (BUILT_IN_FINITE):
2259 /* These builtins have no optabs (yet). */
2265 /* There's no easy way to detect the case we need to set EDOM. */
2266 if (flag_errno_math && errno_set)
2269 /* Optab mode depends on the mode of the input argument. */
2270 mode = TYPE_MODE (TREE_TYPE (arg));
2273 icode = optab_handler (builtin_optab, mode)->insn_code;
2275 /* Before working hard, check whether the instruction is available. */
2276 if (icode != CODE_FOR_nothing)
2278 rtx last = get_last_insn ();
2279 tree orig_arg = arg;
2280 /* Make a suitable register to place result in. */
2282 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2283 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2285 gcc_assert (insn_data[icode].operand[0].predicate
2286 (target, GET_MODE (target)));
2288 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2289 need to expand the argument again. This way, we will not perform
2290 side-effects more the once. */
2291 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2293 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2295 if (mode != GET_MODE (op0))
2296 op0 = convert_to_mode (mode, op0, 0);
2298 /* Compute into TARGET.
2299 Set TARGET to wherever the result comes back. */
2300 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2302 delete_insns_since (last);
2303 CALL_EXPR_ARG (exp, 0) = orig_arg;
2306 /* If there is no optab, try generic code. */
2307 switch (DECL_FUNCTION_CODE (fndecl))
2311 CASE_FLT_FN (BUILT_IN_ISINF):
2313 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2314 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2315 tree const type = TREE_TYPE (arg);
2319 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2320 real_from_string (&r, buf);
2321 result = build_call_expr (isgr_fn, 2,
2322 fold_build1 (ABS_EXPR, type, arg),
2323 build_real (type, r));
2324 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2326 CASE_FLT_FN (BUILT_IN_FINITE):
2327 case BUILT_IN_ISFINITE:
2329 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2330 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2331 tree const type = TREE_TYPE (arg);
2335 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2336 real_from_string (&r, buf);
2337 result = build_call_expr (isle_fn, 2,
2338 fold_build1 (ABS_EXPR, type, arg),
2339 build_real (type, r));
2340 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2342 case BUILT_IN_ISNORMAL:
2344 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2345 islessequal(fabs(x),DBL_MAX). */
2346 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2347 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2348 tree const type = TREE_TYPE (arg);
2349 REAL_VALUE_TYPE rmax, rmin;
2352 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2353 real_from_string (&rmax, buf);
2354 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2355 real_from_string (&rmin, buf);
2356 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2357 result = build_call_expr (isle_fn, 2, arg,
2358 build_real (type, rmax));
2359 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2360 build_call_expr (isge_fn, 2, arg,
2361 build_real (type, rmin)));
2362 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2368 target = expand_call (exp, target, target == const0_rtx);
2373 /* Expand a call to the builtin sincos math function.
2374 Return NULL_RTX if a normal call should be emitted rather than expanding the
2375 function in-line. EXP is the expression that is a call to the builtin
2379 expand_builtin_sincos (tree exp)
2381 rtx op0, op1, op2, target1, target2;
2382 enum machine_mode mode;
2383 tree arg, sinp, cosp;
2386 if (!validate_arglist (exp, REAL_TYPE,
2387 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2390 arg = CALL_EXPR_ARG (exp, 0);
2391 sinp = CALL_EXPR_ARG (exp, 1);
2392 cosp = CALL_EXPR_ARG (exp, 2);
2394 /* Make a suitable register to place result in. */
2395 mode = TYPE_MODE (TREE_TYPE (arg));
2397 /* Check if sincos insn is available, otherwise emit the call. */
2398 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2401 target1 = gen_reg_rtx (mode);
2402 target2 = gen_reg_rtx (mode);
2404 op0 = expand_normal (arg);
2405 op1 = expand_normal (build_fold_indirect_ref (sinp));
2406 op2 = expand_normal (build_fold_indirect_ref (cosp));
2408 /* Compute into target1 and target2.
2409 Set TARGET to wherever the result comes back. */
2410 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2411 gcc_assert (result);
2413 /* Move target1 and target2 to the memory locations indicated
2415 emit_move_insn (op1, target1);
2416 emit_move_insn (op2, target2);
2421 /* Expand a call to the internal cexpi builtin to the sincos math function.
2422 EXP is the expression that is a call to the builtin function; if convenient,
2423 the result should be placed in TARGET. SUBTARGET may be used as the target
2424 for computing one of EXP's operands. */
2427 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2429 tree fndecl = get_callee_fndecl (exp);
2431 enum machine_mode mode;
2434 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2437 arg = CALL_EXPR_ARG (exp, 0);
2438 type = TREE_TYPE (arg);
2439 mode = TYPE_MODE (TREE_TYPE (arg));
2441 /* Try expanding via a sincos optab, fall back to emitting a libcall
2442 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2443 is only generated from sincos, cexp or if we have either of them. */
2444 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2446 op1 = gen_reg_rtx (mode);
2447 op2 = gen_reg_rtx (mode);
2449 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2451 /* Compute into op1 and op2. */
2452 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2454 else if (TARGET_HAS_SINCOS)
2456 tree call, fn = NULL_TREE;
2460 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2461 fn = built_in_decls[BUILT_IN_SINCOSF];
2462 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2463 fn = built_in_decls[BUILT_IN_SINCOS];
2464 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2465 fn = built_in_decls[BUILT_IN_SINCOSL];
2469 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2470 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2471 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2472 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2473 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2474 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2476 /* Make sure not to fold the sincos call again. */
2477 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2478 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2479 call, 3, arg, top1, top2));
2483 tree call, fn = NULL_TREE, narg;
2484 tree ctype = build_complex_type (type);
2486 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2487 fn = built_in_decls[BUILT_IN_CEXPF];
2488 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2489 fn = built_in_decls[BUILT_IN_CEXP];
2490 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2491 fn = built_in_decls[BUILT_IN_CEXPL];
2495 /* If we don't have a decl for cexp create one. This is the
2496 friendliest fallback if the user calls __builtin_cexpi
2497 without full target C99 function support. */
2498 if (fn == NULL_TREE)
2501 const char *name = NULL;
2503 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2505 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2507 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2510 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2511 fn = build_fn_decl (name, fntype);
2514 narg = fold_build2 (COMPLEX_EXPR, ctype,
2515 build_real (type, dconst0), arg);
2517 /* Make sure not to fold the cexp call again. */
2518 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2519 return expand_expr (build_call_nary (ctype, call, 1, narg),
2520 target, VOIDmode, EXPAND_NORMAL);
2523 /* Now build the proper return type. */
2524 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2525 make_tree (TREE_TYPE (arg), op2),
2526 make_tree (TREE_TYPE (arg), op1)),
2527 target, VOIDmode, EXPAND_NORMAL);
2530 /* Expand a call to one of the builtin rounding functions gcc defines
2531 as an extension (lfloor and lceil). As these are gcc extensions we
2532 do not need to worry about setting errno to EDOM.
2533 If expanding via optab fails, lower expression to (int)(floor(x)).
2534 EXP is the expression that is a call to the builtin function;
2535 if convenient, the result should be placed in TARGET. */
2538 expand_builtin_int_roundingfn (tree exp, rtx target)
2540 convert_optab builtin_optab;
2541 rtx op0, insns, tmp;
2542 tree fndecl = get_callee_fndecl (exp);
2543 enum built_in_function fallback_fn;
2544 tree fallback_fndecl;
2545 enum machine_mode mode;
2548 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2551 arg = CALL_EXPR_ARG (exp, 0);
2553 switch (DECL_FUNCTION_CODE (fndecl))
2555 CASE_FLT_FN (BUILT_IN_LCEIL):
2556 CASE_FLT_FN (BUILT_IN_LLCEIL):
2557 builtin_optab = lceil_optab;
2558 fallback_fn = BUILT_IN_CEIL;
2561 CASE_FLT_FN (BUILT_IN_LFLOOR):
2562 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2563 builtin_optab = lfloor_optab;
2564 fallback_fn = BUILT_IN_FLOOR;
2571 /* Make a suitable register to place result in. */
2572 mode = TYPE_MODE (TREE_TYPE (exp));
2574 target = gen_reg_rtx (mode);
2576 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2577 need to expand the argument again. This way, we will not perform
2578 side-effects more the once. */
2579 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2581 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2585 /* Compute into TARGET. */
2586 if (expand_sfix_optab (target, op0, builtin_optab))
2588 /* Output the entire sequence. */
2589 insns = get_insns ();
2595 /* If we were unable to expand via the builtin, stop the sequence
2596 (without outputting the insns). */
2599 /* Fall back to floating point rounding optab. */
2600 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2602 /* For non-C99 targets we may end up without a fallback fndecl here
2603 if the user called __builtin_lfloor directly. In this case emit
2604 a call to the floor/ceil variants nevertheless. This should result
2605 in the best user experience for not full C99 targets. */
2606 if (fallback_fndecl == NULL_TREE)
2609 const char *name = NULL;
2611 switch (DECL_FUNCTION_CODE (fndecl))
2613 case BUILT_IN_LCEIL:
2614 case BUILT_IN_LLCEIL:
2617 case BUILT_IN_LCEILF:
2618 case BUILT_IN_LLCEILF:
2621 case BUILT_IN_LCEILL:
2622 case BUILT_IN_LLCEILL:
2625 case BUILT_IN_LFLOOR:
2626 case BUILT_IN_LLFLOOR:
2629 case BUILT_IN_LFLOORF:
2630 case BUILT_IN_LLFLOORF:
2633 case BUILT_IN_LFLOORL:
2634 case BUILT_IN_LLFLOORL:
2641 fntype = build_function_type_list (TREE_TYPE (arg),
2642 TREE_TYPE (arg), NULL_TREE);
2643 fallback_fndecl = build_fn_decl (name, fntype);
2646 exp = build_call_expr (fallback_fndecl, 1, arg);
2648 tmp = expand_normal (exp);
2650 /* Truncate the result of floating point optab to integer
2651 via expand_fix (). */
2652 target = gen_reg_rtx (mode);
2653 expand_fix (target, tmp, 0);
2658 /* Expand a call to one of the builtin math functions doing integer
2660 Return 0 if a normal call should be emitted rather than expanding the
2661 function in-line. EXP is the expression that is a call to the builtin
2662 function; if convenient, the result should be placed in TARGET. */
2665 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2667 convert_optab builtin_optab;
2669 tree fndecl = get_callee_fndecl (exp);
2671 enum machine_mode mode;
2673 /* There's no easy way to detect the case we need to set EDOM. */
2674 if (flag_errno_math)
2677 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2680 arg = CALL_EXPR_ARG (exp, 0);
2682 switch (DECL_FUNCTION_CODE (fndecl))
2684 CASE_FLT_FN (BUILT_IN_LRINT):
2685 CASE_FLT_FN (BUILT_IN_LLRINT):
2686 builtin_optab = lrint_optab; break;
2687 CASE_FLT_FN (BUILT_IN_LROUND):
2688 CASE_FLT_FN (BUILT_IN_LLROUND):
2689 builtin_optab = lround_optab; break;
2694 /* Make a suitable register to place result in. */
2695 mode = TYPE_MODE (TREE_TYPE (exp));
2697 target = gen_reg_rtx (mode);
2699 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2700 need to expand the argument again. This way, we will not perform
2701 side-effects more the once. */
2702 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2704 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2708 if (expand_sfix_optab (target, op0, builtin_optab))
2710 /* Output the entire sequence. */
2711 insns = get_insns ();
2717 /* If we were unable to expand via the builtin, stop the sequence
2718 (without outputting the insns) and call to the library function
2719 with the stabilized argument list. */
2722 target = expand_call (exp, target, target == const0_rtx);
2727 /* To evaluate powi(x,n), the floating point value x raised to the
2728 constant integer exponent n, we use a hybrid algorithm that
2729 combines the "window method" with look-up tables. For an
2730 introduction to exponentiation algorithms and "addition chains",
2731 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2732 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2733 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2734 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2736 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2737 multiplications to inline before calling the system library's pow
2738 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2739 so this default never requires calling pow, powf or powl. */
2741 #ifndef POWI_MAX_MULTS
2742 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2745 /* The size of the "optimal power tree" lookup table. All
2746 exponents less than this value are simply looked up in the
2747 powi_table below. This threshold is also used to size the
2748 cache of pseudo registers that hold intermediate results. */
2749 #define POWI_TABLE_SIZE 256
2751 /* The size, in bits of the window, used in the "window method"
2752 exponentiation algorithm. This is equivalent to a radix of
2753 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2754 #define POWI_WINDOW_SIZE 3
2756 /* The following table is an efficient representation of an
2757 "optimal power tree". For each value, i, the corresponding
2758 value, j, in the table states than an optimal evaluation
2759 sequence for calculating pow(x,i) can be found by evaluating
2760 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2761 100 integers is given in Knuth's "Seminumerical algorithms". */
2763 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2765 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2766 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2767 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2768 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2769 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2770 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2771 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2772 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2773 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2774 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2775 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2776 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2777 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2778 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2779 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2780 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2781 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2782 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2783 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2784 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2785 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2786 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2787 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2788 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2789 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2790 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2791 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2792 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2793 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2794 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2795 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2796 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2800 /* Return the number of multiplications required to calculate
2801 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2802 subroutine of powi_cost. CACHE is an array indicating
2803 which exponents have already been calculated. */
2806 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2808 /* If we've already calculated this exponent, then this evaluation
2809 doesn't require any additional multiplications. */
2814 return powi_lookup_cost (n - powi_table[n], cache)
2815 + powi_lookup_cost (powi_table[n], cache) + 1;
2818 /* Return the number of multiplications required to calculate
2819 powi(x,n) for an arbitrary x, given the exponent N. This
2820 function needs to be kept in sync with expand_powi below. */
2823 powi_cost (HOST_WIDE_INT n)
2825 bool cache[POWI_TABLE_SIZE];
2826 unsigned HOST_WIDE_INT digit;
2827 unsigned HOST_WIDE_INT val;
2833 /* Ignore the reciprocal when calculating the cost. */
2834 val = (n < 0) ? -n : n;
2836 /* Initialize the exponent cache. */
2837 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2842 while (val >= POWI_TABLE_SIZE)
2846 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2847 result += powi_lookup_cost (digit, cache)
2848 + POWI_WINDOW_SIZE + 1;
2849 val >>= POWI_WINDOW_SIZE;
2858 return result + powi_lookup_cost (val, cache);
2861 /* Recursive subroutine of expand_powi. This function takes the array,
2862 CACHE, of already calculated exponents and an exponent N and returns
2863 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2866 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2868 unsigned HOST_WIDE_INT digit;
2872 if (n < POWI_TABLE_SIZE)
2877 target = gen_reg_rtx (mode);
2880 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2881 op1 = expand_powi_1 (mode, powi_table[n], cache);
2885 target = gen_reg_rtx (mode);
2886 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2887 op0 = expand_powi_1 (mode, n - digit, cache);
2888 op1 = expand_powi_1 (mode, digit, cache);
2892 target = gen_reg_rtx (mode);
2893 op0 = expand_powi_1 (mode, n >> 1, cache);
2897 result = expand_mult (mode, op0, op1, target, 0);
2898 if (result != target)
2899 emit_move_insn (target, result);
2903 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2904 floating point operand in mode MODE, and N is the exponent. This
2905 function needs to be kept in sync with powi_cost above. */
2908 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2910 unsigned HOST_WIDE_INT val;
2911 rtx cache[POWI_TABLE_SIZE];
2915 return CONST1_RTX (mode);
2917 val = (n < 0) ? -n : n;
2919 memset (cache, 0, sizeof (cache));
2922 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2924 /* If the original exponent was negative, reciprocate the result. */
2926 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2927 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2932 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2933 a normal call should be emitted rather than expanding the function
2934 in-line. EXP is the expression that is a call to the builtin
2935 function; if convenient, the result should be placed in TARGET. */
2938 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2942 tree type = TREE_TYPE (exp);
2943 REAL_VALUE_TYPE cint, c, c2;
2946 enum machine_mode mode = TYPE_MODE (type);
2948 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2951 arg0 = CALL_EXPR_ARG (exp, 0);
2952 arg1 = CALL_EXPR_ARG (exp, 1);
2954 if (TREE_CODE (arg1) != REAL_CST
2955 || TREE_OVERFLOW (arg1))
2956 return expand_builtin_mathfn_2 (exp, target, subtarget);
2958 /* Handle constant exponents. */
2960 /* For integer valued exponents we can expand to an optimal multiplication
2961 sequence using expand_powi. */
2962 c = TREE_REAL_CST (arg1);
2963 n = real_to_integer (&c);
2964 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2965 if (real_identical (&c, &cint)
2966 && ((n >= -1 && n <= 2)
2967 || (flag_unsafe_math_optimizations
2968 && optimize_insn_for_speed_p ()
2969 && powi_cost (n) <= POWI_MAX_MULTS)))
2971 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2974 op = force_reg (mode, op);
2975 op = expand_powi (op, mode, n);
2980 narg0 = builtin_save_expr (arg0);
2982 /* If the exponent is not integer valued, check if it is half of an integer.
2983 In this case we can expand to sqrt (x) * x**(n/2). */
2984 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2985 if (fn != NULL_TREE)
2987 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2988 n = real_to_integer (&c2);
2989 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2990 if (real_identical (&c2, &cint)
2991 && ((flag_unsafe_math_optimizations
2992 && optimize_insn_for_speed_p ()
2993 && powi_cost (n/2) <= POWI_MAX_MULTS)
2994 /* Even the c==0.5 case cannot be done unconditionally
2995 when we need to preserve signed zeros, as
2996 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
2997 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)))
2999 tree call_expr = build_call_expr (fn, 1, narg0);
3000 /* Use expand_expr in case the newly built call expression
3001 was folded to a non-call. */
3002 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3005 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3006 op2 = force_reg (mode, op2);
3007 op2 = expand_powi (op2, mode, abs (n / 2));
3008 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3009 0, OPTAB_LIB_WIDEN);
3010 /* If the original exponent was negative, reciprocate the
3013 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3014 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3020 /* Try if the exponent is a third of an integer. In this case
3021 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3022 different from pow (x, 1./3.) due to rounding and behavior
3023 with negative x we need to constrain this transformation to
3024 unsafe math and positive x or finite math. */
3025 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3027 && flag_unsafe_math_optimizations
3028 && (tree_expr_nonnegative_p (arg0)
3029 || !HONOR_NANS (mode)))
3031 REAL_VALUE_TYPE dconst3;
3032 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3033 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3034 real_round (&c2, mode, &c2);
3035 n = real_to_integer (&c2);
3036 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3037 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3038 real_convert (&c2, mode, &c2);
3039 if (real_identical (&c2, &c)
3040 && ((optimize_insn_for_speed_p ()
3041 && powi_cost (n/3) <= POWI_MAX_MULTS)
3044 tree call_expr = build_call_expr (fn, 1,narg0);
3045 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3046 if (abs (n) % 3 == 2)
3047 op = expand_simple_binop (mode, MULT, op, op, op,
3048 0, OPTAB_LIB_WIDEN);
3051 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3052 op2 = force_reg (mode, op2);
3053 op2 = expand_powi (op2, mode, abs (n / 3));
3054 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3055 0, OPTAB_LIB_WIDEN);
3056 /* If the original exponent was negative, reciprocate the
3059 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3060 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3066 /* Fall back to optab expansion. */
3067 return expand_builtin_mathfn_2 (exp, target, subtarget);
3070 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3071 a normal call should be emitted rather than expanding the function
3072 in-line. EXP is the expression that is a call to the builtin
3073 function; if convenient, the result should be placed in TARGET. */
3076 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3080 enum machine_mode mode;
3081 enum machine_mode mode2;
3083 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3086 arg0 = CALL_EXPR_ARG (exp, 0);
3087 arg1 = CALL_EXPR_ARG (exp, 1);
3088 mode = TYPE_MODE (TREE_TYPE (exp));
3090 /* Handle constant power. */
3092 if (TREE_CODE (arg1) == INTEGER_CST
3093 && !TREE_OVERFLOW (arg1))
3095 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3097 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3098 Otherwise, check the number of multiplications required. */
3099 if ((TREE_INT_CST_HIGH (arg1) == 0
3100 || TREE_INT_CST_HIGH (arg1) == -1)
3101 && ((n >= -1 && n <= 2)
3102 || (optimize_insn_for_speed_p ()
3103 && powi_cost (n) <= POWI_MAX_MULTS)))
3105 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3106 op0 = force_reg (mode, op0);
3107 return expand_powi (op0, mode, n);
3111 /* Emit a libcall to libgcc. */
3113 /* Mode of the 2nd argument must match that of an int. */
3114 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3116 if (target == NULL_RTX)
3117 target = gen_reg_rtx (mode);
3119 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3120 if (GET_MODE (op0) != mode)
3121 op0 = convert_to_mode (mode, op0, 0);
3122 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3123 if (GET_MODE (op1) != mode2)
3124 op1 = convert_to_mode (mode2, op1, 0);
3126 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3127 target, LCT_CONST, mode, 2,
3128 op0, mode, op1, mode2);
3133 /* Expand expression EXP which is a call to the strlen builtin. Return
3134 NULL_RTX if we failed the caller should emit a normal call, otherwise
3135 try to get the result in TARGET, if convenient. */
3138 expand_builtin_strlen (tree exp, rtx target,
3139 enum machine_mode target_mode)
3141 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3147 tree src = CALL_EXPR_ARG (exp, 0);
3148 rtx result, src_reg, char_rtx, before_strlen;
3149 enum machine_mode insn_mode = target_mode, char_mode;
3150 enum insn_code icode = CODE_FOR_nothing;
3153 /* If the length can be computed at compile-time, return it. */
3154 len = c_strlen (src, 0);
3156 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3158 /* If the length can be computed at compile-time and is constant
3159 integer, but there are side-effects in src, evaluate
3160 src for side-effects, then return len.
3161 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3162 can be optimized into: i++; x = 3; */
3163 len = c_strlen (src, 1);
3164 if (len && TREE_CODE (len) == INTEGER_CST)
3166 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3167 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3170 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3172 /* If SRC is not a pointer type, don't do this operation inline. */
3176 /* Bail out if we can't compute strlen in the right mode. */
3177 while (insn_mode != VOIDmode)
3179 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3180 if (icode != CODE_FOR_nothing)
3183 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3185 if (insn_mode == VOIDmode)
3188 /* Make a place to write the result of the instruction. */
3192 && GET_MODE (result) == insn_mode
3193 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3194 result = gen_reg_rtx (insn_mode);
3196 /* Make a place to hold the source address. We will not expand
3197 the actual source until we are sure that the expansion will
3198 not fail -- there are trees that cannot be expanded twice. */
3199 src_reg = gen_reg_rtx (Pmode);
3201 /* Mark the beginning of the strlen sequence so we can emit the
3202 source operand later. */
3203 before_strlen = get_last_insn ();
3205 char_rtx = const0_rtx;
3206 char_mode = insn_data[(int) icode].operand[2].mode;
3207 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3209 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3211 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3212 char_rtx, GEN_INT (align));
3217 /* Now that we are assured of success, expand the source. */
3219 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3221 emit_move_insn (src_reg, pat);
3226 emit_insn_after (pat, before_strlen);
3228 emit_insn_before (pat, get_insns ());
3230 /* Return the value in the proper mode for this function. */
3231 if (GET_MODE (result) == target_mode)
3233 else if (target != 0)
3234 convert_move (target, result, 0);
3236 target = convert_to_mode (target_mode, result, 0);
3242 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3243 caller should emit a normal call, otherwise try to get the result
3244 in TARGET, if convenient (and in mode MODE if that's convenient). */
3247 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3249 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3251 tree type = TREE_TYPE (exp);
3252 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3253 CALL_EXPR_ARG (exp, 1), type);
3255 return expand_expr (result, target, mode, EXPAND_NORMAL);
3260 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3261 caller should emit a normal call, otherwise try to get the result
3262 in TARGET, if convenient (and in mode MODE if that's convenient). */
3265 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3267 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3269 tree type = TREE_TYPE (exp);
3270 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3271 CALL_EXPR_ARG (exp, 1), type);
3273 return expand_expr (result, target, mode, EXPAND_NORMAL);
3275 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3280 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3281 caller should emit a normal call, otherwise try to get the result
3282 in TARGET, if convenient (and in mode MODE if that's convenient). */
3285 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3287 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3289 tree type = TREE_TYPE (exp);
3290 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3291 CALL_EXPR_ARG (exp, 1), type);
3293 return expand_expr (result, target, mode, EXPAND_NORMAL);
3298 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3299 caller should emit a normal call, otherwise try to get the result
3300 in TARGET, if convenient (and in mode MODE if that's convenient). */
3303 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3305 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3307 tree type = TREE_TYPE (exp);
3308 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3309 CALL_EXPR_ARG (exp, 1), type);
3311 return expand_expr (result, target, mode, EXPAND_NORMAL);
3316 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3317 bytes from constant string DATA + OFFSET and return it as target
3321 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3322 enum machine_mode mode)
3324 const char *str = (const char *) data;
3326 gcc_assert (offset >= 0
3327 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3328 <= strlen (str) + 1));
3330 return c_readstr (str + offset, mode);
3333 /* Expand a call EXP to the memcpy builtin.
3334 Return NULL_RTX if we failed, the caller should emit a normal call,
3335 otherwise try to get the result in TARGET, if convenient (and in
3336 mode MODE if that's convenient). */
3339 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3341 tree fndecl = get_callee_fndecl (exp);
3343 if (!validate_arglist (exp,
3344 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3348 tree dest = CALL_EXPR_ARG (exp, 0);
3349 tree src = CALL_EXPR_ARG (exp, 1);
3350 tree len = CALL_EXPR_ARG (exp, 2);
3351 const char *src_str;
3352 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3353 unsigned int dest_align
3354 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3355 rtx dest_mem, src_mem, dest_addr, len_rtx;
3356 tree result = fold_builtin_memory_op (dest, src, len,
3357 TREE_TYPE (TREE_TYPE (fndecl)),
3359 HOST_WIDE_INT expected_size = -1;
3360 unsigned int expected_align = 0;
3361 tree_ann_common_t ann;
3365 while (TREE_CODE (result) == COMPOUND_EXPR)
3367 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3369 result = TREE_OPERAND (result, 1);
3371 return expand_expr (result, target, mode, EXPAND_NORMAL);
3374 /* If DEST is not a pointer type, call the normal function. */
3375 if (dest_align == 0)
3378 /* If either SRC is not a pointer type, don't do this
3379 operation in-line. */
3383 ann = tree_common_ann (exp);
3385 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3387 if (expected_align < dest_align)
3388 expected_align = dest_align;
3389 dest_mem = get_memory_rtx (dest, len);
3390 set_mem_align (dest_mem, dest_align);
3391 len_rtx = expand_normal (len);
3392 src_str = c_getstr (src);
3394 /* If SRC is a string constant and block move would be done
3395 by pieces, we can avoid loading the string from memory
3396 and only stored the computed constants. */
3398 && GET_CODE (len_rtx) == CONST_INT
3399 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3400 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3401 CONST_CAST (char *, src_str),
3404 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3405 builtin_memcpy_read_str,
3406 CONST_CAST (char *, src_str),
3407 dest_align, false, 0);
3408 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3409 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3413 src_mem = get_memory_rtx (src, len);
3414 set_mem_align (src_mem, src_align);
3416 /* Copy word part most expediently. */
3417 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3418 CALL_EXPR_TAILCALL (exp)
3419 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3420 expected_align, expected_size);
3424 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3425 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3431 /* Expand a call EXP to the mempcpy builtin.
3432 Return NULL_RTX if we failed; the caller should emit a normal call,
3433 otherwise try to get the result in TARGET, if convenient (and in
3434 mode MODE if that's convenient). If ENDP is 0 return the
3435 destination pointer, if ENDP is 1 return the end pointer ala
3436 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3440 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3442 if (!validate_arglist (exp,
3443 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3447 tree dest = CALL_EXPR_ARG (exp, 0);
3448 tree src = CALL_EXPR_ARG (exp, 1);
3449 tree len = CALL_EXPR_ARG (exp, 2);
3450 return expand_builtin_mempcpy_args (dest, src, len,
3452 target, mode, /*endp=*/ 1);
3456 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3457 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3458 so that this can also be called without constructing an actual CALL_EXPR.
3459 TYPE is the return type of the call. The other arguments and return value
3460 are the same as for expand_builtin_mempcpy. */
3463 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3464 rtx target, enum machine_mode mode, int endp)
3466 /* If return value is ignored, transform mempcpy into memcpy. */
3467 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3469 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3470 tree result = build_call_expr (fn, 3, dest, src, len);
3472 while (TREE_CODE (result) == COMPOUND_EXPR)
3474 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3476 result = TREE_OPERAND (result, 1);
3478 return expand_expr (result, target, mode, EXPAND_NORMAL);
3482 const char *src_str;
3483 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3484 unsigned int dest_align
3485 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3486 rtx dest_mem, src_mem, len_rtx;
3487 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3491 while (TREE_CODE (result) == COMPOUND_EXPR)
3493 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3495 result = TREE_OPERAND (result, 1);
3497 return expand_expr (result, target, mode, EXPAND_NORMAL);
3500 /* If either SRC or DEST is not a pointer type, don't do this
3501 operation in-line. */
3502 if (dest_align == 0 || src_align == 0)
3505 /* If LEN is not constant, call the normal function. */
3506 if (! host_integerp (len, 1))
3509 len_rtx = expand_normal (len);
3510 src_str = c_getstr (src);
3512 /* If SRC is a string constant and block move would be done
3513 by pieces, we can avoid loading the string from memory
3514 and only stored the computed constants. */
3516 && GET_CODE (len_rtx) == CONST_INT
3517 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3518 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3519 CONST_CAST (char *, src_str),
3522 dest_mem = get_memory_rtx (dest, len);
3523 set_mem_align (dest_mem, dest_align);
3524 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3525 builtin_memcpy_read_str,
3526 CONST_CAST (char *, src_str),
3527 dest_align, false, endp);
3528 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3529 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3533 if (GET_CODE (len_rtx) == CONST_INT
3534 && can_move_by_pieces (INTVAL (len_rtx),
3535 MIN (dest_align, src_align)))
3537 dest_mem = get_memory_rtx (dest, len);
3538 set_mem_align (dest_mem, dest_align);
3539 src_mem = get_memory_rtx (src, len);
3540 set_mem_align (src_mem, src_align);
3541 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3542 MIN (dest_align, src_align), endp);
3543 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3544 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3552 /* Expand expression EXP, which is a call to the memmove builtin. Return
3553 NULL_RTX if we failed; the caller should emit a normal call. */
3556 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3558 if (!validate_arglist (exp,
3559 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3563 tree dest = CALL_EXPR_ARG (exp, 0);
3564 tree src = CALL_EXPR_ARG (exp, 1);
3565 tree len = CALL_EXPR_ARG (exp, 2);
3566 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3567 target, mode, ignore);
3571 /* Helper function to do the actual work for expand_builtin_memmove. The
3572 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3573 so that this can also be called without constructing an actual CALL_EXPR.
3574 TYPE is the return type of the call. The other arguments and return value
3575 are the same as for expand_builtin_memmove. */
3578 expand_builtin_memmove_args (tree dest, tree src, tree len,
3579 tree type, rtx target, enum machine_mode mode,
3582 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3586 STRIP_TYPE_NOPS (result);
3587 while (TREE_CODE (result) == COMPOUND_EXPR)
3589 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3591 result = TREE_OPERAND (result, 1);
3593 return expand_expr (result, target, mode, EXPAND_NORMAL);
3596 /* Otherwise, call the normal function. */
3600 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3601 NULL_RTX if we failed the caller should emit a normal call. */
3604 expand_builtin_bcopy (tree exp, int ignore)
3606 tree type = TREE_TYPE (exp);
3607 tree src, dest, size;
3609 if (!validate_arglist (exp,
3610 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3613 src = CALL_EXPR_ARG (exp, 0);
3614 dest = CALL_EXPR_ARG (exp, 1);
3615 size = CALL_EXPR_ARG (exp, 2);
3617 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3618 This is done this way so that if it isn't expanded inline, we fall
3619 back to calling bcopy instead of memmove. */
3620 return expand_builtin_memmove_args (dest, src,
3621 fold_convert (sizetype, size),
3622 type, const0_rtx, VOIDmode,
3627 # define HAVE_movstr 0
3628 # define CODE_FOR_movstr CODE_FOR_nothing
3631 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3632 we failed, the caller should emit a normal call, otherwise try to
3633 get the result in TARGET, if convenient. If ENDP is 0 return the
3634 destination pointer, if ENDP is 1 return the end pointer ala
3635 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3639 expand_movstr (tree dest, tree src, rtx target, int endp)
3645 const struct insn_data * data;
3650 dest_mem = get_memory_rtx (dest, NULL);
3651 src_mem = get_memory_rtx (src, NULL);
3654 target = force_reg (Pmode, XEXP (dest_mem, 0));
3655 dest_mem = replace_equiv_address (dest_mem, target);
3656 end = gen_reg_rtx (Pmode);
3660 if (target == 0 || target == const0_rtx)
3662 end = gen_reg_rtx (Pmode);
3670 data = insn_data + CODE_FOR_movstr;
3672 if (data->operand[0].mode != VOIDmode)
3673 end = gen_lowpart (data->operand[0].mode, end);
3675 insn = data->genfun (end, dest_mem, src_mem);
3681 /* movstr is supposed to set end to the address of the NUL
3682 terminator. If the caller requested a mempcpy-like return value,
3684 if (endp == 1 && target != const0_rtx)
3686 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3687 emit_move_insn (target, force_operand (tem, NULL_RTX));
3693 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3694 NULL_RTX if we failed the caller should emit a normal call, otherwise
3695 try to get the result in TARGET, if convenient (and in mode MODE if that's
3699 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3701 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3703 tree dest = CALL_EXPR_ARG (exp, 0);
3704 tree src = CALL_EXPR_ARG (exp, 1);
3705 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3710 /* Helper function to do the actual work for expand_builtin_strcpy. The
3711 arguments to the builtin_strcpy call DEST and SRC are broken out
3712 so that this can also be called without constructing an actual CALL_EXPR.
3713 The other arguments and return value are the same as for
3714 expand_builtin_strcpy. */
3717 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3718 rtx target, enum machine_mode mode)
3720 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3723 while (TREE_CODE (result) == COMPOUND_EXPR)
3725 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3727 result = TREE_OPERAND (result, 1);
3729 return expand_expr (result, target, mode, EXPAND_NORMAL);
3731 return expand_movstr (dest, src, target, /*endp=*/0);
3735 /* Expand a call EXP to the stpcpy builtin.
3736 Return NULL_RTX if we failed the caller should emit a normal call,
3737 otherwise try to get the result in TARGET, if convenient (and in
3738 mode MODE if that's convenient). */
3741 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3745 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3748 dst = CALL_EXPR_ARG (exp, 0);
3749 src = CALL_EXPR_ARG (exp, 1);
3751 /* If return value is ignored, transform stpcpy into strcpy. */
3752 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3754 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3755 tree result = build_call_expr (fn, 2, dst, src);
3757 STRIP_NOPS (result);
3758 while (TREE_CODE (result) == COMPOUND_EXPR)
3760 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3762 result = TREE_OPERAND (result, 1);
3764 return expand_expr (result, target, mode, EXPAND_NORMAL);
3771 /* Ensure we get an actual string whose length can be evaluated at
3772 compile-time, not an expression containing a string. This is
3773 because the latter will potentially produce pessimized code
3774 when used to produce the return value. */
3775 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3776 return expand_movstr (dst, src, target, /*endp=*/2);
3778 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3779 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3780 target, mode, /*endp=*/2);
3785 if (TREE_CODE (len) == INTEGER_CST)
3787 rtx len_rtx = expand_normal (len);
3789 if (GET_CODE (len_rtx) == CONST_INT)
3791 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3792 dst, src, target, mode);
3798 if (mode != VOIDmode)
3799 target = gen_reg_rtx (mode);
3801 target = gen_reg_rtx (GET_MODE (ret));
3803 if (GET_MODE (target) != GET_MODE (ret))
3804 ret = gen_lowpart (GET_MODE (target), ret);
3806 ret = plus_constant (ret, INTVAL (len_rtx));
3807 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3815 return expand_movstr (dst, src, target, /*endp=*/2);
3819 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3820 bytes from constant string DATA + OFFSET and return it as target
3824 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3825 enum machine_mode mode)
3827 const char *str = (const char *) data;
3829 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3832 return c_readstr (str + offset, mode);
3835 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3836 NULL_RTX if we failed the caller should emit a normal call. */
3839 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3841 tree fndecl = get_callee_fndecl (exp);
3843 if (validate_arglist (exp,
3844 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3846 tree dest = CALL_EXPR_ARG (exp, 0);
3847 tree src = CALL_EXPR_ARG (exp, 1);
3848 tree len = CALL_EXPR_ARG (exp, 2);
3849 tree slen = c_strlen (src, 1);
3850 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3854 while (TREE_CODE (result) == COMPOUND_EXPR)
3856 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3858 result = TREE_OPERAND (result, 1);
3860 return expand_expr (result, target, mode, EXPAND_NORMAL);
3863 /* We must be passed a constant len and src parameter. */
3864 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3867 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3869 /* We're required to pad with trailing zeros if the requested
3870 len is greater than strlen(s2)+1. In that case try to
3871 use store_by_pieces, if it fails, punt. */
3872 if (tree_int_cst_lt (slen, len))
3874 unsigned int dest_align
3875 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3876 const char *p = c_getstr (src);
3879 if (!p || dest_align == 0 || !host_integerp (len, 1)
3880 || !can_store_by_pieces (tree_low_cst (len, 1),
3881 builtin_strncpy_read_str,
3882 CONST_CAST (char *, p),
3886 dest_mem = get_memory_rtx (dest, len);
3887 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3888 builtin_strncpy_read_str,
3889 CONST_CAST (char *, p), dest_align, false, 0);
3890 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3891 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3898 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3899 bytes from constant string DATA + OFFSET and return it as target
3903 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3904 enum machine_mode mode)
3906 const char *c = (const char *) data;
3907 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3909 memset (p, *c, GET_MODE_SIZE (mode));
3911 return c_readstr (p, mode);
3914 /* Callback routine for store_by_pieces. Return the RTL of a register
3915 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3916 char value given in the RTL register data. For example, if mode is
3917 4 bytes wide, return the RTL for 0x01010101*data. */
3920 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3921 enum machine_mode mode)
3927 size = GET_MODE_SIZE (mode);
3931 p = XALLOCAVEC (char, size);
3932 memset (p, 1, size);
3933 coeff = c_readstr (p, mode);
3935 target = convert_to_mode (mode, (rtx) data, 1);
3936 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3937 return force_reg (mode, target);
3940 /* Expand expression EXP, which is a call to the memset builtin. Return
3941 NULL_RTX if we failed the caller should emit a normal call, otherwise
3942 try to get the result in TARGET, if convenient (and in mode MODE if that's
3946 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3948 if (!validate_arglist (exp,
3949 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3953 tree dest = CALL_EXPR_ARG (exp, 0);
3954 tree val = CALL_EXPR_ARG (exp, 1);
3955 tree len = CALL_EXPR_ARG (exp, 2);
3956 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3960 /* Helper function to do the actual work for expand_builtin_memset. The
3961 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3962 so that this can also be called without constructing an actual CALL_EXPR.
3963 The other arguments and return value are the same as for
3964 expand_builtin_memset. */
3967 expand_builtin_memset_args (tree dest, tree val, tree len,
3968 rtx target, enum machine_mode mode, tree orig_exp)
3971 enum built_in_function fcode;
3973 unsigned int dest_align;
3974 rtx dest_mem, dest_addr, len_rtx;
3975 HOST_WIDE_INT expected_size = -1;
3976 unsigned int expected_align = 0;
3977 tree_ann_common_t ann;
3979 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3981 /* If DEST is not a pointer type, don't do this operation in-line. */
3982 if (dest_align == 0)
3985 ann = tree_common_ann (orig_exp);
3987 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3989 if (expected_align < dest_align)
3990 expected_align = dest_align;
3992 /* If the LEN parameter is zero, return DEST. */
3993 if (integer_zerop (len))
3995 /* Evaluate and ignore VAL in case it has side-effects. */
3996 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3997 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4000 /* Stabilize the arguments in case we fail. */
4001 dest = builtin_save_expr (dest);
4002 val = builtin_save_expr (val);
4003 len = builtin_save_expr (len);
4005 len_rtx = expand_normal (len);
4006 dest_mem = get_memory_rtx (dest, len);
4008 if (TREE_CODE (val) != INTEGER_CST)
4012 val_rtx = expand_normal (val);
4013 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4016 /* Assume that we can memset by pieces if we can store
4017 * the coefficients by pieces (in the required modes).
4018 * We can't pass builtin_memset_gen_str as that emits RTL. */
4020 if (host_integerp (len, 1)
4021 && can_store_by_pieces (tree_low_cst (len, 1),
4022 builtin_memset_read_str, &c, dest_align,
4025 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4027 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4028 builtin_memset_gen_str, val_rtx, dest_align,
4031 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4032 dest_align, expected_align,
4036 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4037 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4041 if (target_char_cast (val, &c))
4046 if (host_integerp (len, 1)
4047 && can_store_by_pieces (tree_low_cst (len, 1),
4048 builtin_memset_read_str, &c, dest_align,
4050 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4051 builtin_memset_read_str, &c, dest_align, true, 0);
4052 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4053 dest_align, expected_align,
4057 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4058 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4062 set_mem_align (dest_mem, dest_align);
4063 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4064 CALL_EXPR_TAILCALL (orig_exp)
4065 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4066 expected_align, expected_size);
4070 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4071 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4077 fndecl = get_callee_fndecl (orig_exp);
4078 fcode = DECL_FUNCTION_CODE (fndecl);
4079 if (fcode == BUILT_IN_MEMSET)
4080 fn = build_call_expr (fndecl, 3, dest, val, len);
4081 else if (fcode == BUILT_IN_BZERO)
4082 fn = build_call_expr (fndecl, 2, dest, len);
4085 if (TREE_CODE (fn) == CALL_EXPR)
4086 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4087 return expand_call (fn, target, target == const0_rtx);
4090 /* Expand expression EXP, which is a call to the bzero builtin. Return
4091 NULL_RTX if we failed the caller should emit a normal call. */
4094 expand_builtin_bzero (tree exp)
4098 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4101 dest = CALL_EXPR_ARG (exp, 0);
4102 size = CALL_EXPR_ARG (exp, 1);
4104 /* New argument list transforming bzero(ptr x, int y) to
4105 memset(ptr x, int 0, size_t y). This is done this way
4106 so that if it isn't expanded inline, we fallback to
4107 calling bzero instead of memset. */
4109 return expand_builtin_memset_args (dest, integer_zero_node,
4110 fold_convert (sizetype, size),
4111 const0_rtx, VOIDmode, exp);
4114 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4115 caller should emit a normal call, otherwise try to get the result
4116 in TARGET, if convenient (and in mode MODE if that's convenient). */
4119 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4121 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4122 INTEGER_TYPE, VOID_TYPE))
4124 tree type = TREE_TYPE (exp);
4125 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4126 CALL_EXPR_ARG (exp, 1),
4127 CALL_EXPR_ARG (exp, 2), type);
4129 return expand_expr (result, target, mode, EXPAND_NORMAL);
4134 /* Expand expression EXP, which is a call to the memcmp built-in function.
4135 Return NULL_RTX if we failed and the
4136 caller should emit a normal call, otherwise try to get the result in
4137 TARGET, if convenient (and in mode MODE, if that's convenient). */
4140 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4142 if (!validate_arglist (exp,
4143 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4147 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4148 CALL_EXPR_ARG (exp, 1),
4149 CALL_EXPR_ARG (exp, 2));
4151 return expand_expr (result, target, mode, EXPAND_NORMAL);
4154 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4156 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4159 tree arg1 = CALL_EXPR_ARG (exp, 0);
4160 tree arg2 = CALL_EXPR_ARG (exp, 1);
4161 tree len = CALL_EXPR_ARG (exp, 2);
4164 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4166 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4167 enum machine_mode insn_mode;
4169 #ifdef HAVE_cmpmemsi
4171 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4174 #ifdef HAVE_cmpstrnsi
4176 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4181 /* If we don't have POINTER_TYPE, call the function. */
4182 if (arg1_align == 0 || arg2_align == 0)
4185 /* Make a place to write the result of the instruction. */
4188 && REG_P (result) && GET_MODE (result) == insn_mode
4189 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4190 result = gen_reg_rtx (insn_mode);
4192 arg1_rtx = get_memory_rtx (arg1, len);
4193 arg2_rtx = get_memory_rtx (arg2, len);
4194 arg3_rtx = expand_normal (fold_convert (sizetype, len));
4196 /* Set MEM_SIZE as appropriate. */
4197 if (GET_CODE (arg3_rtx) == CONST_INT)
4199 set_mem_size (arg1_rtx, arg3_rtx);
4200 set_mem_size (arg2_rtx, arg3_rtx);
4203 #ifdef HAVE_cmpmemsi
4205 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4206 GEN_INT (MIN (arg1_align, arg2_align)));
4209 #ifdef HAVE_cmpstrnsi
4211 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4212 GEN_INT (MIN (arg1_align, arg2_align)));
4220 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4221 TYPE_MODE (integer_type_node), 3,
4222 XEXP (arg1_rtx, 0), Pmode,
4223 XEXP (arg2_rtx, 0), Pmode,
4224 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4225 TYPE_UNSIGNED (sizetype)),
4226 TYPE_MODE (sizetype));
4228 /* Return the value in the proper mode for this function. */
4229 mode = TYPE_MODE (TREE_TYPE (exp));
4230 if (GET_MODE (result) == mode)
4232 else if (target != 0)
4234 convert_move (target, result, 0);
4238 return convert_to_mode (mode, result, 0);
4245 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4246 if we failed the caller should emit a normal call, otherwise try to get
4247 the result in TARGET, if convenient. */
4250 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4252 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4256 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4257 CALL_EXPR_ARG (exp, 1));
4259 return expand_expr (result, target, mode, EXPAND_NORMAL);
4262 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4263 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4264 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4266 rtx arg1_rtx, arg2_rtx;
4267 rtx result, insn = NULL_RTX;
4269 tree arg1 = CALL_EXPR_ARG (exp, 0);
4270 tree arg2 = CALL_EXPR_ARG (exp, 1);
4273 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4275 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4277 /* If we don't have POINTER_TYPE, call the function. */
4278 if (arg1_align == 0 || arg2_align == 0)
4281 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4282 arg1 = builtin_save_expr (arg1);
4283 arg2 = builtin_save_expr (arg2);
4285 arg1_rtx = get_memory_rtx (arg1, NULL);
4286 arg2_rtx = get_memory_rtx (arg2, NULL);
4288 #ifdef HAVE_cmpstrsi
4289 /* Try to call cmpstrsi. */
4292 enum machine_mode insn_mode
4293 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4295 /* Make a place to write the result of the instruction. */
4298 && REG_P (result) && GET_MODE (result) == insn_mode
4299 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4300 result = gen_reg_rtx (insn_mode);
4302 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4303 GEN_INT (MIN (arg1_align, arg2_align)));
4306 #ifdef HAVE_cmpstrnsi
4307 /* Try to determine at least one length and call cmpstrnsi. */
4308 if (!insn && HAVE_cmpstrnsi)
4313 enum machine_mode insn_mode
4314 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4315 tree len1 = c_strlen (arg1, 1);
4316 tree len2 = c_strlen (arg2, 1);
4319 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4321 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4323 /* If we don't have a constant length for the first, use the length
4324 of the second, if we know it. We don't require a constant for
4325 this case; some cost analysis could be done if both are available
4326 but neither is constant. For now, assume they're equally cheap,
4327 unless one has side effects. If both strings have constant lengths,
4334 else if (TREE_SIDE_EFFECTS (len1))
4336 else if (TREE_SIDE_EFFECTS (len2))
4338 else if (TREE_CODE (len1) != INTEGER_CST)
4340 else if (TREE_CODE (len2) != INTEGER_CST)
4342 else if (tree_int_cst_lt (len1, len2))
4347 /* If both arguments have side effects, we cannot optimize. */
4348 if (!len || TREE_SIDE_EFFECTS (len))
4351 arg3_rtx = expand_normal (len);
4353 /* Make a place to write the result of the instruction. */
4356 && REG_P (result) && GET_MODE (result) == insn_mode
4357 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4358 result = gen_reg_rtx (insn_mode);
4360 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4361 GEN_INT (MIN (arg1_align, arg2_align)));
4369 /* Return the value in the proper mode for this function. */
4370 mode = TYPE_MODE (TREE_TYPE (exp));
4371 if (GET_MODE (result) == mode)
4374 return convert_to_mode (mode, result, 0);
4375 convert_move (target, result, 0);
4379 /* Expand the library call ourselves using a stabilized argument
4380 list to avoid re-evaluating the function's arguments twice. */
4381 #ifdef HAVE_cmpstrnsi
4384 fndecl = get_callee_fndecl (exp);
4385 fn = build_call_expr (fndecl, 2, arg1, arg2);
4386 if (TREE_CODE (fn) == CALL_EXPR)
4387 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4388 return expand_call (fn, target, target == const0_rtx);
4394 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4395 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4396 the result in TARGET, if convenient. */
4399 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4401 if (!validate_arglist (exp,
4402 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4406 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4407 CALL_EXPR_ARG (exp, 1),
4408 CALL_EXPR_ARG (exp, 2));
4410 return expand_expr (result, target, mode, EXPAND_NORMAL);
4413 /* If c_strlen can determine an expression for one of the string
4414 lengths, and it doesn't have side effects, then emit cmpstrnsi
4415 using length MIN(strlen(string)+1, arg3). */
4416 #ifdef HAVE_cmpstrnsi
4419 tree len, len1, len2;
4420 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4423 tree arg1 = CALL_EXPR_ARG (exp, 0);
4424 tree arg2 = CALL_EXPR_ARG (exp, 1);
4425 tree arg3 = CALL_EXPR_ARG (exp, 2);
4428 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4430 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4431 enum machine_mode insn_mode
4432 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4434 len1 = c_strlen (arg1, 1);
4435 len2 = c_strlen (arg2, 1);
4438 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4440 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4442 /* If we don't have a constant length for the first, use the length
4443 of the second, if we know it. We don't require a constant for
4444 this case; some cost analysis could be done if both are available
4445 but neither is constant. For now, assume they're equally cheap,
4446 unless one has side effects. If both strings have constant lengths,
4453 else if (TREE_SIDE_EFFECTS (len1))
4455 else if (TREE_SIDE_EFFECTS (len2))
4457 else if (TREE_CODE (len1) != INTEGER_CST)
4459 else if (TREE_CODE (len2) != INTEGER_CST)
4461 else if (tree_int_cst_lt (len1, len2))
4466 /* If both arguments have side effects, we cannot optimize. */
4467 if (!len || TREE_SIDE_EFFECTS (len))
4470 /* The actual new length parameter is MIN(len,arg3). */
4471 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4472 fold_convert (TREE_TYPE (len), arg3));
4474 /* If we don't have POINTER_TYPE, call the function. */
4475 if (arg1_align == 0 || arg2_align == 0)
4478 /* Make a place to write the result of the instruction. */
4481 && REG_P (result) && GET_MODE (result) == insn_mode
4482 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4483 result = gen_reg_rtx (insn_mode);
4485 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4486 arg1 = builtin_save_expr (arg1);
4487 arg2 = builtin_save_expr (arg2);
4488 len = builtin_save_expr (len);
4490 arg1_rtx = get_memory_rtx (arg1, len);
4491 arg2_rtx = get_memory_rtx (arg2, len);
4492 arg3_rtx = expand_normal (len);
4493 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4494 GEN_INT (MIN (arg1_align, arg2_align)));
4499 /* Return the value in the proper mode for this function. */
4500 mode = TYPE_MODE (TREE_TYPE (exp));
4501 if (GET_MODE (result) == mode)
4504 return convert_to_mode (mode, result, 0);
4505 convert_move (target, result, 0);
4509 /* Expand the library call ourselves using a stabilized argument
4510 list to avoid re-evaluating the function's arguments twice. */
4511 fndecl = get_callee_fndecl (exp);
4512 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4513 if (TREE_CODE (fn) == CALL_EXPR)
4514 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4515 return expand_call (fn, target, target == const0_rtx);
4521 /* Expand expression EXP, which is a call to the strcat builtin.
4522 Return NULL_RTX if we failed the caller should emit a normal call,
4523 otherwise try to get the result in TARGET, if convenient. */
4526 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4528 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4532 tree dst = CALL_EXPR_ARG (exp, 0);
4533 tree src = CALL_EXPR_ARG (exp, 1);
4534 const char *p = c_getstr (src);
4536 /* If the string length is zero, return the dst parameter. */
4537 if (p && *p == '\0')
4538 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4540 if (optimize_insn_for_speed_p ())
4542 /* See if we can store by pieces into (dst + strlen(dst)). */
4543 tree newsrc, newdst,
4544 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4547 /* Stabilize the argument list. */
4548 newsrc = builtin_save_expr (src);
4549 dst = builtin_save_expr (dst);
4553 /* Create strlen (dst). */
4554 newdst = build_call_expr (strlen_fn, 1, dst);
4555 /* Create (dst p+ strlen (dst)). */
4557 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4558 newdst = builtin_save_expr (newdst);
4560 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4562 end_sequence (); /* Stop sequence. */
4566 /* Output the entire sequence. */
4567 insns = get_insns ();
4571 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4578 /* Expand expression EXP, which is a call to the strncat builtin.
4579 Return NULL_RTX if we failed the caller should emit a normal call,
4580 otherwise try to get the result in TARGET, if convenient. */
4583 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4585 if (validate_arglist (exp,
4586 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4588 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4589 CALL_EXPR_ARG (exp, 1),
4590 CALL_EXPR_ARG (exp, 2));
4592 return expand_expr (result, target, mode, EXPAND_NORMAL);
4597 /* Expand expression EXP, which is a call to the strspn builtin.
4598 Return NULL_RTX if we failed the caller should emit a normal call,
4599 otherwise try to get the result in TARGET, if convenient. */
4602 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4604 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4606 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4607 CALL_EXPR_ARG (exp, 1));
4609 return expand_expr (result, target, mode, EXPAND_NORMAL);
4614 /* Expand expression EXP, which is a call to the strcspn builtin.
4615 Return NULL_RTX if we failed the caller should emit a normal call,
4616 otherwise try to get the result in TARGET, if convenient. */
4619 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4621 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4623 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4624 CALL_EXPR_ARG (exp, 1));
4626 return expand_expr (result, target, mode, EXPAND_NORMAL);
4631 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4632 if that's convenient. */
4635 expand_builtin_saveregs (void)
4639 /* Don't do __builtin_saveregs more than once in a function.
4640 Save the result of the first call and reuse it. */
4641 if (saveregs_value != 0)
4642 return saveregs_value;
4644 /* When this function is called, it means that registers must be
4645 saved on entry to this function. So we migrate the call to the
4646 first insn of this function. */
4650 /* Do whatever the machine needs done in this case. */
4651 val = targetm.calls.expand_builtin_saveregs ();
4656 saveregs_value = val;
4658 /* Put the insns after the NOTE that starts the function. If this
4659 is inside a start_sequence, make the outer-level insn chain current, so
4660 the code is placed at the start of the function. */
4661 push_topmost_sequence ();
4662 emit_insn_after (seq, entry_of_function ());
4663 pop_topmost_sequence ();
4668 /* __builtin_args_info (N) returns word N of the arg space info
4669 for the current function. The number and meanings of words
4670 is controlled by the definition of CUMULATIVE_ARGS. */
4673 expand_builtin_args_info (tree exp)
4675 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4676 int *word_ptr = (int *) &crtl->args.info;
4678 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4680 if (call_expr_nargs (exp) != 0)
4682 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4683 error ("argument of %<__builtin_args_info%> must be constant");
4686 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4688 if (wordnum < 0 || wordnum >= nwords)
4689 error ("argument of %<__builtin_args_info%> out of range");
4691 return GEN_INT (word_ptr[wordnum]);
4695 error ("missing argument in %<__builtin_args_info%>");
4700 /* Expand a call to __builtin_next_arg. */
4703 expand_builtin_next_arg (void)
4705 /* Checking arguments is already done in fold_builtin_next_arg
4706 that must be called before this function. */
4707 return expand_binop (ptr_mode, add_optab,
4708 crtl->args.internal_arg_pointer,
4709 crtl->args.arg_offset_rtx,
4710 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4713 /* Make it easier for the backends by protecting the valist argument
4714 from multiple evaluations. */
4717 stabilize_va_list (tree valist, int needs_lvalue)
4719 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4721 gcc_assert (vatype != NULL_TREE);
4723 if (TREE_CODE (vatype) == ARRAY_TYPE)
4725 if (TREE_SIDE_EFFECTS (valist))
4726 valist = save_expr (valist);
4728 /* For this case, the backends will be expecting a pointer to
4729 vatype, but it's possible we've actually been given an array
4730 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4732 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4734 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4735 valist = build_fold_addr_expr_with_type (valist, p1);
4744 if (! TREE_SIDE_EFFECTS (valist))
4747 pt = build_pointer_type (vatype);
4748 valist = fold_build1 (ADDR_EXPR, pt, valist);
4749 TREE_SIDE_EFFECTS (valist) = 1;
4752 if (TREE_SIDE_EFFECTS (valist))
4753 valist = save_expr (valist);
4754 valist = build_fold_indirect_ref (valist);
4760 /* The "standard" definition of va_list is void*. */
4763 std_build_builtin_va_list (void)
4765 return ptr_type_node;
4768 /* The "standard" abi va_list is va_list_type_node. */
4771 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4773 return va_list_type_node;
4776 /* The "standard" type of va_list is va_list_type_node. */
4779 std_canonical_va_list_type (tree type)
4783 if (INDIRECT_REF_P (type))
4784 type = TREE_TYPE (type);
4785 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4786 type = TREE_TYPE (type);
4787 wtype = va_list_type_node;
4789 /* Treat structure va_list types. */
4790 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4791 htype = TREE_TYPE (htype);
4792 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4794 /* If va_list is an array type, the argument may have decayed
4795 to a pointer type, e.g. by being passed to another function.
4796 In that case, unwrap both types so that we can compare the
4797 underlying records. */
4798 if (TREE_CODE (htype) == ARRAY_TYPE
4799 || POINTER_TYPE_P (htype))
4801 wtype = TREE_TYPE (wtype);
4802 htype = TREE_TYPE (htype);
4805 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4806 return va_list_type_node;
4811 /* The "standard" implementation of va_start: just assign `nextarg' to
4815 std_expand_builtin_va_start (tree valist, rtx nextarg)
4817 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4818 convert_move (va_r, nextarg, 0);
4821 /* Expand EXP, a call to __builtin_va_start. */
4824 expand_builtin_va_start (tree exp)
4829 if (call_expr_nargs (exp) < 2)
4831 error ("too few arguments to function %<va_start%>");
4835 if (fold_builtin_next_arg (exp, true))
4838 nextarg = expand_builtin_next_arg ();
4839 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4841 if (targetm.expand_builtin_va_start)
4842 targetm.expand_builtin_va_start (valist, nextarg);
4844 std_expand_builtin_va_start (valist, nextarg);
4849 /* The "standard" implementation of va_arg: read the value from the
4850 current (padded) address and increment by the (padded) size. */
4853 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4856 tree addr, t, type_size, rounded_size, valist_tmp;
4857 unsigned HOST_WIDE_INT align, boundary;
4860 #ifdef ARGS_GROW_DOWNWARD
4861 /* All of the alignment and movement below is for args-grow-up machines.
4862 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4863 implement their own specialized gimplify_va_arg_expr routines. */
4867 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4869 type = build_pointer_type (type);
4871 align = PARM_BOUNDARY / BITS_PER_UNIT;
4872 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4874 /* When we align parameter on stack for caller, if the parameter
4875 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4876 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4877 here with caller. */
4878 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4879 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4881 boundary /= BITS_PER_UNIT;
4883 /* Hoist the valist value into a temporary for the moment. */
4884 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4886 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4887 requires greater alignment, we must perform dynamic alignment. */
4888 if (boundary > align
4889 && !integer_zerop (TYPE_SIZE (type)))
4891 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4892 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4893 valist_tmp, size_int (boundary - 1)));
4894 gimplify_and_add (t, pre_p);
4896 t = fold_convert (sizetype, valist_tmp);
4897 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4898 fold_convert (TREE_TYPE (valist),
4899 fold_build2 (BIT_AND_EXPR, sizetype, t,
4900 size_int (-boundary))));
4901 gimplify_and_add (t, pre_p);
4906 /* If the actual alignment is less than the alignment of the type,
4907 adjust the type accordingly so that we don't assume strict alignment
4908 when dereferencing the pointer. */
4909 boundary *= BITS_PER_UNIT;
4910 if (boundary < TYPE_ALIGN (type))
4912 type = build_variant_type_copy (type);
4913 TYPE_ALIGN (type) = boundary;
4916 /* Compute the rounded size of the type. */
4917 type_size = size_in_bytes (type);
4918 rounded_size = round_up (type_size, align);
4920 /* Reduce rounded_size so it's sharable with the postqueue. */
4921 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4925 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4927 /* Small args are padded downward. */
4928 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4929 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4930 size_binop (MINUS_EXPR, rounded_size, type_size));
4931 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4934 /* Compute new value for AP. */
4935 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4936 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4937 gimplify_and_add (t, pre_p);
4939 addr = fold_convert (build_pointer_type (type), addr);
4942 addr = build_va_arg_indirect_ref (addr);
4944 return build_va_arg_indirect_ref (addr);
4947 /* Build an indirect-ref expression over the given TREE, which represents a
4948 piece of a va_arg() expansion. */
4950 build_va_arg_indirect_ref (tree addr)
4952 addr = build_fold_indirect_ref (addr);
4954 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4960 /* Return a dummy expression of type TYPE in order to keep going after an
4964 dummy_object (tree type)
4966 tree t = build_int_cst (build_pointer_type (type), 0);
4967 return build1 (INDIRECT_REF, type, t);
4970 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4971 builtin function, but a very special sort of operator. */
4973 enum gimplify_status
4974 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4976 tree promoted_type, have_va_type;
4977 tree valist = TREE_OPERAND (*expr_p, 0);
4978 tree type = TREE_TYPE (*expr_p);
4981 /* Verify that valist is of the proper type. */
4982 have_va_type = TREE_TYPE (valist);
4983 if (have_va_type == error_mark_node)
4985 have_va_type = targetm.canonical_va_list_type (have_va_type);
4987 if (have_va_type == NULL_TREE)
4989 error ("first argument to %<va_arg%> not of type %<va_list%>");
4993 /* Generate a diagnostic for requesting data of a type that cannot
4994 be passed through `...' due to type promotion at the call site. */
4995 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4998 static bool gave_help;
5001 /* Unfortunately, this is merely undefined, rather than a constraint
5002 violation, so we cannot make this an error. If this call is never
5003 executed, the program is still strictly conforming. */
5004 warned = warning (0, "%qT is promoted to %qT when passed through %<...%>",
5005 type, promoted_type);
5006 if (!gave_help && warned)
5009 inform (input_location, "(so you should pass %qT not %qT to %<va_arg%>)",
5010 promoted_type, type);
5013 /* We can, however, treat "undefined" any way we please.
5014 Call abort to encourage the user to fix the program. */
5016 inform (input_location, "if this code is reached, the program will abort");
5017 /* Before the abort, allow the evaluation of the va_list
5018 expression to exit or longjmp. */
5019 gimplify_and_add (valist, pre_p);
5020 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
5021 gimplify_and_add (t, pre_p);
5023 /* This is dead code, but go ahead and finish so that the
5024 mode of the result comes out right. */
5025 *expr_p = dummy_object (type);
5030 /* Make it easier for the backends by protecting the valist argument
5031 from multiple evaluations. */
5032 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5034 /* For this case, the backends will be expecting a pointer to
5035 TREE_TYPE (abi), but it's possible we've
5036 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5038 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5040 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5041 valist = build_fold_addr_expr_with_type (valist, p1);
5044 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5047 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5049 if (!targetm.gimplify_va_arg_expr)
5050 /* FIXME: Once most targets are converted we should merely
5051 assert this is non-null. */
5054 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5059 /* Expand EXP, a call to __builtin_va_end. */
5062 expand_builtin_va_end (tree exp)
5064 tree valist = CALL_EXPR_ARG (exp, 0);
5066 /* Evaluate for side effects, if needed. I hate macros that don't
5068 if (TREE_SIDE_EFFECTS (valist))
5069 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5074 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5075 builtin rather than just as an assignment in stdarg.h because of the
5076 nastiness of array-type va_list types. */
5079 expand_builtin_va_copy (tree exp)
5083 dst = CALL_EXPR_ARG (exp, 0);
5084 src = CALL_EXPR_ARG (exp, 1);
5086 dst = stabilize_va_list (dst, 1);
5087 src = stabilize_va_list (src, 0);
5089 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5091 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5093 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5094 TREE_SIDE_EFFECTS (t) = 1;
5095 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5099 rtx dstb, srcb, size;
5101 /* Evaluate to pointers. */
5102 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5103 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5104 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5105 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5107 dstb = convert_memory_address (Pmode, dstb);
5108 srcb = convert_memory_address (Pmode, srcb);
5110 /* "Dereference" to BLKmode memories. */
5111 dstb = gen_rtx_MEM (BLKmode, dstb);
5112 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5113 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5114 srcb = gen_rtx_MEM (BLKmode, srcb);
5115 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5116 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5119 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5125 /* Expand a call to one of the builtin functions __builtin_frame_address or
5126 __builtin_return_address. */
5129 expand_builtin_frame_address (tree fndecl, tree exp)
5131 /* The argument must be a nonnegative integer constant.
5132 It counts the number of frames to scan up the stack.
5133 The value is the return address saved in that frame. */
5134 if (call_expr_nargs (exp) == 0)
5135 /* Warning about missing arg was already issued. */
5137 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5139 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5140 error ("invalid argument to %<__builtin_frame_address%>");
5142 error ("invalid argument to %<__builtin_return_address%>");
5148 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5149 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5151 /* Some ports cannot access arbitrary stack frames. */
5154 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5155 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5157 warning (0, "unsupported argument to %<__builtin_return_address%>");
5161 /* For __builtin_frame_address, return what we've got. */
5162 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5166 && ! CONSTANT_P (tem))
5167 tem = copy_to_mode_reg (Pmode, tem);
5172 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5173 we failed and the caller should emit a normal call, otherwise try to get
5174 the result in TARGET, if convenient. */
5177 expand_builtin_alloca (tree exp, rtx target)
5182 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5183 should always expand to function calls. These can be intercepted
5188 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5191 /* Compute the argument. */
5192 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5194 /* Allocate the desired space. */
5195 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5196 result = convert_memory_address (ptr_mode, result);
5201 /* Expand a call to a bswap builtin with argument ARG0. MODE
5202 is the mode to expand with. */
5205 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5207 enum machine_mode mode;
5211 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5214 arg = CALL_EXPR_ARG (exp, 0);
5215 mode = TYPE_MODE (TREE_TYPE (arg));
5216 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5218 target = expand_unop (mode, bswap_optab, op0, target, 1);
5220 gcc_assert (target);
5222 return convert_to_mode (mode, target, 0);
5225 /* Expand a call to a unary builtin in EXP.
5226 Return NULL_RTX if a normal call should be emitted rather than expanding the
5227 function in-line. If convenient, the result should be placed in TARGET.
5228 SUBTARGET may be used as the target for computing one of EXP's operands. */
5231 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5232 rtx subtarget, optab op_optab)
5236 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5239 /* Compute the argument. */
5240 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5241 VOIDmode, EXPAND_NORMAL);
5242 /* Compute op, into TARGET if possible.
5243 Set TARGET to wherever the result comes back. */
5244 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5245 op_optab, op0, target, 1);
5246 gcc_assert (target);
5248 return convert_to_mode (target_mode, target, 0);
5251 /* If the string passed to fputs is a constant and is one character
5252 long, we attempt to transform this call into __builtin_fputc(). */
5255 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5257 /* Verify the arguments in the original call. */
5258 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5260 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5261 CALL_EXPR_ARG (exp, 1),
5262 (target == const0_rtx),
5263 unlocked, NULL_TREE);
5265 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5270 /* Expand a call to __builtin_expect. We just return our argument
5271 as the builtin_expect semantic should've been already executed by
5272 tree branch prediction pass. */
5275 expand_builtin_expect (tree exp, rtx target)
5279 if (call_expr_nargs (exp) < 2)
5281 arg = CALL_EXPR_ARG (exp, 0);
5282 c = CALL_EXPR_ARG (exp, 1);
5284 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5285 /* When guessing was done, the hints should be already stripped away. */
5286 gcc_assert (!flag_guess_branch_prob
5287 || optimize == 0 || errorcount || sorrycount);
5292 expand_builtin_trap (void)
5296 emit_insn (gen_trap ());
5299 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5303 /* Expand EXP, a call to fabs, fabsf or fabsl.
5304 Return NULL_RTX if a normal call should be emitted rather than expanding
5305 the function inline. If convenient, the result should be placed
5306 in TARGET. SUBTARGET may be used as the target for computing
5310 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5312 enum machine_mode mode;
5316 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5319 arg = CALL_EXPR_ARG (exp, 0);
5320 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5321 mode = TYPE_MODE (TREE_TYPE (arg));
5322 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5323 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5326 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5327 Return NULL is a normal call should be emitted rather than expanding the
5328 function inline. If convenient, the result should be placed in TARGET.
5329 SUBTARGET may be used as the target for computing the operand. */
5332 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5337 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5340 arg = CALL_EXPR_ARG (exp, 0);
5341 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5343 arg = CALL_EXPR_ARG (exp, 1);
5344 op1 = expand_normal (arg);
5346 return expand_copysign (op0, op1, target);
5349 /* Create a new constant string literal and return a char* pointer to it.
5350 The STRING_CST value is the LEN characters at STR. */
5352 build_string_literal (int len, const char *str)
5354 tree t, elem, index, type;
5356 t = build_string (len, str);
5357 elem = build_type_variant (char_type_node, 1, 0);
5358 index = build_index_type (size_int (len - 1));
5359 type = build_array_type (elem, index);
5360 TREE_TYPE (t) = type;
5361 TREE_CONSTANT (t) = 1;
5362 TREE_READONLY (t) = 1;
5363 TREE_STATIC (t) = 1;
5365 type = build_pointer_type (elem);
5366 t = build1 (ADDR_EXPR, type,
5367 build4 (ARRAY_REF, elem,
5368 t, integer_zero_node, NULL_TREE, NULL_TREE));
5372 /* Expand EXP, a call to printf or printf_unlocked.
5373 Return NULL_RTX if a normal call should be emitted rather than transforming
5374 the function inline. If convenient, the result should be placed in
5375 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5378 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5381 /* If we're using an unlocked function, assume the other unlocked
5382 functions exist explicitly. */
5383 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5384 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5385 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5386 : implicit_built_in_decls[BUILT_IN_PUTS];
5387 const char *fmt_str;
5390 int nargs = call_expr_nargs (exp);
5392 /* If the return value is used, don't do the transformation. */
5393 if (target != const0_rtx)
5396 /* Verify the required arguments in the original call. */
5399 fmt = CALL_EXPR_ARG (exp, 0);
5400 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5403 /* Check whether the format is a literal string constant. */
5404 fmt_str = c_getstr (fmt);
5405 if (fmt_str == NULL)
5408 if (!init_target_chars ())
5411 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5412 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5415 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5418 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5420 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5421 else if (strcmp (fmt_str, target_percent_c) == 0)
5424 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5427 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5431 /* We can't handle anything else with % args or %% ... yet. */
5432 if (strchr (fmt_str, target_percent))
5438 /* If the format specifier was "", printf does nothing. */
5439 if (fmt_str[0] == '\0')
5441 /* If the format specifier has length of 1, call putchar. */
5442 if (fmt_str[1] == '\0')
5444 /* Given printf("c"), (where c is any one character,)
5445 convert "c"[0] to an int and pass that to the replacement
5447 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5449 fn = build_call_expr (fn_putchar, 1, arg);
5453 /* If the format specifier was "string\n", call puts("string"). */
5454 size_t len = strlen (fmt_str);
5455 if ((unsigned char)fmt_str[len - 1] == target_newline)
5457 /* Create a NUL-terminated string that's one char shorter
5458 than the original, stripping off the trailing '\n'. */
5459 char *newstr = XALLOCAVEC (char, len);
5460 memcpy (newstr, fmt_str, len - 1);
5461 newstr[len - 1] = 0;
5462 arg = build_string_literal (len, newstr);
5464 fn = build_call_expr (fn_puts, 1, arg);
5467 /* We'd like to arrange to call fputs(string,stdout) here,
5468 but we need stdout and don't have a way to get it yet. */
5475 if (TREE_CODE (fn) == CALL_EXPR)
5476 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5477 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5480 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5481 Return NULL_RTX if a normal call should be emitted rather than transforming
5482 the function inline. If convenient, the result should be placed in
5483 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5486 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5489 /* If we're using an unlocked function, assume the other unlocked
5490 functions exist explicitly. */
5491 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5492 : implicit_built_in_decls[BUILT_IN_FPUTC];
5493 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5494 : implicit_built_in_decls[BUILT_IN_FPUTS];
5495 const char *fmt_str;
5498 int nargs = call_expr_nargs (exp);
5500 /* If the return value is used, don't do the transformation. */
5501 if (target != const0_rtx)
5504 /* Verify the required arguments in the original call. */
5507 fp = CALL_EXPR_ARG (exp, 0);
5508 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5510 fmt = CALL_EXPR_ARG (exp, 1);
5511 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5514 /* Check whether the format is a literal string constant. */
5515 fmt_str = c_getstr (fmt);
5516 if (fmt_str == NULL)
5519 if (!init_target_chars ())
5522 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5523 if (strcmp (fmt_str, target_percent_s) == 0)
5526 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5528 arg = CALL_EXPR_ARG (exp, 2);
5530 fn = build_call_expr (fn_fputs, 2, arg, fp);
5532 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5533 else if (strcmp (fmt_str, target_percent_c) == 0)
5536 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5538 arg = CALL_EXPR_ARG (exp, 2);
5540 fn = build_call_expr (fn_fputc, 2, arg, fp);
5544 /* We can't handle anything else with % args or %% ... yet. */
5545 if (strchr (fmt_str, target_percent))
5551 /* If the format specifier was "", fprintf does nothing. */
5552 if (fmt_str[0] == '\0')
5554 /* Evaluate and ignore FILE* argument for side-effects. */
5555 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5559 /* When "string" doesn't contain %, replace all cases of
5560 fprintf(stream,string) with fputs(string,stream). The fputs
5561 builtin will take care of special cases like length == 1. */
5563 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5568 if (TREE_CODE (fn) == CALL_EXPR)
5569 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5570 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5573 /* Expand a call EXP to sprintf. Return NULL_RTX if
5574 a normal call should be emitted rather than expanding the function
5575 inline. If convenient, the result should be placed in TARGET with
5579 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5582 const char *fmt_str;
5583 int nargs = call_expr_nargs (exp);
5585 /* Verify the required arguments in the original call. */
5588 dest = CALL_EXPR_ARG (exp, 0);
5589 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5591 fmt = CALL_EXPR_ARG (exp, 0);
5592 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5595 /* Check whether the format is a literal string constant. */
5596 fmt_str = c_getstr (fmt);
5597 if (fmt_str == NULL)
5600 if (!init_target_chars ())
5603 /* If the format doesn't contain % args or %%, use strcpy. */
5604 if (strchr (fmt_str, target_percent) == 0)
5606 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5609 if ((nargs > 2) || ! fn)
5611 expand_expr (build_call_expr (fn, 2, dest, fmt),
5612 const0_rtx, VOIDmode, EXPAND_NORMAL);
5613 if (target == const0_rtx)
5615 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5616 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5618 /* If the format is "%s", use strcpy if the result isn't used. */
5619 else if (strcmp (fmt_str, target_percent_s) == 0)
5622 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5628 arg = CALL_EXPR_ARG (exp, 2);
5629 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5632 if (target != const0_rtx)
5634 len = c_strlen (arg, 1);
5635 if (! len || TREE_CODE (len) != INTEGER_CST)
5641 expand_expr (build_call_expr (fn, 2, dest, arg),
5642 const0_rtx, VOIDmode, EXPAND_NORMAL);
5644 if (target == const0_rtx)
5646 return expand_expr (len, target, mode, EXPAND_NORMAL);
5652 /* Expand a call to either the entry or exit function profiler. */
5655 expand_builtin_profile_func (bool exitp)
5657 rtx this_rtx, which;
5659 this_rtx = DECL_RTL (current_function_decl);
5660 gcc_assert (MEM_P (this_rtx));
5661 this_rtx = XEXP (this_rtx, 0);
5664 which = profile_function_exit_libfunc;
5666 which = profile_function_entry_libfunc;
5668 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5669 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5676 /* Expand a call to __builtin___clear_cache. */
5679 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5681 #ifndef HAVE_clear_cache
5682 #ifdef CLEAR_INSN_CACHE
5683 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5684 does something. Just do the default expansion to a call to
5688 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5689 does nothing. There is no need to call it. Do nothing. */
5691 #endif /* CLEAR_INSN_CACHE */
5693 /* We have a "clear_cache" insn, and it will handle everything. */
5695 rtx begin_rtx, end_rtx;
5696 enum insn_code icode;
5698 /* We must not expand to a library call. If we did, any
5699 fallback library function in libgcc that might contain a call to
5700 __builtin___clear_cache() would recurse infinitely. */
5701 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5703 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5707 if (HAVE_clear_cache)
5709 icode = CODE_FOR_clear_cache;
5711 begin = CALL_EXPR_ARG (exp, 0);
5712 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5713 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5714 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5715 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5717 end = CALL_EXPR_ARG (exp, 1);
5718 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5719 end_rtx = convert_memory_address (Pmode, end_rtx);
5720 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5721 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5723 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5726 #endif /* HAVE_clear_cache */
5729 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5732 round_trampoline_addr (rtx tramp)
5734 rtx temp, addend, mask;
5736 /* If we don't need too much alignment, we'll have been guaranteed
5737 proper alignment by get_trampoline_type. */
5738 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5741 /* Round address up to desired boundary. */
5742 temp = gen_reg_rtx (Pmode);
5743 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5744 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5746 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5747 temp, 0, OPTAB_LIB_WIDEN);
5748 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5749 temp, 0, OPTAB_LIB_WIDEN);
5755 expand_builtin_init_trampoline (tree exp)
5757 tree t_tramp, t_func, t_chain;
5758 rtx r_tramp, r_func, r_chain;
5759 #ifdef TRAMPOLINE_TEMPLATE
5763 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5764 POINTER_TYPE, VOID_TYPE))
5767 t_tramp = CALL_EXPR_ARG (exp, 0);
5768 t_func = CALL_EXPR_ARG (exp, 1);
5769 t_chain = CALL_EXPR_ARG (exp, 2);
5771 r_tramp = expand_normal (t_tramp);
5772 r_func = expand_normal (t_func);
5773 r_chain = expand_normal (t_chain);
5775 /* Generate insns to initialize the trampoline. */
5776 r_tramp = round_trampoline_addr (r_tramp);
5777 #ifdef TRAMPOLINE_TEMPLATE
5778 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5779 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5780 emit_block_move (blktramp, assemble_trampoline_template (),
5781 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5783 trampolines_created = 1;
5784 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5790 expand_builtin_adjust_trampoline (tree exp)
5794 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5797 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5798 tramp = round_trampoline_addr (tramp);
5799 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5800 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5806 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5807 function. The function first checks whether the back end provides
5808 an insn to implement signbit for the respective mode. If not, it
5809 checks whether the floating point format of the value is such that
5810 the sign bit can be extracted. If that is not the case, the
5811 function returns NULL_RTX to indicate that a normal call should be
5812 emitted rather than expanding the function in-line. EXP is the
5813 expression that is a call to the builtin function; if convenient,
5814 the result should be placed in TARGET. */
5816 expand_builtin_signbit (tree exp, rtx target)
5818 const struct real_format *fmt;
5819 enum machine_mode fmode, imode, rmode;
5820 HOST_WIDE_INT hi, lo;
5823 enum insn_code icode;
5826 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5829 arg = CALL_EXPR_ARG (exp, 0);
5830 fmode = TYPE_MODE (TREE_TYPE (arg));
5831 rmode = TYPE_MODE (TREE_TYPE (exp));
5832 fmt = REAL_MODE_FORMAT (fmode);
5834 arg = builtin_save_expr (arg);
5836 /* Expand the argument yielding a RTX expression. */
5837 temp = expand_normal (arg);
5839 /* Check if the back end provides an insn that handles signbit for the
5841 icode = signbit_optab->handlers [(int) fmode].insn_code;
5842 if (icode != CODE_FOR_nothing)
5844 rtx last = get_last_insn ();
5845 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5846 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5848 delete_insns_since (last);
5851 /* For floating point formats without a sign bit, implement signbit
5853 bitpos = fmt->signbit_ro;
5856 /* But we can't do this if the format supports signed zero. */
5857 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5860 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5861 build_real (TREE_TYPE (arg), dconst0));
5862 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5865 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5867 imode = int_mode_for_mode (fmode);
5868 if (imode == BLKmode)
5870 temp = gen_lowpart (imode, temp);
5875 /* Handle targets with different FP word orders. */
5876 if (FLOAT_WORDS_BIG_ENDIAN)
5877 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5879 word = bitpos / BITS_PER_WORD;
5880 temp = operand_subword_force (temp, word, fmode);
5881 bitpos = bitpos % BITS_PER_WORD;
5884 /* Force the intermediate word_mode (or narrower) result into a
5885 register. This avoids attempting to create paradoxical SUBREGs
5886 of floating point modes below. */
5887 temp = force_reg (imode, temp);
5889 /* If the bitpos is within the "result mode" lowpart, the operation
5890 can be implement with a single bitwise AND. Otherwise, we need
5891 a right shift and an AND. */
5893 if (bitpos < GET_MODE_BITSIZE (rmode))
5895 if (bitpos < HOST_BITS_PER_WIDE_INT)
5898 lo = (HOST_WIDE_INT) 1 << bitpos;
5902 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5906 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5907 temp = gen_lowpart (rmode, temp);
5908 temp = expand_binop (rmode, and_optab, temp,
5909 immed_double_const (lo, hi, rmode),
5910 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5914 /* Perform a logical right shift to place the signbit in the least
5915 significant bit, then truncate the result to the desired mode
5916 and mask just this bit. */
5917 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5918 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5919 temp = gen_lowpart (rmode, temp);
5920 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5921 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5927 /* Expand fork or exec calls. TARGET is the desired target of the
5928 call. EXP is the call. FN is the
5929 identificator of the actual function. IGNORE is nonzero if the
5930 value is to be ignored. */
5933 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5938 /* If we are not profiling, just call the function. */
5939 if (!profile_arc_flag)
5942 /* Otherwise call the wrapper. This should be equivalent for the rest of
5943 compiler, so the code does not diverge, and the wrapper may run the
5944 code necessary for keeping the profiling sane. */
5946 switch (DECL_FUNCTION_CODE (fn))
5949 id = get_identifier ("__gcov_fork");
5952 case BUILT_IN_EXECL:
5953 id = get_identifier ("__gcov_execl");
5956 case BUILT_IN_EXECV:
5957 id = get_identifier ("__gcov_execv");
5960 case BUILT_IN_EXECLP:
5961 id = get_identifier ("__gcov_execlp");
5964 case BUILT_IN_EXECLE:
5965 id = get_identifier ("__gcov_execle");
5968 case BUILT_IN_EXECVP:
5969 id = get_identifier ("__gcov_execvp");
5972 case BUILT_IN_EXECVE:
5973 id = get_identifier ("__gcov_execve");
5980 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5981 DECL_EXTERNAL (decl) = 1;
5982 TREE_PUBLIC (decl) = 1;
5983 DECL_ARTIFICIAL (decl) = 1;
5984 TREE_NOTHROW (decl) = 1;
5985 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5986 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5987 call = rewrite_call_expr (exp, 0, decl, 0);
5988 return expand_call (call, target, ignore);
5993 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5994 the pointer in these functions is void*, the tree optimizers may remove
5995 casts. The mode computed in expand_builtin isn't reliable either, due
5996 to __sync_bool_compare_and_swap.
5998 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5999 group of builtins. This gives us log2 of the mode size. */
6001 static inline enum machine_mode
6002 get_builtin_sync_mode (int fcode_diff)
6004 /* The size is not negotiable, so ask not to get BLKmode in return
6005 if the target indicates that a smaller size would be better. */
6006 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
6009 /* Expand the memory expression LOC and return the appropriate memory operand
6010 for the builtin_sync operations. */
6013 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6017 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6019 /* Note that we explicitly do not want any alias information for this
6020 memory, so that we kill all other live memories. Otherwise we don't
6021 satisfy the full barrier semantics of the intrinsic. */
6022 mem = validize_mem (gen_rtx_MEM (mode, addr));
6024 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6025 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6026 MEM_VOLATILE_P (mem) = 1;
6031 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6032 EXP is the CALL_EXPR. CODE is the rtx code
6033 that corresponds to the arithmetic or logical operation from the name;
6034 an exception here is that NOT actually means NAND. TARGET is an optional
6035 place for us to store the results; AFTER is true if this is the
6036 fetch_and_xxx form. IGNORE is true if we don't actually care about
6037 the result of the operation at all. */
6040 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6041 enum rtx_code code, bool after,
6042 rtx target, bool ignore)
6045 enum machine_mode old_mode;
6047 if (code == NOT && warn_sync_nand)
6049 tree fndecl = get_callee_fndecl (exp);
6050 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6052 static bool warned_f_a_n, warned_n_a_f;
6056 case BUILT_IN_FETCH_AND_NAND_1:
6057 case BUILT_IN_FETCH_AND_NAND_2:
6058 case BUILT_IN_FETCH_AND_NAND_4:
6059 case BUILT_IN_FETCH_AND_NAND_8:
6060 case BUILT_IN_FETCH_AND_NAND_16:
6065 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6066 inform (input_location,
6067 "%qD changed semantics in GCC 4.4", fndecl);
6068 warned_f_a_n = true;
6071 case BUILT_IN_NAND_AND_FETCH_1:
6072 case BUILT_IN_NAND_AND_FETCH_2:
6073 case BUILT_IN_NAND_AND_FETCH_4:
6074 case BUILT_IN_NAND_AND_FETCH_8:
6075 case BUILT_IN_NAND_AND_FETCH_16:
6080 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6081 inform (input_location,
6082 "%qD changed semantics in GCC 4.4", fndecl);
6083 warned_n_a_f = true;
6091 /* Expand the operands. */
6092 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6094 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6095 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6096 of CONST_INTs, where we know the old_mode only from the call argument. */
6097 old_mode = GET_MODE (val);
6098 if (old_mode == VOIDmode)
6099 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6100 val = convert_modes (mode, old_mode, val, 1);
6103 return expand_sync_operation (mem, val, code);
6105 return expand_sync_fetch_operation (mem, val, code, after, target);
6108 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6109 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6110 true if this is the boolean form. TARGET is a place for us to store the
6111 results; this is NOT optional if IS_BOOL is true. */
6114 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6115 bool is_bool, rtx target)
6117 rtx old_val, new_val, mem;
6118 enum machine_mode old_mode;
6120 /* Expand the operands. */
6121 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6124 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6125 mode, EXPAND_NORMAL);
6126 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6127 of CONST_INTs, where we know the old_mode only from the call argument. */
6128 old_mode = GET_MODE (old_val);
6129 if (old_mode == VOIDmode)
6130 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6131 old_val = convert_modes (mode, old_mode, old_val, 1);
6133 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6134 mode, EXPAND_NORMAL);
6135 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6136 of CONST_INTs, where we know the old_mode only from the call argument. */
6137 old_mode = GET_MODE (new_val);
6138 if (old_mode == VOIDmode)
6139 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6140 new_val = convert_modes (mode, old_mode, new_val, 1);
6143 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6145 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6148 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6149 general form is actually an atomic exchange, and some targets only
6150 support a reduced form with the second argument being a constant 1.
6151 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6155 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6159 enum machine_mode old_mode;
6161 /* Expand the operands. */
6162 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6163 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6164 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6165 of CONST_INTs, where we know the old_mode only from the call argument. */
6166 old_mode = GET_MODE (val);
6167 if (old_mode == VOIDmode)
6168 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6169 val = convert_modes (mode, old_mode, val, 1);
6171 return expand_sync_lock_test_and_set (mem, val, target);
6174 /* Expand the __sync_synchronize intrinsic. */
6177 expand_builtin_synchronize (void)
6181 #ifdef HAVE_memory_barrier
6182 if (HAVE_memory_barrier)
6184 emit_insn (gen_memory_barrier ());
6189 if (synchronize_libfunc != NULL_RTX)
6191 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6195 /* If no explicit memory barrier instruction is available, create an
6196 empty asm stmt with a memory clobber. */
6197 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6198 tree_cons (NULL, build_string (6, "memory"), NULL));
6199 ASM_VOLATILE_P (x) = 1;
6200 expand_asm_expr (x);
6203 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6206 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6208 enum insn_code icode;
6210 rtx val = const0_rtx;
6212 /* Expand the operands. */
6213 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6215 /* If there is an explicit operation in the md file, use it. */
6216 icode = sync_lock_release[mode];
6217 if (icode != CODE_FOR_nothing)
6219 if (!insn_data[icode].operand[1].predicate (val, mode))
6220 val = force_reg (mode, val);
6222 insn = GEN_FCN (icode) (mem, val);
6230 /* Otherwise we can implement this operation by emitting a barrier
6231 followed by a store of zero. */
6232 expand_builtin_synchronize ();
6233 emit_move_insn (mem, val);
6236 /* Expand an expression EXP that calls a built-in function,
6237 with result going to TARGET if that's convenient
6238 (and in mode MODE if that's convenient).
6239 SUBTARGET may be used as the target for computing one of EXP's operands.
6240 IGNORE is nonzero if the value is to be ignored. */
6243 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6246 tree fndecl = get_callee_fndecl (exp);
6247 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6248 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6250 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6251 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6253 /* When not optimizing, generate calls to library functions for a certain
6256 && !called_as_built_in (fndecl)
6257 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6258 && fcode != BUILT_IN_ALLOCA
6259 && fcode != BUILT_IN_FREE)
6260 return expand_call (exp, target, ignore);
6262 /* The built-in function expanders test for target == const0_rtx
6263 to determine whether the function's result will be ignored. */
6265 target = const0_rtx;
6267 /* If the result of a pure or const built-in function is ignored, and
6268 none of its arguments are volatile, we can avoid expanding the
6269 built-in call and just evaluate the arguments for side-effects. */
6270 if (target == const0_rtx
6271 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6273 bool volatilep = false;
6275 call_expr_arg_iterator iter;
6277 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6278 if (TREE_THIS_VOLATILE (arg))
6286 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6287 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6294 CASE_FLT_FN (BUILT_IN_FABS):
6295 target = expand_builtin_fabs (exp, target, subtarget);
6300 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6301 target = expand_builtin_copysign (exp, target, subtarget);
6306 /* Just do a normal library call if we were unable to fold
6308 CASE_FLT_FN (BUILT_IN_CABS):
6311 CASE_FLT_FN (BUILT_IN_EXP):
6312 CASE_FLT_FN (BUILT_IN_EXP10):
6313 CASE_FLT_FN (BUILT_IN_POW10):
6314 CASE_FLT_FN (BUILT_IN_EXP2):
6315 CASE_FLT_FN (BUILT_IN_EXPM1):
6316 CASE_FLT_FN (BUILT_IN_LOGB):
6317 CASE_FLT_FN (BUILT_IN_LOG):
6318 CASE_FLT_FN (BUILT_IN_LOG10):
6319 CASE_FLT_FN (BUILT_IN_LOG2):
6320 CASE_FLT_FN (BUILT_IN_LOG1P):
6321 CASE_FLT_FN (BUILT_IN_TAN):
6322 CASE_FLT_FN (BUILT_IN_ASIN):
6323 CASE_FLT_FN (BUILT_IN_ACOS):
6324 CASE_FLT_FN (BUILT_IN_ATAN):
6325 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6326 because of possible accuracy problems. */
6327 if (! flag_unsafe_math_optimizations)
6329 CASE_FLT_FN (BUILT_IN_SQRT):
6330 CASE_FLT_FN (BUILT_IN_FLOOR):
6331 CASE_FLT_FN (BUILT_IN_CEIL):
6332 CASE_FLT_FN (BUILT_IN_TRUNC):
6333 CASE_FLT_FN (BUILT_IN_ROUND):
6334 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6335 CASE_FLT_FN (BUILT_IN_RINT):
6336 target = expand_builtin_mathfn (exp, target, subtarget);
6341 CASE_FLT_FN (BUILT_IN_ILOGB):
6342 if (! flag_unsafe_math_optimizations)
6344 CASE_FLT_FN (BUILT_IN_ISINF):
6345 CASE_FLT_FN (BUILT_IN_FINITE):
6346 case BUILT_IN_ISFINITE:
6347 case BUILT_IN_ISNORMAL:
6348 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6353 CASE_FLT_FN (BUILT_IN_LCEIL):
6354 CASE_FLT_FN (BUILT_IN_LLCEIL):
6355 CASE_FLT_FN (BUILT_IN_LFLOOR):
6356 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6357 target = expand_builtin_int_roundingfn (exp, target);
6362 CASE_FLT_FN (BUILT_IN_LRINT):
6363 CASE_FLT_FN (BUILT_IN_LLRINT):
6364 CASE_FLT_FN (BUILT_IN_LROUND):
6365 CASE_FLT_FN (BUILT_IN_LLROUND):
6366 target = expand_builtin_int_roundingfn_2 (exp, target);
6371 CASE_FLT_FN (BUILT_IN_POW):
6372 target = expand_builtin_pow (exp, target, subtarget);
6377 CASE_FLT_FN (BUILT_IN_POWI):
6378 target = expand_builtin_powi (exp, target, subtarget);
6383 CASE_FLT_FN (BUILT_IN_ATAN2):
6384 CASE_FLT_FN (BUILT_IN_LDEXP):
6385 CASE_FLT_FN (BUILT_IN_SCALB):
6386 CASE_FLT_FN (BUILT_IN_SCALBN):
6387 CASE_FLT_FN (BUILT_IN_SCALBLN):
6388 if (! flag_unsafe_math_optimizations)
6391 CASE_FLT_FN (BUILT_IN_FMOD):
6392 CASE_FLT_FN (BUILT_IN_REMAINDER):
6393 CASE_FLT_FN (BUILT_IN_DREM):
6394 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6399 CASE_FLT_FN (BUILT_IN_CEXPI):
6400 target = expand_builtin_cexpi (exp, target, subtarget);
6401 gcc_assert (target);
6404 CASE_FLT_FN (BUILT_IN_SIN):
6405 CASE_FLT_FN (BUILT_IN_COS):
6406 if (! flag_unsafe_math_optimizations)
6408 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6413 CASE_FLT_FN (BUILT_IN_SINCOS):
6414 if (! flag_unsafe_math_optimizations)
6416 target = expand_builtin_sincos (exp);
6421 case BUILT_IN_APPLY_ARGS:
6422 return expand_builtin_apply_args ();
6424 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6425 FUNCTION with a copy of the parameters described by
6426 ARGUMENTS, and ARGSIZE. It returns a block of memory
6427 allocated on the stack into which is stored all the registers
6428 that might possibly be used for returning the result of a
6429 function. ARGUMENTS is the value returned by
6430 __builtin_apply_args. ARGSIZE is the number of bytes of
6431 arguments that must be copied. ??? How should this value be
6432 computed? We'll also need a safe worst case value for varargs
6434 case BUILT_IN_APPLY:
6435 if (!validate_arglist (exp, POINTER_TYPE,
6436 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6437 && !validate_arglist (exp, REFERENCE_TYPE,
6438 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6444 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6445 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6446 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6448 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6451 /* __builtin_return (RESULT) causes the function to return the
6452 value described by RESULT. RESULT is address of the block of
6453 memory returned by __builtin_apply. */
6454 case BUILT_IN_RETURN:
6455 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6456 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6459 case BUILT_IN_SAVEREGS:
6460 return expand_builtin_saveregs ();
6462 case BUILT_IN_ARGS_INFO:
6463 return expand_builtin_args_info (exp);
6465 case BUILT_IN_VA_ARG_PACK:
6466 /* All valid uses of __builtin_va_arg_pack () are removed during
6468 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6471 case BUILT_IN_VA_ARG_PACK_LEN:
6472 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6474 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6477 /* Return the address of the first anonymous stack arg. */
6478 case BUILT_IN_NEXT_ARG:
6479 if (fold_builtin_next_arg (exp, false))
6481 return expand_builtin_next_arg ();
6483 case BUILT_IN_CLEAR_CACHE:
6484 target = expand_builtin___clear_cache (exp);
6489 case BUILT_IN_CLASSIFY_TYPE:
6490 return expand_builtin_classify_type (exp);
6492 case BUILT_IN_CONSTANT_P:
6495 case BUILT_IN_FRAME_ADDRESS:
6496 case BUILT_IN_RETURN_ADDRESS:
6497 return expand_builtin_frame_address (fndecl, exp);
6499 /* Returns the address of the area where the structure is returned.
6501 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6502 if (call_expr_nargs (exp) != 0
6503 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6504 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6507 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6509 case BUILT_IN_ALLOCA:
6510 target = expand_builtin_alloca (exp, target);
6515 case BUILT_IN_STACK_SAVE:
6516 return expand_stack_save ();
6518 case BUILT_IN_STACK_RESTORE:
6519 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6522 case BUILT_IN_BSWAP32:
6523 case BUILT_IN_BSWAP64:
6524 target = expand_builtin_bswap (exp, target, subtarget);
6530 CASE_INT_FN (BUILT_IN_FFS):
6531 case BUILT_IN_FFSIMAX:
6532 target = expand_builtin_unop (target_mode, exp, target,
6533 subtarget, ffs_optab);
6538 CASE_INT_FN (BUILT_IN_CLZ):
6539 case BUILT_IN_CLZIMAX:
6540 target = expand_builtin_unop (target_mode, exp, target,
6541 subtarget, clz_optab);
6546 CASE_INT_FN (BUILT_IN_CTZ):
6547 case BUILT_IN_CTZIMAX:
6548 target = expand_builtin_unop (target_mode, exp, target,
6549 subtarget, ctz_optab);
6554 CASE_INT_FN (BUILT_IN_POPCOUNT):
6555 case BUILT_IN_POPCOUNTIMAX:
6556 target = expand_builtin_unop (target_mode, exp, target,
6557 subtarget, popcount_optab);
6562 CASE_INT_FN (BUILT_IN_PARITY):
6563 case BUILT_IN_PARITYIMAX:
6564 target = expand_builtin_unop (target_mode, exp, target,
6565 subtarget, parity_optab);
6570 case BUILT_IN_STRLEN:
6571 target = expand_builtin_strlen (exp, target, target_mode);
6576 case BUILT_IN_STRCPY:
6577 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6582 case BUILT_IN_STRNCPY:
6583 target = expand_builtin_strncpy (exp, target, mode);
6588 case BUILT_IN_STPCPY:
6589 target = expand_builtin_stpcpy (exp, target, mode);
6594 case BUILT_IN_STRCAT:
6595 target = expand_builtin_strcat (fndecl, exp, target, mode);
6600 case BUILT_IN_STRNCAT:
6601 target = expand_builtin_strncat (exp, target, mode);
6606 case BUILT_IN_STRSPN:
6607 target = expand_builtin_strspn (exp, target, mode);
6612 case BUILT_IN_STRCSPN:
6613 target = expand_builtin_strcspn (exp, target, mode);
6618 case BUILT_IN_STRSTR:
6619 target = expand_builtin_strstr (exp, target, mode);
6624 case BUILT_IN_STRPBRK:
6625 target = expand_builtin_strpbrk (exp, target, mode);
6630 case BUILT_IN_INDEX:
6631 case BUILT_IN_STRCHR:
6632 target = expand_builtin_strchr (exp, target, mode);
6637 case BUILT_IN_RINDEX:
6638 case BUILT_IN_STRRCHR:
6639 target = expand_builtin_strrchr (exp, target, mode);
6644 case BUILT_IN_MEMCPY:
6645 target = expand_builtin_memcpy (exp, target, mode);
6650 case BUILT_IN_MEMPCPY:
6651 target = expand_builtin_mempcpy (exp, target, mode);
6656 case BUILT_IN_MEMMOVE:
6657 target = expand_builtin_memmove (exp, target, mode, ignore);
6662 case BUILT_IN_BCOPY:
6663 target = expand_builtin_bcopy (exp, ignore);
6668 case BUILT_IN_MEMSET:
6669 target = expand_builtin_memset (exp, target, mode);
6674 case BUILT_IN_BZERO:
6675 target = expand_builtin_bzero (exp);
6680 case BUILT_IN_STRCMP:
6681 target = expand_builtin_strcmp (exp, target, mode);
6686 case BUILT_IN_STRNCMP:
6687 target = expand_builtin_strncmp (exp, target, mode);
6692 case BUILT_IN_MEMCHR:
6693 target = expand_builtin_memchr (exp, target, mode);
6699 case BUILT_IN_MEMCMP:
6700 target = expand_builtin_memcmp (exp, target, mode);
6705 case BUILT_IN_SETJMP:
6706 /* This should have been lowered to the builtins below. */
6709 case BUILT_IN_SETJMP_SETUP:
6710 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6711 and the receiver label. */
6712 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6714 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6715 VOIDmode, EXPAND_NORMAL);
6716 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6717 rtx label_r = label_rtx (label);
6719 /* This is copied from the handling of non-local gotos. */
6720 expand_builtin_setjmp_setup (buf_addr, label_r);
6721 nonlocal_goto_handler_labels
6722 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6723 nonlocal_goto_handler_labels);
6724 /* ??? Do not let expand_label treat us as such since we would
6725 not want to be both on the list of non-local labels and on
6726 the list of forced labels. */
6727 FORCED_LABEL (label) = 0;
6732 case BUILT_IN_SETJMP_DISPATCHER:
6733 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6734 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6736 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6737 rtx label_r = label_rtx (label);
6739 /* Remove the dispatcher label from the list of non-local labels
6740 since the receiver labels have been added to it above. */
6741 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6746 case BUILT_IN_SETJMP_RECEIVER:
6747 /* __builtin_setjmp_receiver is passed the receiver label. */
6748 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6750 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6751 rtx label_r = label_rtx (label);
6753 expand_builtin_setjmp_receiver (label_r);
6758 /* __builtin_longjmp is passed a pointer to an array of five words.
6759 It's similar to the C library longjmp function but works with
6760 __builtin_setjmp above. */
6761 case BUILT_IN_LONGJMP:
6762 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6764 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6765 VOIDmode, EXPAND_NORMAL);
6766 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6768 if (value != const1_rtx)
6770 error ("%<__builtin_longjmp%> second argument must be 1");
6774 expand_builtin_longjmp (buf_addr, value);
6779 case BUILT_IN_NONLOCAL_GOTO:
6780 target = expand_builtin_nonlocal_goto (exp);
6785 /* This updates the setjmp buffer that is its argument with the value
6786 of the current stack pointer. */
6787 case BUILT_IN_UPDATE_SETJMP_BUF:
6788 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6791 = expand_normal (CALL_EXPR_ARG (exp, 0));
6793 expand_builtin_update_setjmp_buf (buf_addr);
6799 expand_builtin_trap ();
6802 case BUILT_IN_PRINTF:
6803 target = expand_builtin_printf (exp, target, mode, false);
6808 case BUILT_IN_PRINTF_UNLOCKED:
6809 target = expand_builtin_printf (exp, target, mode, true);
6814 case BUILT_IN_FPUTS:
6815 target = expand_builtin_fputs (exp, target, false);
6819 case BUILT_IN_FPUTS_UNLOCKED:
6820 target = expand_builtin_fputs (exp, target, true);
6825 case BUILT_IN_FPRINTF:
6826 target = expand_builtin_fprintf (exp, target, mode, false);
6831 case BUILT_IN_FPRINTF_UNLOCKED:
6832 target = expand_builtin_fprintf (exp, target, mode, true);
6837 case BUILT_IN_SPRINTF:
6838 target = expand_builtin_sprintf (exp, target, mode);
6843 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6844 case BUILT_IN_SIGNBITD32:
6845 case BUILT_IN_SIGNBITD64:
6846 case BUILT_IN_SIGNBITD128:
6847 target = expand_builtin_signbit (exp, target);
6852 /* Various hooks for the DWARF 2 __throw routine. */
6853 case BUILT_IN_UNWIND_INIT:
6854 expand_builtin_unwind_init ();
6856 case BUILT_IN_DWARF_CFA:
6857 return virtual_cfa_rtx;
6858 #ifdef DWARF2_UNWIND_INFO
6859 case BUILT_IN_DWARF_SP_COLUMN:
6860 return expand_builtin_dwarf_sp_column ();
6861 case BUILT_IN_INIT_DWARF_REG_SIZES:
6862 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6865 case BUILT_IN_FROB_RETURN_ADDR:
6866 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6867 case BUILT_IN_EXTRACT_RETURN_ADDR:
6868 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6869 case BUILT_IN_EH_RETURN:
6870 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6871 CALL_EXPR_ARG (exp, 1));
6873 #ifdef EH_RETURN_DATA_REGNO
6874 case BUILT_IN_EH_RETURN_DATA_REGNO:
6875 return expand_builtin_eh_return_data_regno (exp);
6877 case BUILT_IN_EXTEND_POINTER:
6878 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6880 case BUILT_IN_VA_START:
6881 return expand_builtin_va_start (exp);
6882 case BUILT_IN_VA_END:
6883 return expand_builtin_va_end (exp);
6884 case BUILT_IN_VA_COPY:
6885 return expand_builtin_va_copy (exp);
6886 case BUILT_IN_EXPECT:
6887 return expand_builtin_expect (exp, target);
6888 case BUILT_IN_PREFETCH:
6889 expand_builtin_prefetch (exp);
6892 case BUILT_IN_PROFILE_FUNC_ENTER:
6893 return expand_builtin_profile_func (false);
6894 case BUILT_IN_PROFILE_FUNC_EXIT:
6895 return expand_builtin_profile_func (true);
6897 case BUILT_IN_INIT_TRAMPOLINE:
6898 return expand_builtin_init_trampoline (exp);
6899 case BUILT_IN_ADJUST_TRAMPOLINE:
6900 return expand_builtin_adjust_trampoline (exp);
6903 case BUILT_IN_EXECL:
6904 case BUILT_IN_EXECV:
6905 case BUILT_IN_EXECLP:
6906 case BUILT_IN_EXECLE:
6907 case BUILT_IN_EXECVP:
6908 case BUILT_IN_EXECVE:
6909 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6914 case BUILT_IN_FETCH_AND_ADD_1:
6915 case BUILT_IN_FETCH_AND_ADD_2:
6916 case BUILT_IN_FETCH_AND_ADD_4:
6917 case BUILT_IN_FETCH_AND_ADD_8:
6918 case BUILT_IN_FETCH_AND_ADD_16:
6919 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6920 target = expand_builtin_sync_operation (mode, exp, PLUS,
6921 false, target, ignore);
6926 case BUILT_IN_FETCH_AND_SUB_1:
6927 case BUILT_IN_FETCH_AND_SUB_2:
6928 case BUILT_IN_FETCH_AND_SUB_4:
6929 case BUILT_IN_FETCH_AND_SUB_8:
6930 case BUILT_IN_FETCH_AND_SUB_16:
6931 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6932 target = expand_builtin_sync_operation (mode, exp, MINUS,
6933 false, target, ignore);
6938 case BUILT_IN_FETCH_AND_OR_1:
6939 case BUILT_IN_FETCH_AND_OR_2:
6940 case BUILT_IN_FETCH_AND_OR_4:
6941 case BUILT_IN_FETCH_AND_OR_8:
6942 case BUILT_IN_FETCH_AND_OR_16:
6943 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6944 target = expand_builtin_sync_operation (mode, exp, IOR,
6945 false, target, ignore);
6950 case BUILT_IN_FETCH_AND_AND_1:
6951 case BUILT_IN_FETCH_AND_AND_2:
6952 case BUILT_IN_FETCH_AND_AND_4:
6953 case BUILT_IN_FETCH_AND_AND_8:
6954 case BUILT_IN_FETCH_AND_AND_16:
6955 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6956 target = expand_builtin_sync_operation (mode, exp, AND,
6957 false, target, ignore);
6962 case BUILT_IN_FETCH_AND_XOR_1:
6963 case BUILT_IN_FETCH_AND_XOR_2:
6964 case BUILT_IN_FETCH_AND_XOR_4:
6965 case BUILT_IN_FETCH_AND_XOR_8:
6966 case BUILT_IN_FETCH_AND_XOR_16:
6967 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6968 target = expand_builtin_sync_operation (mode, exp, XOR,
6969 false, target, ignore);
6974 case BUILT_IN_FETCH_AND_NAND_1:
6975 case BUILT_IN_FETCH_AND_NAND_2:
6976 case BUILT_IN_FETCH_AND_NAND_4:
6977 case BUILT_IN_FETCH_AND_NAND_8:
6978 case BUILT_IN_FETCH_AND_NAND_16:
6979 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6980 target = expand_builtin_sync_operation (mode, exp, NOT,
6981 false, target, ignore);
6986 case BUILT_IN_ADD_AND_FETCH_1:
6987 case BUILT_IN_ADD_AND_FETCH_2:
6988 case BUILT_IN_ADD_AND_FETCH_4:
6989 case BUILT_IN_ADD_AND_FETCH_8:
6990 case BUILT_IN_ADD_AND_FETCH_16:
6991 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6992 target = expand_builtin_sync_operation (mode, exp, PLUS,
6993 true, target, ignore);
6998 case BUILT_IN_SUB_AND_FETCH_1:
6999 case BUILT_IN_SUB_AND_FETCH_2:
7000 case BUILT_IN_SUB_AND_FETCH_4:
7001 case BUILT_IN_SUB_AND_FETCH_8:
7002 case BUILT_IN_SUB_AND_FETCH_16:
7003 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
7004 target = expand_builtin_sync_operation (mode, exp, MINUS,
7005 true, target, ignore);
7010 case BUILT_IN_OR_AND_FETCH_1:
7011 case BUILT_IN_OR_AND_FETCH_2:
7012 case BUILT_IN_OR_AND_FETCH_4:
7013 case BUILT_IN_OR_AND_FETCH_8:
7014 case BUILT_IN_OR_AND_FETCH_16:
7015 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7016 target = expand_builtin_sync_operation (mode, exp, IOR,
7017 true, target, ignore);
7022 case BUILT_IN_AND_AND_FETCH_1:
7023 case BUILT_IN_AND_AND_FETCH_2:
7024 case BUILT_IN_AND_AND_FETCH_4:
7025 case BUILT_IN_AND_AND_FETCH_8:
7026 case BUILT_IN_AND_AND_FETCH_16:
7027 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7028 target = expand_builtin_sync_operation (mode, exp, AND,
7029 true, target, ignore);
7034 case BUILT_IN_XOR_AND_FETCH_1:
7035 case BUILT_IN_XOR_AND_FETCH_2:
7036 case BUILT_IN_XOR_AND_FETCH_4:
7037 case BUILT_IN_XOR_AND_FETCH_8:
7038 case BUILT_IN_XOR_AND_FETCH_16:
7039 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7040 target = expand_builtin_sync_operation (mode, exp, XOR,
7041 true, target, ignore);
7046 case BUILT_IN_NAND_AND_FETCH_1:
7047 case BUILT_IN_NAND_AND_FETCH_2:
7048 case BUILT_IN_NAND_AND_FETCH_4:
7049 case BUILT_IN_NAND_AND_FETCH_8:
7050 case BUILT_IN_NAND_AND_FETCH_16:
7051 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7052 target = expand_builtin_sync_operation (mode, exp, NOT,
7053 true, target, ignore);
7058 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7059 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7060 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7061 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7062 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7063 if (mode == VOIDmode)
7064 mode = TYPE_MODE (boolean_type_node);
7065 if (!target || !register_operand (target, mode))
7066 target = gen_reg_rtx (mode);
7068 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7069 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7074 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7075 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7076 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7077 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7078 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7079 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7080 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7085 case BUILT_IN_LOCK_TEST_AND_SET_1:
7086 case BUILT_IN_LOCK_TEST_AND_SET_2:
7087 case BUILT_IN_LOCK_TEST_AND_SET_4:
7088 case BUILT_IN_LOCK_TEST_AND_SET_8:
7089 case BUILT_IN_LOCK_TEST_AND_SET_16:
7090 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7091 target = expand_builtin_lock_test_and_set (mode, exp, target);
7096 case BUILT_IN_LOCK_RELEASE_1:
7097 case BUILT_IN_LOCK_RELEASE_2:
7098 case BUILT_IN_LOCK_RELEASE_4:
7099 case BUILT_IN_LOCK_RELEASE_8:
7100 case BUILT_IN_LOCK_RELEASE_16:
7101 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7102 expand_builtin_lock_release (mode, exp);
7105 case BUILT_IN_SYNCHRONIZE:
7106 expand_builtin_synchronize ();
7109 case BUILT_IN_OBJECT_SIZE:
7110 return expand_builtin_object_size (exp);
7112 case BUILT_IN_MEMCPY_CHK:
7113 case BUILT_IN_MEMPCPY_CHK:
7114 case BUILT_IN_MEMMOVE_CHK:
7115 case BUILT_IN_MEMSET_CHK:
7116 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7121 case BUILT_IN_STRCPY_CHK:
7122 case BUILT_IN_STPCPY_CHK:
7123 case BUILT_IN_STRNCPY_CHK:
7124 case BUILT_IN_STRCAT_CHK:
7125 case BUILT_IN_STRNCAT_CHK:
7126 case BUILT_IN_SNPRINTF_CHK:
7127 case BUILT_IN_VSNPRINTF_CHK:
7128 maybe_emit_chk_warning (exp, fcode);
7131 case BUILT_IN_SPRINTF_CHK:
7132 case BUILT_IN_VSPRINTF_CHK:
7133 maybe_emit_sprintf_chk_warning (exp, fcode);
7137 maybe_emit_free_warning (exp);
7140 default: /* just do library call, if unknown builtin */
7144 /* The switch statement above can drop through to cause the function
7145 to be called normally. */
7146 return expand_call (exp, target, ignore);
7149 /* Determine whether a tree node represents a call to a built-in
7150 function. If the tree T is a call to a built-in function with
7151 the right number of arguments of the appropriate types, return
7152 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7153 Otherwise the return value is END_BUILTINS. */
7155 enum built_in_function
7156 builtin_mathfn_code (const_tree t)
7158 const_tree fndecl, arg, parmlist;
7159 const_tree argtype, parmtype;
7160 const_call_expr_arg_iterator iter;
7162 if (TREE_CODE (t) != CALL_EXPR
7163 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7164 return END_BUILTINS;
7166 fndecl = get_callee_fndecl (t);
7167 if (fndecl == NULL_TREE
7168 || TREE_CODE (fndecl) != FUNCTION_DECL
7169 || ! DECL_BUILT_IN (fndecl)
7170 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7171 return END_BUILTINS;
7173 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7174 init_const_call_expr_arg_iterator (t, &iter);
7175 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7177 /* If a function doesn't take a variable number of arguments,
7178 the last element in the list will have type `void'. */
7179 parmtype = TREE_VALUE (parmlist);
7180 if (VOID_TYPE_P (parmtype))
7182 if (more_const_call_expr_args_p (&iter))
7183 return END_BUILTINS;
7184 return DECL_FUNCTION_CODE (fndecl);
7187 if (! more_const_call_expr_args_p (&iter))
7188 return END_BUILTINS;
7190 arg = next_const_call_expr_arg (&iter);
7191 argtype = TREE_TYPE (arg);
7193 if (SCALAR_FLOAT_TYPE_P (parmtype))
7195 if (! SCALAR_FLOAT_TYPE_P (argtype))
7196 return END_BUILTINS;
7198 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7200 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7201 return END_BUILTINS;
7203 else if (POINTER_TYPE_P (parmtype))
7205 if (! POINTER_TYPE_P (argtype))
7206 return END_BUILTINS;
7208 else if (INTEGRAL_TYPE_P (parmtype))
7210 if (! INTEGRAL_TYPE_P (argtype))
7211 return END_BUILTINS;
7214 return END_BUILTINS;
7217 /* Variable-length argument list. */
7218 return DECL_FUNCTION_CODE (fndecl);
7221 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7222 evaluate to a constant. */
7225 fold_builtin_constant_p (tree arg)
7227 /* We return 1 for a numeric type that's known to be a constant
7228 value at compile-time or for an aggregate type that's a
7229 literal constant. */
7232 /* If we know this is a constant, emit the constant of one. */
7233 if (CONSTANT_CLASS_P (arg)
7234 || (TREE_CODE (arg) == CONSTRUCTOR
7235 && TREE_CONSTANT (arg)))
7236 return integer_one_node;
7237 if (TREE_CODE (arg) == ADDR_EXPR)
7239 tree op = TREE_OPERAND (arg, 0);
7240 if (TREE_CODE (op) == STRING_CST
7241 || (TREE_CODE (op) == ARRAY_REF
7242 && integer_zerop (TREE_OPERAND (op, 1))
7243 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7244 return integer_one_node;
7247 /* If this expression has side effects, show we don't know it to be a
7248 constant. Likewise if it's a pointer or aggregate type since in
7249 those case we only want literals, since those are only optimized
7250 when generating RTL, not later.
7251 And finally, if we are compiling an initializer, not code, we
7252 need to return a definite result now; there's not going to be any
7253 more optimization done. */
7254 if (TREE_SIDE_EFFECTS (arg)
7255 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7256 || POINTER_TYPE_P (TREE_TYPE (arg))
7258 || folding_initializer)
7259 return integer_zero_node;
7264 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7265 return it as a truthvalue. */
7268 build_builtin_expect_predicate (tree pred, tree expected)
7270 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7272 fn = built_in_decls[BUILT_IN_EXPECT];
7273 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7274 ret_type = TREE_TYPE (TREE_TYPE (fn));
7275 pred_type = TREE_VALUE (arg_types);
7276 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7278 pred = fold_convert (pred_type, pred);
7279 expected = fold_convert (expected_type, expected);
7280 call_expr = build_call_expr (fn, 2, pred, expected);
7282 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7283 build_int_cst (ret_type, 0));
7286 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7287 NULL_TREE if no simplification is possible. */
7290 fold_builtin_expect (tree arg0, tree arg1)
7293 enum tree_code code;
7295 /* If this is a builtin_expect within a builtin_expect keep the
7296 inner one. See through a comparison against a constant. It
7297 might have been added to create a thruthvalue. */
7299 if (COMPARISON_CLASS_P (inner)
7300 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7301 inner = TREE_OPERAND (inner, 0);
7303 if (TREE_CODE (inner) == CALL_EXPR
7304 && (fndecl = get_callee_fndecl (inner))
7305 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7306 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7309 /* Distribute the expected value over short-circuiting operators.
7310 See through the cast from truthvalue_type_node to long. */
7312 while (TREE_CODE (inner) == NOP_EXPR
7313 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7314 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7315 inner = TREE_OPERAND (inner, 0);
7317 code = TREE_CODE (inner);
7318 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7320 tree op0 = TREE_OPERAND (inner, 0);
7321 tree op1 = TREE_OPERAND (inner, 1);
7323 op0 = build_builtin_expect_predicate (op0, arg1);
7324 op1 = build_builtin_expect_predicate (op1, arg1);
7325 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7327 return fold_convert (TREE_TYPE (arg0), inner);
7330 /* If the argument isn't invariant then there's nothing else we can do. */
7331 if (!TREE_CONSTANT (arg0))
7334 /* If we expect that a comparison against the argument will fold to
7335 a constant return the constant. In practice, this means a true
7336 constant or the address of a non-weak symbol. */
7339 if (TREE_CODE (inner) == ADDR_EXPR)
7343 inner = TREE_OPERAND (inner, 0);
7345 while (TREE_CODE (inner) == COMPONENT_REF
7346 || TREE_CODE (inner) == ARRAY_REF);
7347 if ((TREE_CODE (inner) == VAR_DECL
7348 || TREE_CODE (inner) == FUNCTION_DECL)
7349 && DECL_WEAK (inner))
7353 /* Otherwise, ARG0 already has the proper type for the return value. */
7357 /* Fold a call to __builtin_classify_type with argument ARG. */
7360 fold_builtin_classify_type (tree arg)
7363 return build_int_cst (NULL_TREE, no_type_class);
7365 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7368 /* Fold a call to __builtin_strlen with argument ARG. */
7371 fold_builtin_strlen (tree type, tree arg)
7373 if (!validate_arg (arg, POINTER_TYPE))
7377 tree len = c_strlen (arg, 0);
7380 return fold_convert (type, len);
7386 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7389 fold_builtin_inf (tree type, int warn)
7391 REAL_VALUE_TYPE real;
7393 /* __builtin_inff is intended to be usable to define INFINITY on all
7394 targets. If an infinity is not available, INFINITY expands "to a
7395 positive constant of type float that overflows at translation
7396 time", footnote "In this case, using INFINITY will violate the
7397 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7398 Thus we pedwarn to ensure this constraint violation is
7400 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7401 pedwarn (input_location, 0, "target format does not support infinity");
7404 return build_real (type, real);
7407 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7410 fold_builtin_nan (tree arg, tree type, int quiet)
7412 REAL_VALUE_TYPE real;
7415 if (!validate_arg (arg, POINTER_TYPE))
7417 str = c_getstr (arg);
7421 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7424 return build_real (type, real);
7427 /* Return true if the floating point expression T has an integer value.
7428 We also allow +Inf, -Inf and NaN to be considered integer values. */
7431 integer_valued_real_p (tree t)
7433 switch (TREE_CODE (t))
7440 return integer_valued_real_p (TREE_OPERAND (t, 0));
7445 return integer_valued_real_p (TREE_OPERAND (t, 1));
7452 return integer_valued_real_p (TREE_OPERAND (t, 0))
7453 && integer_valued_real_p (TREE_OPERAND (t, 1));
7456 return integer_valued_real_p (TREE_OPERAND (t, 1))
7457 && integer_valued_real_p (TREE_OPERAND (t, 2));
7460 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7464 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7465 if (TREE_CODE (type) == INTEGER_TYPE)
7467 if (TREE_CODE (type) == REAL_TYPE)
7468 return integer_valued_real_p (TREE_OPERAND (t, 0));
7473 switch (builtin_mathfn_code (t))
7475 CASE_FLT_FN (BUILT_IN_CEIL):
7476 CASE_FLT_FN (BUILT_IN_FLOOR):
7477 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7478 CASE_FLT_FN (BUILT_IN_RINT):
7479 CASE_FLT_FN (BUILT_IN_ROUND):
7480 CASE_FLT_FN (BUILT_IN_TRUNC):
7483 CASE_FLT_FN (BUILT_IN_FMIN):
7484 CASE_FLT_FN (BUILT_IN_FMAX):
7485 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7486 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7499 /* FNDECL is assumed to be a builtin where truncation can be propagated
7500 across (for instance floor((double)f) == (double)floorf (f).
7501 Do the transformation for a call with argument ARG. */
7504 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7506 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7508 if (!validate_arg (arg, REAL_TYPE))
7511 /* Integer rounding functions are idempotent. */
7512 if (fcode == builtin_mathfn_code (arg))
7515 /* If argument is already integer valued, and we don't need to worry
7516 about setting errno, there's no need to perform rounding. */
7517 if (! flag_errno_math && integer_valued_real_p (arg))
7522 tree arg0 = strip_float_extensions (arg);
7523 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7524 tree newtype = TREE_TYPE (arg0);
7527 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7528 && (decl = mathfn_built_in (newtype, fcode)))
7529 return fold_convert (ftype,
7530 build_call_expr (decl, 1,
7531 fold_convert (newtype, arg0)));
7536 /* FNDECL is assumed to be builtin which can narrow the FP type of
7537 the argument, for instance lround((double)f) -> lroundf (f).
7538 Do the transformation for a call with argument ARG. */
7541 fold_fixed_mathfn (tree fndecl, tree arg)
7543 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7545 if (!validate_arg (arg, REAL_TYPE))
7548 /* If argument is already integer valued, and we don't need to worry
7549 about setting errno, there's no need to perform rounding. */
7550 if (! flag_errno_math && integer_valued_real_p (arg))
7551 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7555 tree ftype = TREE_TYPE (arg);
7556 tree arg0 = strip_float_extensions (arg);
7557 tree newtype = TREE_TYPE (arg0);
7560 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7561 && (decl = mathfn_built_in (newtype, fcode)))
7562 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7565 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7566 sizeof (long long) == sizeof (long). */
7567 if (TYPE_PRECISION (long_long_integer_type_node)
7568 == TYPE_PRECISION (long_integer_type_node))
7570 tree newfn = NULL_TREE;
7573 CASE_FLT_FN (BUILT_IN_LLCEIL):
7574 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7577 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7578 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7581 CASE_FLT_FN (BUILT_IN_LLROUND):
7582 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7585 CASE_FLT_FN (BUILT_IN_LLRINT):
7586 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7595 tree newcall = build_call_expr(newfn, 1, arg);
7596 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7603 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7604 return type. Return NULL_TREE if no simplification can be made. */
7607 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7611 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7612 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7615 /* Calculate the result when the argument is a constant. */
7616 if (TREE_CODE (arg) == COMPLEX_CST
7617 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7621 if (TREE_CODE (arg) == COMPLEX_EXPR)
7623 tree real = TREE_OPERAND (arg, 0);
7624 tree imag = TREE_OPERAND (arg, 1);
7626 /* If either part is zero, cabs is fabs of the other. */
7627 if (real_zerop (real))
7628 return fold_build1 (ABS_EXPR, type, imag);
7629 if (real_zerop (imag))
7630 return fold_build1 (ABS_EXPR, type, real);
7632 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7633 if (flag_unsafe_math_optimizations
7634 && operand_equal_p (real, imag, OEP_PURE_SAME))
7636 const REAL_VALUE_TYPE sqrt2_trunc
7637 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7639 return fold_build2 (MULT_EXPR, type,
7640 fold_build1 (ABS_EXPR, type, real),
7641 build_real (type, sqrt2_trunc));
7645 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7646 if (TREE_CODE (arg) == NEGATE_EXPR
7647 || TREE_CODE (arg) == CONJ_EXPR)
7648 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7650 /* Don't do this when optimizing for size. */
7651 if (flag_unsafe_math_optimizations
7652 && optimize && optimize_function_for_speed_p (cfun))
7654 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7656 if (sqrtfn != NULL_TREE)
7658 tree rpart, ipart, result;
7660 arg = builtin_save_expr (arg);
7662 rpart = fold_build1 (REALPART_EXPR, type, arg);
7663 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7665 rpart = builtin_save_expr (rpart);
7666 ipart = builtin_save_expr (ipart);
7668 result = fold_build2 (PLUS_EXPR, type,
7669 fold_build2 (MULT_EXPR, type,
7671 fold_build2 (MULT_EXPR, type,
7674 return build_call_expr (sqrtfn, 1, result);
7681 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7682 Return NULL_TREE if no simplification can be made. */
7685 fold_builtin_sqrt (tree arg, tree type)
7688 enum built_in_function fcode;
7691 if (!validate_arg (arg, REAL_TYPE))
7694 /* Calculate the result when the argument is a constant. */
7695 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7698 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7699 fcode = builtin_mathfn_code (arg);
7700 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7702 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7703 arg = fold_build2 (MULT_EXPR, type,
7704 CALL_EXPR_ARG (arg, 0),
7705 build_real (type, dconsthalf));
7706 return build_call_expr (expfn, 1, arg);
7709 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7710 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7712 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7716 tree arg0 = CALL_EXPR_ARG (arg, 0);
7718 /* The inner root was either sqrt or cbrt. */
7719 /* This was a conditional expression but it triggered a bug
7721 REAL_VALUE_TYPE dconstroot;
7722 if (BUILTIN_SQRT_P (fcode))
7723 dconstroot = dconsthalf;
7725 dconstroot = dconst_third ();
7727 /* Adjust for the outer root. */
7728 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7729 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7730 tree_root = build_real (type, dconstroot);
7731 return build_call_expr (powfn, 2, arg0, tree_root);
7735 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7736 if (flag_unsafe_math_optimizations
7737 && (fcode == BUILT_IN_POW
7738 || fcode == BUILT_IN_POWF
7739 || fcode == BUILT_IN_POWL))
7741 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7742 tree arg0 = CALL_EXPR_ARG (arg, 0);
7743 tree arg1 = CALL_EXPR_ARG (arg, 1);
7745 if (!tree_expr_nonnegative_p (arg0))
7746 arg0 = build1 (ABS_EXPR, type, arg0);
7747 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7748 build_real (type, dconsthalf));
7749 return build_call_expr (powfn, 2, arg0, narg1);
7755 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7756 Return NULL_TREE if no simplification can be made. */
7759 fold_builtin_cbrt (tree arg, tree type)
7761 const enum built_in_function fcode = builtin_mathfn_code (arg);
7764 if (!validate_arg (arg, REAL_TYPE))
7767 /* Calculate the result when the argument is a constant. */
7768 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7771 if (flag_unsafe_math_optimizations)
7773 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7774 if (BUILTIN_EXPONENT_P (fcode))
7776 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7777 const REAL_VALUE_TYPE third_trunc =
7778 real_value_truncate (TYPE_MODE (type), dconst_third ());
7779 arg = fold_build2 (MULT_EXPR, type,
7780 CALL_EXPR_ARG (arg, 0),
7781 build_real (type, third_trunc));
7782 return build_call_expr (expfn, 1, arg);
7785 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7786 if (BUILTIN_SQRT_P (fcode))
7788 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7792 tree arg0 = CALL_EXPR_ARG (arg, 0);
7794 REAL_VALUE_TYPE dconstroot = dconst_third ();
7796 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7797 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7798 tree_root = build_real (type, dconstroot);
7799 return build_call_expr (powfn, 2, arg0, tree_root);
7803 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7804 if (BUILTIN_CBRT_P (fcode))
7806 tree arg0 = CALL_EXPR_ARG (arg, 0);
7807 if (tree_expr_nonnegative_p (arg0))
7809 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7814 REAL_VALUE_TYPE dconstroot;
7816 real_arithmetic (&dconstroot, MULT_EXPR,
7817 dconst_third_ptr (), dconst_third_ptr ());
7818 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7819 tree_root = build_real (type, dconstroot);
7820 return build_call_expr (powfn, 2, arg0, tree_root);
7825 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7826 if (fcode == BUILT_IN_POW
7827 || fcode == BUILT_IN_POWF
7828 || fcode == BUILT_IN_POWL)
7830 tree arg00 = CALL_EXPR_ARG (arg, 0);
7831 tree arg01 = CALL_EXPR_ARG (arg, 1);
7832 if (tree_expr_nonnegative_p (arg00))
7834 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7835 const REAL_VALUE_TYPE dconstroot
7836 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7837 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7838 build_real (type, dconstroot));
7839 return build_call_expr (powfn, 2, arg00, narg01);
7846 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7847 TYPE is the type of the return value. Return NULL_TREE if no
7848 simplification can be made. */
7851 fold_builtin_cos (tree arg, tree type, tree fndecl)
7855 if (!validate_arg (arg, REAL_TYPE))
7858 /* Calculate the result when the argument is a constant. */
7859 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7862 /* Optimize cos(-x) into cos (x). */
7863 if ((narg = fold_strip_sign_ops (arg)))
7864 return build_call_expr (fndecl, 1, narg);
7869 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7870 Return NULL_TREE if no simplification can be made. */
7873 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7875 if (validate_arg (arg, REAL_TYPE))
7879 /* Calculate the result when the argument is a constant. */
7880 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7883 /* Optimize cosh(-x) into cosh (x). */
7884 if ((narg = fold_strip_sign_ops (arg)))
7885 return build_call_expr (fndecl, 1, narg);
7891 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7892 Return NULL_TREE if no simplification can be made. */
7895 fold_builtin_tan (tree arg, tree type)
7897 enum built_in_function fcode;
7900 if (!validate_arg (arg, REAL_TYPE))
7903 /* Calculate the result when the argument is a constant. */
7904 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7907 /* Optimize tan(atan(x)) = x. */
7908 fcode = builtin_mathfn_code (arg);
7909 if (flag_unsafe_math_optimizations
7910 && (fcode == BUILT_IN_ATAN
7911 || fcode == BUILT_IN_ATANF
7912 || fcode == BUILT_IN_ATANL))
7913 return CALL_EXPR_ARG (arg, 0);
7918 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7919 NULL_TREE if no simplification can be made. */
7922 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7927 if (!validate_arg (arg0, REAL_TYPE)
7928 || !validate_arg (arg1, POINTER_TYPE)
7929 || !validate_arg (arg2, POINTER_TYPE))
7932 type = TREE_TYPE (arg0);
7934 /* Calculate the result when the argument is a constant. */
7935 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7938 /* Canonicalize sincos to cexpi. */
7939 if (!TARGET_C99_FUNCTIONS)
7941 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7945 call = build_call_expr (fn, 1, arg0);
7946 call = builtin_save_expr (call);
7948 return build2 (COMPOUND_EXPR, type,
7949 build2 (MODIFY_EXPR, void_type_node,
7950 build_fold_indirect_ref (arg1),
7951 build1 (IMAGPART_EXPR, type, call)),
7952 build2 (MODIFY_EXPR, void_type_node,
7953 build_fold_indirect_ref (arg2),
7954 build1 (REALPART_EXPR, type, call)));
7957 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7958 NULL_TREE if no simplification can be made. */
7961 fold_builtin_cexp (tree arg0, tree type)
7964 tree realp, imagp, ifn;
7966 if (!validate_arg (arg0, COMPLEX_TYPE))
7969 rtype = TREE_TYPE (TREE_TYPE (arg0));
7971 /* In case we can figure out the real part of arg0 and it is constant zero
7973 if (!TARGET_C99_FUNCTIONS)
7975 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7979 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7980 && real_zerop (realp))
7982 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7983 return build_call_expr (ifn, 1, narg);
7986 /* In case we can easily decompose real and imaginary parts split cexp
7987 to exp (r) * cexpi (i). */
7988 if (flag_unsafe_math_optimizations
7991 tree rfn, rcall, icall;
7993 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7997 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
8001 icall = build_call_expr (ifn, 1, imagp);
8002 icall = builtin_save_expr (icall);
8003 rcall = build_call_expr (rfn, 1, realp);
8004 rcall = builtin_save_expr (rcall);
8005 return fold_build2 (COMPLEX_EXPR, type,
8006 fold_build2 (MULT_EXPR, rtype,
8008 fold_build1 (REALPART_EXPR, rtype, icall)),
8009 fold_build2 (MULT_EXPR, rtype,
8011 fold_build1 (IMAGPART_EXPR, rtype, icall)));
8017 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8018 Return NULL_TREE if no simplification can be made. */
8021 fold_builtin_trunc (tree fndecl, tree arg)
8023 if (!validate_arg (arg, REAL_TYPE))
8026 /* Optimize trunc of constant value. */
8027 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8029 REAL_VALUE_TYPE r, x;
8030 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8032 x = TREE_REAL_CST (arg);
8033 real_trunc (&r, TYPE_MODE (type), &x);
8034 return build_real (type, r);
8037 return fold_trunc_transparent_mathfn (fndecl, arg);
8040 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8041 Return NULL_TREE if no simplification can be made. */
8044 fold_builtin_floor (tree fndecl, tree arg)
8046 if (!validate_arg (arg, REAL_TYPE))
8049 /* Optimize floor of constant value. */
8050 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8054 x = TREE_REAL_CST (arg);
8055 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8057 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8060 real_floor (&r, TYPE_MODE (type), &x);
8061 return build_real (type, r);
8065 /* Fold floor (x) where x is nonnegative to trunc (x). */
8066 if (tree_expr_nonnegative_p (arg))
8068 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8070 return build_call_expr (truncfn, 1, arg);
8073 return fold_trunc_transparent_mathfn (fndecl, arg);
8076 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8077 Return NULL_TREE if no simplification can be made. */
8080 fold_builtin_ceil (tree fndecl, tree arg)
8082 if (!validate_arg (arg, REAL_TYPE))
8085 /* Optimize ceil of constant value. */
8086 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8090 x = TREE_REAL_CST (arg);
8091 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8093 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8096 real_ceil (&r, TYPE_MODE (type), &x);
8097 return build_real (type, r);
8101 return fold_trunc_transparent_mathfn (fndecl, arg);
8104 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8105 Return NULL_TREE if no simplification can be made. */
8108 fold_builtin_round (tree fndecl, tree arg)
8110 if (!validate_arg (arg, REAL_TYPE))
8113 /* Optimize round of constant value. */
8114 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8118 x = TREE_REAL_CST (arg);
8119 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8121 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8124 real_round (&r, TYPE_MODE (type), &x);
8125 return build_real (type, r);
8129 return fold_trunc_transparent_mathfn (fndecl, arg);
8132 /* Fold function call to builtin lround, lroundf or lroundl (or the
8133 corresponding long long versions) and other rounding functions. ARG
8134 is the argument to the call. Return NULL_TREE if no simplification
8138 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8140 if (!validate_arg (arg, REAL_TYPE))
8143 /* Optimize lround of constant value. */
8144 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8146 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8148 if (real_isfinite (&x))
8150 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8151 tree ftype = TREE_TYPE (arg);
8152 unsigned HOST_WIDE_INT lo2;
8153 HOST_WIDE_INT hi, lo;
8156 switch (DECL_FUNCTION_CODE (fndecl))
8158 CASE_FLT_FN (BUILT_IN_LFLOOR):
8159 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8160 real_floor (&r, TYPE_MODE (ftype), &x);
8163 CASE_FLT_FN (BUILT_IN_LCEIL):
8164 CASE_FLT_FN (BUILT_IN_LLCEIL):
8165 real_ceil (&r, TYPE_MODE (ftype), &x);
8168 CASE_FLT_FN (BUILT_IN_LROUND):
8169 CASE_FLT_FN (BUILT_IN_LLROUND):
8170 real_round (&r, TYPE_MODE (ftype), &x);
8177 REAL_VALUE_TO_INT (&lo, &hi, r);
8178 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8179 return build_int_cst_wide (itype, lo2, hi);
8183 switch (DECL_FUNCTION_CODE (fndecl))
8185 CASE_FLT_FN (BUILT_IN_LFLOOR):
8186 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8187 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8188 if (tree_expr_nonnegative_p (arg))
8189 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8195 return fold_fixed_mathfn (fndecl, arg);
8198 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8199 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8200 the argument to the call. Return NULL_TREE if no simplification can
8204 fold_builtin_bitop (tree fndecl, tree arg)
8206 if (!validate_arg (arg, INTEGER_TYPE))
8209 /* Optimize for constant argument. */
8210 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8212 HOST_WIDE_INT hi, width, result;
8213 unsigned HOST_WIDE_INT lo;
8216 type = TREE_TYPE (arg);
8217 width = TYPE_PRECISION (type);
8218 lo = TREE_INT_CST_LOW (arg);
8220 /* Clear all the bits that are beyond the type's precision. */
8221 if (width > HOST_BITS_PER_WIDE_INT)
8223 hi = TREE_INT_CST_HIGH (arg);
8224 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8225 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8230 if (width < HOST_BITS_PER_WIDE_INT)
8231 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8234 switch (DECL_FUNCTION_CODE (fndecl))
8236 CASE_INT_FN (BUILT_IN_FFS):
8238 result = exact_log2 (lo & -lo) + 1;
8240 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8245 CASE_INT_FN (BUILT_IN_CLZ):
8247 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8249 result = width - floor_log2 (lo) - 1;
8250 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8254 CASE_INT_FN (BUILT_IN_CTZ):
8256 result = exact_log2 (lo & -lo);
8258 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8259 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8263 CASE_INT_FN (BUILT_IN_POPCOUNT):
8266 result++, lo &= lo - 1;
8268 result++, hi &= hi - 1;
8271 CASE_INT_FN (BUILT_IN_PARITY):
8274 result++, lo &= lo - 1;
8276 result++, hi &= hi - 1;
8284 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8290 /* Fold function call to builtin_bswap and the long and long long
8291 variants. Return NULL_TREE if no simplification can be made. */
8293 fold_builtin_bswap (tree fndecl, tree arg)
8295 if (! validate_arg (arg, INTEGER_TYPE))
8298 /* Optimize constant value. */
8299 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8301 HOST_WIDE_INT hi, width, r_hi = 0;
8302 unsigned HOST_WIDE_INT lo, r_lo = 0;
8305 type = TREE_TYPE (arg);
8306 width = TYPE_PRECISION (type);
8307 lo = TREE_INT_CST_LOW (arg);
8308 hi = TREE_INT_CST_HIGH (arg);
8310 switch (DECL_FUNCTION_CODE (fndecl))
8312 case BUILT_IN_BSWAP32:
8313 case BUILT_IN_BSWAP64:
8317 for (s = 0; s < width; s += 8)
8319 int d = width - s - 8;
8320 unsigned HOST_WIDE_INT byte;
8322 if (s < HOST_BITS_PER_WIDE_INT)
8323 byte = (lo >> s) & 0xff;
8325 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8327 if (d < HOST_BITS_PER_WIDE_INT)
8330 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8340 if (width < HOST_BITS_PER_WIDE_INT)
8341 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8343 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8349 /* Return true if EXPR is the real constant contained in VALUE. */
8352 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8356 return ((TREE_CODE (expr) == REAL_CST
8357 && !TREE_OVERFLOW (expr)
8358 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8359 || (TREE_CODE (expr) == COMPLEX_CST
8360 && real_dconstp (TREE_REALPART (expr), value)
8361 && real_zerop (TREE_IMAGPART (expr))));
8364 /* A subroutine of fold_builtin to fold the various logarithmic
8365 functions. Return NULL_TREE if no simplification can me made.
8366 FUNC is the corresponding MPFR logarithm function. */
8369 fold_builtin_logarithm (tree fndecl, tree arg,
8370 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8372 if (validate_arg (arg, REAL_TYPE))
8374 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8376 const enum built_in_function fcode = builtin_mathfn_code (arg);
8378 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8379 instead we'll look for 'e' truncated to MODE. So only do
8380 this if flag_unsafe_math_optimizations is set. */
8381 if (flag_unsafe_math_optimizations && func == mpfr_log)
8383 const REAL_VALUE_TYPE e_truncated =
8384 real_value_truncate (TYPE_MODE (type), dconst_e ());
8385 if (real_dconstp (arg, &e_truncated))
8386 return build_real (type, dconst1);
8389 /* Calculate the result when the argument is a constant. */
8390 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8393 /* Special case, optimize logN(expN(x)) = x. */
8394 if (flag_unsafe_math_optimizations
8395 && ((func == mpfr_log
8396 && (fcode == BUILT_IN_EXP
8397 || fcode == BUILT_IN_EXPF
8398 || fcode == BUILT_IN_EXPL))
8399 || (func == mpfr_log2
8400 && (fcode == BUILT_IN_EXP2
8401 || fcode == BUILT_IN_EXP2F
8402 || fcode == BUILT_IN_EXP2L))
8403 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8404 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8406 /* Optimize logN(func()) for various exponential functions. We
8407 want to determine the value "x" and the power "exponent" in
8408 order to transform logN(x**exponent) into exponent*logN(x). */
8409 if (flag_unsafe_math_optimizations)
8411 tree exponent = 0, x = 0;
8415 CASE_FLT_FN (BUILT_IN_EXP):
8416 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8417 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8419 exponent = CALL_EXPR_ARG (arg, 0);
8421 CASE_FLT_FN (BUILT_IN_EXP2):
8422 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8423 x = build_real (type, dconst2);
8424 exponent = CALL_EXPR_ARG (arg, 0);
8426 CASE_FLT_FN (BUILT_IN_EXP10):
8427 CASE_FLT_FN (BUILT_IN_POW10):
8428 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8430 REAL_VALUE_TYPE dconst10;
8431 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8432 x = build_real (type, dconst10);
8434 exponent = CALL_EXPR_ARG (arg, 0);
8436 CASE_FLT_FN (BUILT_IN_SQRT):
8437 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8438 x = CALL_EXPR_ARG (arg, 0);
8439 exponent = build_real (type, dconsthalf);
8441 CASE_FLT_FN (BUILT_IN_CBRT):
8442 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8443 x = CALL_EXPR_ARG (arg, 0);
8444 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8447 CASE_FLT_FN (BUILT_IN_POW):
8448 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8449 x = CALL_EXPR_ARG (arg, 0);
8450 exponent = CALL_EXPR_ARG (arg, 1);
8456 /* Now perform the optimization. */
8459 tree logfn = build_call_expr (fndecl, 1, x);
8460 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8468 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8469 NULL_TREE if no simplification can be made. */
8472 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8474 tree res, narg0, narg1;
8476 if (!validate_arg (arg0, REAL_TYPE)
8477 || !validate_arg (arg1, REAL_TYPE))
8480 /* Calculate the result when the argument is a constant. */
8481 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8484 /* If either argument to hypot has a negate or abs, strip that off.
8485 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8486 narg0 = fold_strip_sign_ops (arg0);
8487 narg1 = fold_strip_sign_ops (arg1);
8490 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8491 narg1 ? narg1 : arg1);
8494 /* If either argument is zero, hypot is fabs of the other. */
8495 if (real_zerop (arg0))
8496 return fold_build1 (ABS_EXPR, type, arg1);
8497 else if (real_zerop (arg1))
8498 return fold_build1 (ABS_EXPR, type, arg0);
8500 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8501 if (flag_unsafe_math_optimizations
8502 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8504 const REAL_VALUE_TYPE sqrt2_trunc
8505 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8506 return fold_build2 (MULT_EXPR, type,
8507 fold_build1 (ABS_EXPR, type, arg0),
8508 build_real (type, sqrt2_trunc));
8515 /* Fold a builtin function call to pow, powf, or powl. Return
8516 NULL_TREE if no simplification can be made. */
8518 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8522 if (!validate_arg (arg0, REAL_TYPE)
8523 || !validate_arg (arg1, REAL_TYPE))
8526 /* Calculate the result when the argument is a constant. */
8527 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8530 /* Optimize pow(1.0,y) = 1.0. */
8531 if (real_onep (arg0))
8532 return omit_one_operand (type, build_real (type, dconst1), arg1);
8534 if (TREE_CODE (arg1) == REAL_CST
8535 && !TREE_OVERFLOW (arg1))
8537 REAL_VALUE_TYPE cint;
8541 c = TREE_REAL_CST (arg1);
8543 /* Optimize pow(x,0.0) = 1.0. */
8544 if (REAL_VALUES_EQUAL (c, dconst0))
8545 return omit_one_operand (type, build_real (type, dconst1),
8548 /* Optimize pow(x,1.0) = x. */
8549 if (REAL_VALUES_EQUAL (c, dconst1))
8552 /* Optimize pow(x,-1.0) = 1.0/x. */
8553 if (REAL_VALUES_EQUAL (c, dconstm1))
8554 return fold_build2 (RDIV_EXPR, type,
8555 build_real (type, dconst1), arg0);
8557 /* Optimize pow(x,0.5) = sqrt(x). */
8558 if (flag_unsafe_math_optimizations
8559 && REAL_VALUES_EQUAL (c, dconsthalf))
8561 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8563 if (sqrtfn != NULL_TREE)
8564 return build_call_expr (sqrtfn, 1, arg0);
8567 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8568 if (flag_unsafe_math_optimizations)
8570 const REAL_VALUE_TYPE dconstroot
8571 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8573 if (REAL_VALUES_EQUAL (c, dconstroot))
8575 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8576 if (cbrtfn != NULL_TREE)
8577 return build_call_expr (cbrtfn, 1, arg0);
8581 /* Check for an integer exponent. */
8582 n = real_to_integer (&c);
8583 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8584 if (real_identical (&c, &cint))
8586 /* Attempt to evaluate pow at compile-time, unless this should
8587 raise an exception. */
8588 if (TREE_CODE (arg0) == REAL_CST
8589 && !TREE_OVERFLOW (arg0)
8591 || (!flag_trapping_math && !flag_errno_math)
8592 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8597 x = TREE_REAL_CST (arg0);
8598 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8599 if (flag_unsafe_math_optimizations || !inexact)
8600 return build_real (type, x);
8603 /* Strip sign ops from even integer powers. */
8604 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8606 tree narg0 = fold_strip_sign_ops (arg0);
8608 return build_call_expr (fndecl, 2, narg0, arg1);
8613 if (flag_unsafe_math_optimizations)
8615 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8617 /* Optimize pow(expN(x),y) = expN(x*y). */
8618 if (BUILTIN_EXPONENT_P (fcode))
8620 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8621 tree arg = CALL_EXPR_ARG (arg0, 0);
8622 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8623 return build_call_expr (expfn, 1, arg);
8626 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8627 if (BUILTIN_SQRT_P (fcode))
8629 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8630 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8631 build_real (type, dconsthalf));
8632 return build_call_expr (fndecl, 2, narg0, narg1);
8635 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8636 if (BUILTIN_CBRT_P (fcode))
8638 tree arg = CALL_EXPR_ARG (arg0, 0);
8639 if (tree_expr_nonnegative_p (arg))
8641 const REAL_VALUE_TYPE dconstroot
8642 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8643 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8644 build_real (type, dconstroot));
8645 return build_call_expr (fndecl, 2, arg, narg1);
8649 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8650 if (fcode == BUILT_IN_POW
8651 || fcode == BUILT_IN_POWF
8652 || fcode == BUILT_IN_POWL)
8654 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8655 if (tree_expr_nonnegative_p (arg00))
8657 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8658 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8659 return build_call_expr (fndecl, 2, arg00, narg1);
8667 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8668 Return NULL_TREE if no simplification can be made. */
8670 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8671 tree arg0, tree arg1, tree type)
8673 if (!validate_arg (arg0, REAL_TYPE)
8674 || !validate_arg (arg1, INTEGER_TYPE))
8677 /* Optimize pow(1.0,y) = 1.0. */
8678 if (real_onep (arg0))
8679 return omit_one_operand (type, build_real (type, dconst1), arg1);
8681 if (host_integerp (arg1, 0))
8683 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8685 /* Evaluate powi at compile-time. */
8686 if (TREE_CODE (arg0) == REAL_CST
8687 && !TREE_OVERFLOW (arg0))
8690 x = TREE_REAL_CST (arg0);
8691 real_powi (&x, TYPE_MODE (type), &x, c);
8692 return build_real (type, x);
8695 /* Optimize pow(x,0) = 1.0. */
8697 return omit_one_operand (type, build_real (type, dconst1),
8700 /* Optimize pow(x,1) = x. */
8704 /* Optimize pow(x,-1) = 1.0/x. */
8706 return fold_build2 (RDIV_EXPR, type,
8707 build_real (type, dconst1), arg0);
8713 /* A subroutine of fold_builtin to fold the various exponent
8714 functions. Return NULL_TREE if no simplification can be made.
8715 FUNC is the corresponding MPFR exponent function. */
8718 fold_builtin_exponent (tree fndecl, tree arg,
8719 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8721 if (validate_arg (arg, REAL_TYPE))
8723 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8726 /* Calculate the result when the argument is a constant. */
8727 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8730 /* Optimize expN(logN(x)) = x. */
8731 if (flag_unsafe_math_optimizations)
8733 const enum built_in_function fcode = builtin_mathfn_code (arg);
8735 if ((func == mpfr_exp
8736 && (fcode == BUILT_IN_LOG
8737 || fcode == BUILT_IN_LOGF
8738 || fcode == BUILT_IN_LOGL))
8739 || (func == mpfr_exp2
8740 && (fcode == BUILT_IN_LOG2
8741 || fcode == BUILT_IN_LOG2F
8742 || fcode == BUILT_IN_LOG2L))
8743 || (func == mpfr_exp10
8744 && (fcode == BUILT_IN_LOG10
8745 || fcode == BUILT_IN_LOG10F
8746 || fcode == BUILT_IN_LOG10L)))
8747 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8754 /* Return true if VAR is a VAR_DECL or a component thereof. */
8757 var_decl_component_p (tree var)
8760 while (handled_component_p (inner))
8761 inner = TREE_OPERAND (inner, 0);
8762 return SSA_VAR_P (inner);
8765 /* Fold function call to builtin memset. Return
8766 NULL_TREE if no simplification can be made. */
8769 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8772 unsigned HOST_WIDE_INT length, cval;
8774 if (! validate_arg (dest, POINTER_TYPE)
8775 || ! validate_arg (c, INTEGER_TYPE)
8776 || ! validate_arg (len, INTEGER_TYPE))
8779 if (! host_integerp (len, 1))
8782 /* If the LEN parameter is zero, return DEST. */
8783 if (integer_zerop (len))
8784 return omit_one_operand (type, dest, c);
8786 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8791 if (TREE_CODE (var) != ADDR_EXPR)
8794 var = TREE_OPERAND (var, 0);
8795 if (TREE_THIS_VOLATILE (var))
8798 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8799 && !POINTER_TYPE_P (TREE_TYPE (var)))
8802 if (! var_decl_component_p (var))
8805 length = tree_low_cst (len, 1);
8806 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8807 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8811 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8814 if (integer_zerop (c))
8818 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8821 cval = tree_low_cst (c, 1);
8825 cval |= (cval << 31) << 1;
8828 ret = build_int_cst_type (TREE_TYPE (var), cval);
8829 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8833 return omit_one_operand (type, dest, ret);
8836 /* Fold function call to builtin memset. Return
8837 NULL_TREE if no simplification can be made. */
8840 fold_builtin_bzero (tree dest, tree size, bool ignore)
8842 if (! validate_arg (dest, POINTER_TYPE)
8843 || ! validate_arg (size, INTEGER_TYPE))
8849 /* New argument list transforming bzero(ptr x, int y) to
8850 memset(ptr x, int 0, size_t y). This is done this way
8851 so that if it isn't expanded inline, we fallback to
8852 calling bzero instead of memset. */
8854 return fold_builtin_memset (dest, integer_zero_node,
8855 fold_convert (sizetype, size),
8856 void_type_node, ignore);
8859 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8860 NULL_TREE if no simplification can be made.
8861 If ENDP is 0, return DEST (like memcpy).
8862 If ENDP is 1, return DEST+LEN (like mempcpy).
8863 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8864 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8868 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8870 tree destvar, srcvar, expr;
8872 if (! validate_arg (dest, POINTER_TYPE)
8873 || ! validate_arg (src, POINTER_TYPE)
8874 || ! validate_arg (len, INTEGER_TYPE))
8877 /* If the LEN parameter is zero, return DEST. */
8878 if (integer_zerop (len))
8879 return omit_one_operand (type, dest, src);
8881 /* If SRC and DEST are the same (and not volatile), return
8882 DEST{,+LEN,+LEN-1}. */
8883 if (operand_equal_p (src, dest, 0))
8887 tree srctype, desttype;
8888 int src_align, dest_align;
8892 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8893 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8895 /* Both DEST and SRC must be pointer types.
8896 ??? This is what old code did. Is the testing for pointer types
8899 If either SRC is readonly or length is 1, we can use memcpy. */
8900 if (dest_align && src_align
8901 && (readonly_data_expr (src)
8902 || (host_integerp (len, 1)
8903 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8904 tree_low_cst (len, 1)))))
8906 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8909 return build_call_expr (fn, 3, dest, src, len);
8914 if (!host_integerp (len, 0))
8917 This logic lose for arguments like (type *)malloc (sizeof (type)),
8918 since we strip the casts of up to VOID return value from malloc.
8919 Perhaps we ought to inherit type from non-VOID argument here? */
8922 srctype = TREE_TYPE (TREE_TYPE (src));
8923 desttype = TREE_TYPE (TREE_TYPE (dest));
8924 if (!srctype || !desttype
8925 || !TYPE_SIZE_UNIT (srctype)
8926 || !TYPE_SIZE_UNIT (desttype)
8927 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8928 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8929 || TYPE_VOLATILE (srctype)
8930 || TYPE_VOLATILE (desttype))
8933 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8934 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8935 if (dest_align < (int) TYPE_ALIGN (desttype)
8936 || src_align < (int) TYPE_ALIGN (srctype))
8940 dest = builtin_save_expr (dest);
8943 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8945 srcvar = build_fold_indirect_ref (src);
8946 if (TREE_THIS_VOLATILE (srcvar))
8948 else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8950 /* With memcpy, it is possible to bypass aliasing rules, so without
8951 this check i.e. execute/20060930-2.c would be misoptimized,
8952 because it use conflicting alias set to hold argument for the
8953 memcpy call. This check is probably unnecessary with
8954 -fno-strict-aliasing. Similarly for destvar. See also
8956 else if (!var_decl_component_p (srcvar))
8960 destvar = NULL_TREE;
8961 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8963 destvar = build_fold_indirect_ref (dest);
8964 if (TREE_THIS_VOLATILE (destvar))
8966 else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8967 destvar = NULL_TREE;
8968 else if (!var_decl_component_p (destvar))
8969 destvar = NULL_TREE;
8972 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8975 if (srcvar == NULL_TREE)
8978 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8981 srctype = build_qualified_type (desttype, 0);
8982 if (src_align < (int) TYPE_ALIGN (srctype))
8984 if (AGGREGATE_TYPE_P (srctype)
8985 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8988 srctype = build_variant_type_copy (srctype);
8989 TYPE_ALIGN (srctype) = src_align;
8990 TYPE_USER_ALIGN (srctype) = 1;
8991 TYPE_PACKED (srctype) = 1;
8993 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8994 src = fold_convert (srcptype, src);
8995 srcvar = build_fold_indirect_ref (src);
8997 else if (destvar == NULL_TREE)
9000 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
9003 desttype = build_qualified_type (srctype, 0);
9004 if (dest_align < (int) TYPE_ALIGN (desttype))
9006 if (AGGREGATE_TYPE_P (desttype)
9007 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
9010 desttype = build_variant_type_copy (desttype);
9011 TYPE_ALIGN (desttype) = dest_align;
9012 TYPE_USER_ALIGN (desttype) = 1;
9013 TYPE_PACKED (desttype) = 1;
9015 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
9016 dest = fold_convert (destptype, dest);
9017 destvar = build_fold_indirect_ref (dest);
9020 if (srctype == desttype
9021 || (gimple_in_ssa_p (cfun)
9022 && useless_type_conversion_p (desttype, srctype)))
9024 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9025 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9026 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9027 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9028 expr = fold_convert (TREE_TYPE (destvar), srcvar);
9030 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
9031 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9037 if (endp == 0 || endp == 3)
9038 return omit_one_operand (type, dest, expr);
9044 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
9047 len = fold_convert (sizetype, len);
9048 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9049 dest = fold_convert (type, dest);
9051 dest = omit_one_operand (type, dest, expr);
9055 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9056 If LEN is not NULL, it represents the length of the string to be
9057 copied. Return NULL_TREE if no simplification can be made. */
9060 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
9064 if (!validate_arg (dest, POINTER_TYPE)
9065 || !validate_arg (src, POINTER_TYPE))
9068 /* If SRC and DEST are the same (and not volatile), return DEST. */
9069 if (operand_equal_p (src, dest, 0))
9070 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
9072 if (optimize_function_for_size_p (cfun))
9075 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9081 len = c_strlen (src, 1);
9082 if (! len || TREE_SIDE_EFFECTS (len))
9086 len = size_binop (PLUS_EXPR, len, ssize_int (1));
9087 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9088 build_call_expr (fn, 3, dest, src, len));
9091 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9092 If SLEN is not NULL, it represents the length of the source string.
9093 Return NULL_TREE if no simplification can be made. */
9096 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
9100 if (!validate_arg (dest, POINTER_TYPE)
9101 || !validate_arg (src, POINTER_TYPE)
9102 || !validate_arg (len, INTEGER_TYPE))
9105 /* If the LEN parameter is zero, return DEST. */
9106 if (integer_zerop (len))
9107 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9109 /* We can't compare slen with len as constants below if len is not a
9111 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9115 slen = c_strlen (src, 1);
9117 /* Now, we must be passed a constant src ptr parameter. */
9118 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9121 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
9123 /* We do not support simplification of this case, though we do
9124 support it when expanding trees into RTL. */
9125 /* FIXME: generate a call to __builtin_memset. */
9126 if (tree_int_cst_lt (slen, len))
9129 /* OK transform into builtin memcpy. */
9130 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9133 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9134 build_call_expr (fn, 3, dest, src, len));
9137 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9138 arguments to the call, and TYPE is its return type.
9139 Return NULL_TREE if no simplification can be made. */
9142 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
9144 if (!validate_arg (arg1, POINTER_TYPE)
9145 || !validate_arg (arg2, INTEGER_TYPE)
9146 || !validate_arg (len, INTEGER_TYPE))
9152 if (TREE_CODE (arg2) != INTEGER_CST
9153 || !host_integerp (len, 1))
9156 p1 = c_getstr (arg1);
9157 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9163 if (target_char_cast (arg2, &c))
9166 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9169 return build_int_cst (TREE_TYPE (arg1), 0);
9171 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9173 return fold_convert (type, tem);
9179 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9180 Return NULL_TREE if no simplification can be made. */
9183 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9185 const char *p1, *p2;
9187 if (!validate_arg (arg1, POINTER_TYPE)
9188 || !validate_arg (arg2, POINTER_TYPE)
9189 || !validate_arg (len, INTEGER_TYPE))
9192 /* If the LEN parameter is zero, return zero. */
9193 if (integer_zerop (len))
9194 return omit_two_operands (integer_type_node, integer_zero_node,
9197 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9198 if (operand_equal_p (arg1, arg2, 0))
9199 return omit_one_operand (integer_type_node, integer_zero_node, len);
9201 p1 = c_getstr (arg1);
9202 p2 = c_getstr (arg2);
9204 /* If all arguments are constant, and the value of len is not greater
9205 than the lengths of arg1 and arg2, evaluate at compile-time. */
9206 if (host_integerp (len, 1) && p1 && p2
9207 && compare_tree_int (len, strlen (p1) + 1) <= 0
9208 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9210 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9213 return integer_one_node;
9215 return integer_minus_one_node;
9217 return integer_zero_node;
9220 /* If len parameter is one, return an expression corresponding to
9221 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9222 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9224 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9225 tree cst_uchar_ptr_node
9226 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9228 tree ind1 = fold_convert (integer_type_node,
9229 build1 (INDIRECT_REF, cst_uchar_node,
9230 fold_convert (cst_uchar_ptr_node,
9232 tree ind2 = fold_convert (integer_type_node,
9233 build1 (INDIRECT_REF, cst_uchar_node,
9234 fold_convert (cst_uchar_ptr_node,
9236 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9242 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9243 Return NULL_TREE if no simplification can be made. */
9246 fold_builtin_strcmp (tree arg1, tree arg2)
9248 const char *p1, *p2;
9250 if (!validate_arg (arg1, POINTER_TYPE)
9251 || !validate_arg (arg2, POINTER_TYPE))
9254 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9255 if (operand_equal_p (arg1, arg2, 0))
9256 return integer_zero_node;
9258 p1 = c_getstr (arg1);
9259 p2 = c_getstr (arg2);
9263 const int i = strcmp (p1, p2);
9265 return integer_minus_one_node;
9267 return integer_one_node;
9269 return integer_zero_node;
9272 /* If the second arg is "", return *(const unsigned char*)arg1. */
9273 if (p2 && *p2 == '\0')
9275 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9276 tree cst_uchar_ptr_node
9277 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9279 return fold_convert (integer_type_node,
9280 build1 (INDIRECT_REF, cst_uchar_node,
9281 fold_convert (cst_uchar_ptr_node,
9285 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9286 if (p1 && *p1 == '\0')
9288 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9289 tree cst_uchar_ptr_node
9290 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9292 tree temp = fold_convert (integer_type_node,
9293 build1 (INDIRECT_REF, cst_uchar_node,
9294 fold_convert (cst_uchar_ptr_node,
9296 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9302 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9303 Return NULL_TREE if no simplification can be made. */
9306 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9308 const char *p1, *p2;
9310 if (!validate_arg (arg1, POINTER_TYPE)
9311 || !validate_arg (arg2, POINTER_TYPE)
9312 || !validate_arg (len, INTEGER_TYPE))
9315 /* If the LEN parameter is zero, return zero. */
9316 if (integer_zerop (len))
9317 return omit_two_operands (integer_type_node, integer_zero_node,
9320 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9321 if (operand_equal_p (arg1, arg2, 0))
9322 return omit_one_operand (integer_type_node, integer_zero_node, len);
9324 p1 = c_getstr (arg1);
9325 p2 = c_getstr (arg2);
9327 if (host_integerp (len, 1) && p1 && p2)
9329 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9331 return integer_one_node;
9333 return integer_minus_one_node;
9335 return integer_zero_node;
9338 /* If the second arg is "", and the length is greater than zero,
9339 return *(const unsigned char*)arg1. */
9340 if (p2 && *p2 == '\0'
9341 && TREE_CODE (len) == INTEGER_CST
9342 && tree_int_cst_sgn (len) == 1)
9344 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9345 tree cst_uchar_ptr_node
9346 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9348 return fold_convert (integer_type_node,
9349 build1 (INDIRECT_REF, cst_uchar_node,
9350 fold_convert (cst_uchar_ptr_node,
9354 /* If the first arg is "", and the length is greater than zero,
9355 return -*(const unsigned char*)arg2. */
9356 if (p1 && *p1 == '\0'
9357 && TREE_CODE (len) == INTEGER_CST
9358 && tree_int_cst_sgn (len) == 1)
9360 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9361 tree cst_uchar_ptr_node
9362 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9364 tree temp = fold_convert (integer_type_node,
9365 build1 (INDIRECT_REF, cst_uchar_node,
9366 fold_convert (cst_uchar_ptr_node,
9368 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9371 /* If len parameter is one, return an expression corresponding to
9372 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9373 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9375 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9376 tree cst_uchar_ptr_node
9377 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9379 tree ind1 = fold_convert (integer_type_node,
9380 build1 (INDIRECT_REF, cst_uchar_node,
9381 fold_convert (cst_uchar_ptr_node,
9383 tree ind2 = fold_convert (integer_type_node,
9384 build1 (INDIRECT_REF, cst_uchar_node,
9385 fold_convert (cst_uchar_ptr_node,
9387 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9393 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9394 ARG. Return NULL_TREE if no simplification can be made. */
9397 fold_builtin_signbit (tree arg, tree type)
9401 if (!validate_arg (arg, REAL_TYPE))
9404 /* If ARG is a compile-time constant, determine the result. */
9405 if (TREE_CODE (arg) == REAL_CST
9406 && !TREE_OVERFLOW (arg))
9410 c = TREE_REAL_CST (arg);
9411 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9412 return fold_convert (type, temp);
9415 /* If ARG is non-negative, the result is always zero. */
9416 if (tree_expr_nonnegative_p (arg))
9417 return omit_one_operand (type, integer_zero_node, arg);
9419 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9420 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9421 return fold_build2 (LT_EXPR, type, arg,
9422 build_real (TREE_TYPE (arg), dconst0));
9427 /* Fold function call to builtin copysign, copysignf or copysignl with
9428 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9432 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9436 if (!validate_arg (arg1, REAL_TYPE)
9437 || !validate_arg (arg2, REAL_TYPE))
9440 /* copysign(X,X) is X. */
9441 if (operand_equal_p (arg1, arg2, 0))
9442 return fold_convert (type, arg1);
9444 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9445 if (TREE_CODE (arg1) == REAL_CST
9446 && TREE_CODE (arg2) == REAL_CST
9447 && !TREE_OVERFLOW (arg1)
9448 && !TREE_OVERFLOW (arg2))
9450 REAL_VALUE_TYPE c1, c2;
9452 c1 = TREE_REAL_CST (arg1);
9453 c2 = TREE_REAL_CST (arg2);
9454 /* c1.sign := c2.sign. */
9455 real_copysign (&c1, &c2);
9456 return build_real (type, c1);
9459 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9460 Remember to evaluate Y for side-effects. */
9461 if (tree_expr_nonnegative_p (arg2))
9462 return omit_one_operand (type,
9463 fold_build1 (ABS_EXPR, type, arg1),
9466 /* Strip sign changing operations for the first argument. */
9467 tem = fold_strip_sign_ops (arg1);
9469 return build_call_expr (fndecl, 2, tem, arg2);
9474 /* Fold a call to builtin isascii with argument ARG. */
9477 fold_builtin_isascii (tree arg)
9479 if (!validate_arg (arg, INTEGER_TYPE))
9483 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9484 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9485 build_int_cst (NULL_TREE,
9486 ~ (unsigned HOST_WIDE_INT) 0x7f));
9487 return fold_build2 (EQ_EXPR, integer_type_node,
9488 arg, integer_zero_node);
9492 /* Fold a call to builtin toascii with argument ARG. */
9495 fold_builtin_toascii (tree arg)
9497 if (!validate_arg (arg, INTEGER_TYPE))
9500 /* Transform toascii(c) -> (c & 0x7f). */
9501 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9502 build_int_cst (NULL_TREE, 0x7f));
9505 /* Fold a call to builtin isdigit with argument ARG. */
9508 fold_builtin_isdigit (tree arg)
9510 if (!validate_arg (arg, INTEGER_TYPE))
9514 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9515 /* According to the C standard, isdigit is unaffected by locale.
9516 However, it definitely is affected by the target character set. */
9517 unsigned HOST_WIDE_INT target_digit0
9518 = lang_hooks.to_target_charset ('0');
9520 if (target_digit0 == 0)
9523 arg = fold_convert (unsigned_type_node, arg);
9524 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9525 build_int_cst (unsigned_type_node, target_digit0));
9526 return fold_build2 (LE_EXPR, integer_type_node, arg,
9527 build_int_cst (unsigned_type_node, 9));
9531 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9534 fold_builtin_fabs (tree arg, tree type)
9536 if (!validate_arg (arg, REAL_TYPE))
9539 arg = fold_convert (type, arg);
9540 if (TREE_CODE (arg) == REAL_CST)
9541 return fold_abs_const (arg, type);
9542 return fold_build1 (ABS_EXPR, type, arg);
9545 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9548 fold_builtin_abs (tree arg, tree type)
9550 if (!validate_arg (arg, INTEGER_TYPE))
9553 arg = fold_convert (type, arg);
9554 if (TREE_CODE (arg) == INTEGER_CST)
9555 return fold_abs_const (arg, type);
9556 return fold_build1 (ABS_EXPR, type, arg);
9559 /* Fold a call to builtin fmin or fmax. */
9562 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9564 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9566 /* Calculate the result when the argument is a constant. */
9567 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9572 /* If either argument is NaN, return the other one. Avoid the
9573 transformation if we get (and honor) a signalling NaN. Using
9574 omit_one_operand() ensures we create a non-lvalue. */
9575 if (TREE_CODE (arg0) == REAL_CST
9576 && real_isnan (&TREE_REAL_CST (arg0))
9577 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9578 || ! TREE_REAL_CST (arg0).signalling))
9579 return omit_one_operand (type, arg1, arg0);
9580 if (TREE_CODE (arg1) == REAL_CST
9581 && real_isnan (&TREE_REAL_CST (arg1))
9582 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9583 || ! TREE_REAL_CST (arg1).signalling))
9584 return omit_one_operand (type, arg0, arg1);
9586 /* Transform fmin/fmax(x,x) -> x. */
9587 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9588 return omit_one_operand (type, arg0, arg1);
9590 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9591 functions to return the numeric arg if the other one is NaN.
9592 These tree codes don't honor that, so only transform if
9593 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9594 handled, so we don't have to worry about it either. */
9595 if (flag_finite_math_only)
9596 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9597 fold_convert (type, arg0),
9598 fold_convert (type, arg1));
9603 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9606 fold_builtin_carg (tree arg, tree type)
9608 if (validate_arg (arg, COMPLEX_TYPE))
9610 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9614 tree new_arg = builtin_save_expr (arg);
9615 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9616 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9617 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9624 /* Fold a call to builtin logb/ilogb. */
9627 fold_builtin_logb (tree arg, tree rettype)
9629 if (! validate_arg (arg, REAL_TYPE))
9634 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9636 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9642 /* If arg is Inf or NaN and we're logb, return it. */
9643 if (TREE_CODE (rettype) == REAL_TYPE)
9644 return fold_convert (rettype, arg);
9645 /* Fall through... */
9647 /* Zero may set errno and/or raise an exception for logb, also
9648 for ilogb we don't know FP_ILOGB0. */
9651 /* For normal numbers, proceed iff radix == 2. In GCC,
9652 normalized significands are in the range [0.5, 1.0). We
9653 want the exponent as if they were [1.0, 2.0) so get the
9654 exponent and subtract 1. */
9655 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9656 return fold_convert (rettype, build_int_cst (NULL_TREE,
9657 REAL_EXP (value)-1));
9665 /* Fold a call to builtin significand, if radix == 2. */
9668 fold_builtin_significand (tree arg, tree rettype)
9670 if (! validate_arg (arg, REAL_TYPE))
9675 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9677 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9684 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9685 return fold_convert (rettype, arg);
9687 /* For normal numbers, proceed iff radix == 2. */
9688 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9690 REAL_VALUE_TYPE result = *value;
9691 /* In GCC, normalized significands are in the range [0.5,
9692 1.0). We want them to be [1.0, 2.0) so set the
9694 SET_REAL_EXP (&result, 1);
9695 return build_real (rettype, result);
9704 /* Fold a call to builtin frexp, we can assume the base is 2. */
9707 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9709 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9714 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9717 arg1 = build_fold_indirect_ref (arg1);
9719 /* Proceed if a valid pointer type was passed in. */
9720 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9722 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9728 /* For +-0, return (*exp = 0, +-0). */
9729 exp = integer_zero_node;
9734 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9735 return omit_one_operand (rettype, arg0, arg1);
9738 /* Since the frexp function always expects base 2, and in
9739 GCC normalized significands are already in the range
9740 [0.5, 1.0), we have exactly what frexp wants. */
9741 REAL_VALUE_TYPE frac_rvt = *value;
9742 SET_REAL_EXP (&frac_rvt, 0);
9743 frac = build_real (rettype, frac_rvt);
9744 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9751 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9752 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9753 TREE_SIDE_EFFECTS (arg1) = 1;
9754 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9760 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9761 then we can assume the base is two. If it's false, then we have to
9762 check the mode of the TYPE parameter in certain cases. */
9765 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9767 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9772 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9773 if (real_zerop (arg0) || integer_zerop (arg1)
9774 || (TREE_CODE (arg0) == REAL_CST
9775 && !real_isfinite (&TREE_REAL_CST (arg0))))
9776 return omit_one_operand (type, arg0, arg1);
9778 /* If both arguments are constant, then try to evaluate it. */
9779 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9780 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9781 && host_integerp (arg1, 0))
9783 /* Bound the maximum adjustment to twice the range of the
9784 mode's valid exponents. Use abs to ensure the range is
9785 positive as a sanity check. */
9786 const long max_exp_adj = 2 *
9787 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9788 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9790 /* Get the user-requested adjustment. */
9791 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9793 /* The requested adjustment must be inside this range. This
9794 is a preliminary cap to avoid things like overflow, we
9795 may still fail to compute the result for other reasons. */
9796 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9798 REAL_VALUE_TYPE initial_result;
9800 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9802 /* Ensure we didn't overflow. */
9803 if (! real_isinf (&initial_result))
9805 const REAL_VALUE_TYPE trunc_result
9806 = real_value_truncate (TYPE_MODE (type), initial_result);
9808 /* Only proceed if the target mode can hold the
9810 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9811 return build_real (type, trunc_result);
9820 /* Fold a call to builtin modf. */
9823 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9825 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9830 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9833 arg1 = build_fold_indirect_ref (arg1);
9835 /* Proceed if a valid pointer type was passed in. */
9836 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9838 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9839 REAL_VALUE_TYPE trunc, frac;
9845 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9846 trunc = frac = *value;
9849 /* For +-Inf, return (*arg1 = arg0, +-0). */
9851 frac.sign = value->sign;
9855 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9856 real_trunc (&trunc, VOIDmode, value);
9857 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9858 /* If the original number was negative and already
9859 integral, then the fractional part is -0.0. */
9860 if (value->sign && frac.cl == rvc_zero)
9861 frac.sign = value->sign;
9865 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9866 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9867 build_real (rettype, trunc));
9868 TREE_SIDE_EFFECTS (arg1) = 1;
9869 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9870 build_real (rettype, frac));
9876 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9877 ARG is the argument for the call. */
9880 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9882 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9885 if (!validate_arg (arg, REAL_TYPE))
9888 switch (builtin_index)
9890 case BUILT_IN_ISINF:
9891 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9892 return omit_one_operand (type, integer_zero_node, arg);
9894 if (TREE_CODE (arg) == REAL_CST)
9896 r = TREE_REAL_CST (arg);
9897 if (real_isinf (&r))
9898 return real_compare (GT_EXPR, &r, &dconst0)
9899 ? integer_one_node : integer_minus_one_node;
9901 return integer_zero_node;
9906 case BUILT_IN_ISINF_SIGN:
9908 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9909 /* In a boolean context, GCC will fold the inner COND_EXPR to
9910 1. So e.g. "if (isinf_sign(x))" would be folded to just
9911 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9912 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9913 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9914 tree tmp = NULL_TREE;
9916 arg = builtin_save_expr (arg);
9918 if (signbit_fn && isinf_fn)
9920 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9921 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9923 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9924 signbit_call, integer_zero_node);
9925 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9926 isinf_call, integer_zero_node);
9928 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9929 integer_minus_one_node, integer_one_node);
9930 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9937 case BUILT_IN_ISFINITE:
9938 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9939 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9940 return omit_one_operand (type, integer_one_node, arg);
9942 if (TREE_CODE (arg) == REAL_CST)
9944 r = TREE_REAL_CST (arg);
9945 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9950 case BUILT_IN_ISNAN:
9951 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9952 return omit_one_operand (type, integer_zero_node, arg);
9954 if (TREE_CODE (arg) == REAL_CST)
9956 r = TREE_REAL_CST (arg);
9957 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9960 arg = builtin_save_expr (arg);
9961 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9968 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9969 This builtin will generate code to return the appropriate floating
9970 point classification depending on the value of the floating point
9971 number passed in. The possible return values must be supplied as
9972 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9973 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9974 one floating point argument which is "type generic". */
9977 fold_builtin_fpclassify (tree exp)
9979 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9980 arg, type, res, tmp;
9981 enum machine_mode mode;
9985 /* Verify the required arguments in the original call. */
9986 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9987 INTEGER_TYPE, INTEGER_TYPE,
9988 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9991 fp_nan = CALL_EXPR_ARG (exp, 0);
9992 fp_infinite = CALL_EXPR_ARG (exp, 1);
9993 fp_normal = CALL_EXPR_ARG (exp, 2);
9994 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9995 fp_zero = CALL_EXPR_ARG (exp, 4);
9996 arg = CALL_EXPR_ARG (exp, 5);
9997 type = TREE_TYPE (arg);
9998 mode = TYPE_MODE (type);
9999 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
10001 /* fpclassify(x) ->
10002 isnan(x) ? FP_NAN :
10003 (fabs(x) == Inf ? FP_INFINITE :
10004 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10005 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10007 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10008 build_real (type, dconst0));
10009 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
10011 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10012 real_from_string (&r, buf);
10013 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
10014 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
10016 if (HONOR_INFINITIES (mode))
10019 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10020 build_real (type, r));
10021 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
10024 if (HONOR_NANS (mode))
10026 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
10027 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
10033 /* Fold a call to an unordered comparison function such as
10034 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10035 being called and ARG0 and ARG1 are the arguments for the call.
10036 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10037 the opposite of the desired result. UNORDERED_CODE is used
10038 for modes that can hold NaNs and ORDERED_CODE is used for
10042 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
10043 enum tree_code unordered_code,
10044 enum tree_code ordered_code)
10046 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10047 enum tree_code code;
10049 enum tree_code code0, code1;
10050 tree cmp_type = NULL_TREE;
10052 type0 = TREE_TYPE (arg0);
10053 type1 = TREE_TYPE (arg1);
10055 code0 = TREE_CODE (type0);
10056 code1 = TREE_CODE (type1);
10058 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10059 /* Choose the wider of two real types. */
10060 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10062 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10064 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10067 arg0 = fold_convert (cmp_type, arg0);
10068 arg1 = fold_convert (cmp_type, arg1);
10070 if (unordered_code == UNORDERED_EXPR)
10072 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10073 return omit_two_operands (type, integer_zero_node, arg0, arg1);
10074 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
10077 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10079 return fold_build1 (TRUTH_NOT_EXPR, type,
10080 fold_build2 (code, type, arg0, arg1));
10083 /* Fold a call to built-in function FNDECL with 0 arguments.
10084 IGNORE is true if the result of the function call is ignored. This
10085 function returns NULL_TREE if no simplification was possible. */
10088 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10090 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10091 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10094 CASE_FLT_FN (BUILT_IN_INF):
10095 case BUILT_IN_INFD32:
10096 case BUILT_IN_INFD64:
10097 case BUILT_IN_INFD128:
10098 return fold_builtin_inf (type, true);
10100 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10101 return fold_builtin_inf (type, false);
10103 case BUILT_IN_CLASSIFY_TYPE:
10104 return fold_builtin_classify_type (NULL_TREE);
10112 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10113 IGNORE is true if the result of the function call is ignored. This
10114 function returns NULL_TREE if no simplification was possible. */
10117 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
10119 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10120 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10124 case BUILT_IN_CONSTANT_P:
10126 tree val = fold_builtin_constant_p (arg0);
10128 /* Gimplification will pull the CALL_EXPR for the builtin out of
10129 an if condition. When not optimizing, we'll not CSE it back.
10130 To avoid link error types of regressions, return false now. */
10131 if (!val && !optimize)
10132 val = integer_zero_node;
10137 case BUILT_IN_CLASSIFY_TYPE:
10138 return fold_builtin_classify_type (arg0);
10140 case BUILT_IN_STRLEN:
10141 return fold_builtin_strlen (type, arg0);
10143 CASE_FLT_FN (BUILT_IN_FABS):
10144 return fold_builtin_fabs (arg0, type);
10147 case BUILT_IN_LABS:
10148 case BUILT_IN_LLABS:
10149 case BUILT_IN_IMAXABS:
10150 return fold_builtin_abs (arg0, type);
10152 CASE_FLT_FN (BUILT_IN_CONJ):
10153 if (validate_arg (arg0, COMPLEX_TYPE))
10154 return fold_build1 (CONJ_EXPR, type, arg0);
10157 CASE_FLT_FN (BUILT_IN_CREAL):
10158 if (validate_arg (arg0, COMPLEX_TYPE))
10159 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
10162 CASE_FLT_FN (BUILT_IN_CIMAG):
10163 if (validate_arg (arg0, COMPLEX_TYPE))
10164 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10167 CASE_FLT_FN (BUILT_IN_CCOS):
10168 CASE_FLT_FN (BUILT_IN_CCOSH):
10169 /* These functions are "even", i.e. f(x) == f(-x). */
10170 if (validate_arg (arg0, COMPLEX_TYPE))
10172 tree narg = fold_strip_sign_ops (arg0);
10174 return build_call_expr (fndecl, 1, narg);
10178 CASE_FLT_FN (BUILT_IN_CABS):
10179 return fold_builtin_cabs (arg0, type, fndecl);
10181 CASE_FLT_FN (BUILT_IN_CARG):
10182 return fold_builtin_carg (arg0, type);
10184 CASE_FLT_FN (BUILT_IN_SQRT):
10185 return fold_builtin_sqrt (arg0, type);
10187 CASE_FLT_FN (BUILT_IN_CBRT):
10188 return fold_builtin_cbrt (arg0, type);
10190 CASE_FLT_FN (BUILT_IN_ASIN):
10191 if (validate_arg (arg0, REAL_TYPE))
10192 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10193 &dconstm1, &dconst1, true);
10196 CASE_FLT_FN (BUILT_IN_ACOS):
10197 if (validate_arg (arg0, REAL_TYPE))
10198 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10199 &dconstm1, &dconst1, true);
10202 CASE_FLT_FN (BUILT_IN_ATAN):
10203 if (validate_arg (arg0, REAL_TYPE))
10204 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10207 CASE_FLT_FN (BUILT_IN_ASINH):
10208 if (validate_arg (arg0, REAL_TYPE))
10209 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10212 CASE_FLT_FN (BUILT_IN_ACOSH):
10213 if (validate_arg (arg0, REAL_TYPE))
10214 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10215 &dconst1, NULL, true);
10218 CASE_FLT_FN (BUILT_IN_ATANH):
10219 if (validate_arg (arg0, REAL_TYPE))
10220 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10221 &dconstm1, &dconst1, false);
10224 CASE_FLT_FN (BUILT_IN_SIN):
10225 if (validate_arg (arg0, REAL_TYPE))
10226 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10229 CASE_FLT_FN (BUILT_IN_COS):
10230 return fold_builtin_cos (arg0, type, fndecl);
10233 CASE_FLT_FN (BUILT_IN_TAN):
10234 return fold_builtin_tan (arg0, type);
10236 CASE_FLT_FN (BUILT_IN_CEXP):
10237 return fold_builtin_cexp (arg0, type);
10239 CASE_FLT_FN (BUILT_IN_CEXPI):
10240 if (validate_arg (arg0, REAL_TYPE))
10241 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10244 CASE_FLT_FN (BUILT_IN_SINH):
10245 if (validate_arg (arg0, REAL_TYPE))
10246 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10249 CASE_FLT_FN (BUILT_IN_COSH):
10250 return fold_builtin_cosh (arg0, type, fndecl);
10252 CASE_FLT_FN (BUILT_IN_TANH):
10253 if (validate_arg (arg0, REAL_TYPE))
10254 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10257 CASE_FLT_FN (BUILT_IN_ERF):
10258 if (validate_arg (arg0, REAL_TYPE))
10259 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10262 CASE_FLT_FN (BUILT_IN_ERFC):
10263 if (validate_arg (arg0, REAL_TYPE))
10264 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10267 CASE_FLT_FN (BUILT_IN_TGAMMA):
10268 if (validate_arg (arg0, REAL_TYPE))
10269 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10272 CASE_FLT_FN (BUILT_IN_EXP):
10273 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10275 CASE_FLT_FN (BUILT_IN_EXP2):
10276 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10278 CASE_FLT_FN (BUILT_IN_EXP10):
10279 CASE_FLT_FN (BUILT_IN_POW10):
10280 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10282 CASE_FLT_FN (BUILT_IN_EXPM1):
10283 if (validate_arg (arg0, REAL_TYPE))
10284 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10287 CASE_FLT_FN (BUILT_IN_LOG):
10288 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10290 CASE_FLT_FN (BUILT_IN_LOG2):
10291 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10293 CASE_FLT_FN (BUILT_IN_LOG10):
10294 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10296 CASE_FLT_FN (BUILT_IN_LOG1P):
10297 if (validate_arg (arg0, REAL_TYPE))
10298 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10299 &dconstm1, NULL, false);
10302 CASE_FLT_FN (BUILT_IN_J0):
10303 if (validate_arg (arg0, REAL_TYPE))
10304 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10308 CASE_FLT_FN (BUILT_IN_J1):
10309 if (validate_arg (arg0, REAL_TYPE))
10310 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10314 CASE_FLT_FN (BUILT_IN_Y0):
10315 if (validate_arg (arg0, REAL_TYPE))
10316 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10317 &dconst0, NULL, false);
10320 CASE_FLT_FN (BUILT_IN_Y1):
10321 if (validate_arg (arg0, REAL_TYPE))
10322 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10323 &dconst0, NULL, false);
10326 CASE_FLT_FN (BUILT_IN_NAN):
10327 case BUILT_IN_NAND32:
10328 case BUILT_IN_NAND64:
10329 case BUILT_IN_NAND128:
10330 return fold_builtin_nan (arg0, type, true);
10332 CASE_FLT_FN (BUILT_IN_NANS):
10333 return fold_builtin_nan (arg0, type, false);
10335 CASE_FLT_FN (BUILT_IN_FLOOR):
10336 return fold_builtin_floor (fndecl, arg0);
10338 CASE_FLT_FN (BUILT_IN_CEIL):
10339 return fold_builtin_ceil (fndecl, arg0);
10341 CASE_FLT_FN (BUILT_IN_TRUNC):
10342 return fold_builtin_trunc (fndecl, arg0);
10344 CASE_FLT_FN (BUILT_IN_ROUND):
10345 return fold_builtin_round (fndecl, arg0);
10347 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10348 CASE_FLT_FN (BUILT_IN_RINT):
10349 return fold_trunc_transparent_mathfn (fndecl, arg0);
10351 CASE_FLT_FN (BUILT_IN_LCEIL):
10352 CASE_FLT_FN (BUILT_IN_LLCEIL):
10353 CASE_FLT_FN (BUILT_IN_LFLOOR):
10354 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10355 CASE_FLT_FN (BUILT_IN_LROUND):
10356 CASE_FLT_FN (BUILT_IN_LLROUND):
10357 return fold_builtin_int_roundingfn (fndecl, arg0);
10359 CASE_FLT_FN (BUILT_IN_LRINT):
10360 CASE_FLT_FN (BUILT_IN_LLRINT):
10361 return fold_fixed_mathfn (fndecl, arg0);
10363 case BUILT_IN_BSWAP32:
10364 case BUILT_IN_BSWAP64:
10365 return fold_builtin_bswap (fndecl, arg0);
10367 CASE_INT_FN (BUILT_IN_FFS):
10368 CASE_INT_FN (BUILT_IN_CLZ):
10369 CASE_INT_FN (BUILT_IN_CTZ):
10370 CASE_INT_FN (BUILT_IN_POPCOUNT):
10371 CASE_INT_FN (BUILT_IN_PARITY):
10372 return fold_builtin_bitop (fndecl, arg0);
10374 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10375 return fold_builtin_signbit (arg0, type);
10377 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10378 return fold_builtin_significand (arg0, type);
10380 CASE_FLT_FN (BUILT_IN_ILOGB):
10381 CASE_FLT_FN (BUILT_IN_LOGB):
10382 return fold_builtin_logb (arg0, type);
10384 case BUILT_IN_ISASCII:
10385 return fold_builtin_isascii (arg0);
10387 case BUILT_IN_TOASCII:
10388 return fold_builtin_toascii (arg0);
10390 case BUILT_IN_ISDIGIT:
10391 return fold_builtin_isdigit (arg0);
10393 CASE_FLT_FN (BUILT_IN_FINITE):
10394 case BUILT_IN_FINITED32:
10395 case BUILT_IN_FINITED64:
10396 case BUILT_IN_FINITED128:
10397 case BUILT_IN_ISFINITE:
10398 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10400 CASE_FLT_FN (BUILT_IN_ISINF):
10401 case BUILT_IN_ISINFD32:
10402 case BUILT_IN_ISINFD64:
10403 case BUILT_IN_ISINFD128:
10404 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10406 case BUILT_IN_ISINF_SIGN:
10407 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10409 CASE_FLT_FN (BUILT_IN_ISNAN):
10410 case BUILT_IN_ISNAND32:
10411 case BUILT_IN_ISNAND64:
10412 case BUILT_IN_ISNAND128:
10413 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10415 case BUILT_IN_PRINTF:
10416 case BUILT_IN_PRINTF_UNLOCKED:
10417 case BUILT_IN_VPRINTF:
10418 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10428 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10429 IGNORE is true if the result of the function call is ignored. This
10430 function returns NULL_TREE if no simplification was possible. */
10433 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10435 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10436 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10440 CASE_FLT_FN (BUILT_IN_JN):
10441 if (validate_arg (arg0, INTEGER_TYPE)
10442 && validate_arg (arg1, REAL_TYPE))
10443 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10446 CASE_FLT_FN (BUILT_IN_YN):
10447 if (validate_arg (arg0, INTEGER_TYPE)
10448 && validate_arg (arg1, REAL_TYPE))
10449 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10453 CASE_FLT_FN (BUILT_IN_DREM):
10454 CASE_FLT_FN (BUILT_IN_REMAINDER):
10455 if (validate_arg (arg0, REAL_TYPE)
10456 && validate_arg(arg1, REAL_TYPE))
10457 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10460 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10461 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10462 if (validate_arg (arg0, REAL_TYPE)
10463 && validate_arg(arg1, POINTER_TYPE))
10464 return do_mpfr_lgamma_r (arg0, arg1, type);
10467 CASE_FLT_FN (BUILT_IN_ATAN2):
10468 if (validate_arg (arg0, REAL_TYPE)
10469 && validate_arg(arg1, REAL_TYPE))
10470 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10473 CASE_FLT_FN (BUILT_IN_FDIM):
10474 if (validate_arg (arg0, REAL_TYPE)
10475 && validate_arg(arg1, REAL_TYPE))
10476 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10479 CASE_FLT_FN (BUILT_IN_HYPOT):
10480 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10482 CASE_FLT_FN (BUILT_IN_LDEXP):
10483 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10484 CASE_FLT_FN (BUILT_IN_SCALBN):
10485 CASE_FLT_FN (BUILT_IN_SCALBLN):
10486 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10488 CASE_FLT_FN (BUILT_IN_FREXP):
10489 return fold_builtin_frexp (arg0, arg1, type);
10491 CASE_FLT_FN (BUILT_IN_MODF):
10492 return fold_builtin_modf (arg0, arg1, type);
10494 case BUILT_IN_BZERO:
10495 return fold_builtin_bzero (arg0, arg1, ignore);
10497 case BUILT_IN_FPUTS:
10498 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10500 case BUILT_IN_FPUTS_UNLOCKED:
10501 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10503 case BUILT_IN_STRSTR:
10504 return fold_builtin_strstr (arg0, arg1, type);
10506 case BUILT_IN_STRCAT:
10507 return fold_builtin_strcat (arg0, arg1);
10509 case BUILT_IN_STRSPN:
10510 return fold_builtin_strspn (arg0, arg1);
10512 case BUILT_IN_STRCSPN:
10513 return fold_builtin_strcspn (arg0, arg1);
10515 case BUILT_IN_STRCHR:
10516 case BUILT_IN_INDEX:
10517 return fold_builtin_strchr (arg0, arg1, type);
10519 case BUILT_IN_STRRCHR:
10520 case BUILT_IN_RINDEX:
10521 return fold_builtin_strrchr (arg0, arg1, type);
10523 case BUILT_IN_STRCPY:
10524 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10526 case BUILT_IN_STPCPY:
10529 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10533 return build_call_expr (fn, 2, arg0, arg1);
10537 case BUILT_IN_STRCMP:
10538 return fold_builtin_strcmp (arg0, arg1);
10540 case BUILT_IN_STRPBRK:
10541 return fold_builtin_strpbrk (arg0, arg1, type);
10543 case BUILT_IN_EXPECT:
10544 return fold_builtin_expect (arg0, arg1);
10546 CASE_FLT_FN (BUILT_IN_POW):
10547 return fold_builtin_pow (fndecl, arg0, arg1, type);
10549 CASE_FLT_FN (BUILT_IN_POWI):
10550 return fold_builtin_powi (fndecl, arg0, arg1, type);
10552 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10553 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10555 CASE_FLT_FN (BUILT_IN_FMIN):
10556 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10558 CASE_FLT_FN (BUILT_IN_FMAX):
10559 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10561 case BUILT_IN_ISGREATER:
10562 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10563 case BUILT_IN_ISGREATEREQUAL:
10564 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10565 case BUILT_IN_ISLESS:
10566 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10567 case BUILT_IN_ISLESSEQUAL:
10568 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10569 case BUILT_IN_ISLESSGREATER:
10570 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10571 case BUILT_IN_ISUNORDERED:
10572 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10575 /* We do the folding for va_start in the expander. */
10576 case BUILT_IN_VA_START:
10579 case BUILT_IN_SPRINTF:
10580 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10582 case BUILT_IN_OBJECT_SIZE:
10583 return fold_builtin_object_size (arg0, arg1);
10585 case BUILT_IN_PRINTF:
10586 case BUILT_IN_PRINTF_UNLOCKED:
10587 case BUILT_IN_VPRINTF:
10588 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10590 case BUILT_IN_PRINTF_CHK:
10591 case BUILT_IN_VPRINTF_CHK:
10592 if (!validate_arg (arg0, INTEGER_TYPE)
10593 || TREE_SIDE_EFFECTS (arg0))
10596 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10599 case BUILT_IN_FPRINTF:
10600 case BUILT_IN_FPRINTF_UNLOCKED:
10601 case BUILT_IN_VFPRINTF:
10602 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10611 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10612 and ARG2. IGNORE is true if the result of the function call is ignored.
10613 This function returns NULL_TREE if no simplification was possible. */
10616 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10618 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10619 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10623 CASE_FLT_FN (BUILT_IN_SINCOS):
10624 return fold_builtin_sincos (arg0, arg1, arg2);
10626 CASE_FLT_FN (BUILT_IN_FMA):
10627 if (validate_arg (arg0, REAL_TYPE)
10628 && validate_arg(arg1, REAL_TYPE)
10629 && validate_arg(arg2, REAL_TYPE))
10630 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10633 CASE_FLT_FN (BUILT_IN_REMQUO):
10634 if (validate_arg (arg0, REAL_TYPE)
10635 && validate_arg(arg1, REAL_TYPE)
10636 && validate_arg(arg2, POINTER_TYPE))
10637 return do_mpfr_remquo (arg0, arg1, arg2);
10640 case BUILT_IN_MEMSET:
10641 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10643 case BUILT_IN_BCOPY:
10644 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10646 case BUILT_IN_MEMCPY:
10647 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10649 case BUILT_IN_MEMPCPY:
10650 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10652 case BUILT_IN_MEMMOVE:
10653 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10655 case BUILT_IN_STRNCAT:
10656 return fold_builtin_strncat (arg0, arg1, arg2);
10658 case BUILT_IN_STRNCPY:
10659 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10661 case BUILT_IN_STRNCMP:
10662 return fold_builtin_strncmp (arg0, arg1, arg2);
10664 case BUILT_IN_MEMCHR:
10665 return fold_builtin_memchr (arg0, arg1, arg2, type);
10667 case BUILT_IN_BCMP:
10668 case BUILT_IN_MEMCMP:
10669 return fold_builtin_memcmp (arg0, arg1, arg2);;
10671 case BUILT_IN_SPRINTF:
10672 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10674 case BUILT_IN_STRCPY_CHK:
10675 case BUILT_IN_STPCPY_CHK:
10676 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10679 case BUILT_IN_STRCAT_CHK:
10680 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10682 case BUILT_IN_PRINTF_CHK:
10683 case BUILT_IN_VPRINTF_CHK:
10684 if (!validate_arg (arg0, INTEGER_TYPE)
10685 || TREE_SIDE_EFFECTS (arg0))
10688 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10691 case BUILT_IN_FPRINTF:
10692 case BUILT_IN_FPRINTF_UNLOCKED:
10693 case BUILT_IN_VFPRINTF:
10694 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10696 case BUILT_IN_FPRINTF_CHK:
10697 case BUILT_IN_VFPRINTF_CHK:
10698 if (!validate_arg (arg1, INTEGER_TYPE)
10699 || TREE_SIDE_EFFECTS (arg1))
10702 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10711 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10712 ARG2, and ARG3. IGNORE is true if the result of the function call is
10713 ignored. This function returns NULL_TREE if no simplification was
10717 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10720 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10724 case BUILT_IN_MEMCPY_CHK:
10725 case BUILT_IN_MEMPCPY_CHK:
10726 case BUILT_IN_MEMMOVE_CHK:
10727 case BUILT_IN_MEMSET_CHK:
10728 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10730 DECL_FUNCTION_CODE (fndecl));
10732 case BUILT_IN_STRNCPY_CHK:
10733 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10735 case BUILT_IN_STRNCAT_CHK:
10736 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10738 case BUILT_IN_FPRINTF_CHK:
10739 case BUILT_IN_VFPRINTF_CHK:
10740 if (!validate_arg (arg1, INTEGER_TYPE)
10741 || TREE_SIDE_EFFECTS (arg1))
10744 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10754 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10755 arguments, where NARGS <= 4. IGNORE is true if the result of the
10756 function call is ignored. This function returns NULL_TREE if no
10757 simplification was possible. Note that this only folds builtins with
10758 fixed argument patterns. Foldings that do varargs-to-varargs
10759 transformations, or that match calls with more than 4 arguments,
10760 need to be handled with fold_builtin_varargs instead. */
10762 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10765 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10767 tree ret = NULL_TREE;
10772 ret = fold_builtin_0 (fndecl, ignore);
10775 ret = fold_builtin_1 (fndecl, args[0], ignore);
10778 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10781 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10784 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10792 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10793 TREE_NO_WARNING (ret) = 1;
10799 /* Builtins with folding operations that operate on "..." arguments
10800 need special handling; we need to store the arguments in a convenient
10801 data structure before attempting any folding. Fortunately there are
10802 only a few builtins that fall into this category. FNDECL is the
10803 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10804 result of the function call is ignored. */
10807 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10809 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10810 tree ret = NULL_TREE;
10814 case BUILT_IN_SPRINTF_CHK:
10815 case BUILT_IN_VSPRINTF_CHK:
10816 ret = fold_builtin_sprintf_chk (exp, fcode);
10819 case BUILT_IN_SNPRINTF_CHK:
10820 case BUILT_IN_VSNPRINTF_CHK:
10821 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10824 case BUILT_IN_FPCLASSIFY:
10825 ret = fold_builtin_fpclassify (exp);
10833 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10834 TREE_NO_WARNING (ret) = 1;
10840 /* Return true if FNDECL shouldn't be folded right now.
10841 If a built-in function has an inline attribute always_inline
10842 wrapper, defer folding it after always_inline functions have
10843 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10844 might not be performed. */
10847 avoid_folding_inline_builtin (tree fndecl)
10849 return (DECL_DECLARED_INLINE_P (fndecl)
10850 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10852 && !cfun->always_inline_functions_inlined
10853 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10856 /* A wrapper function for builtin folding that prevents warnings for
10857 "statement without effect" and the like, caused by removing the
10858 call node earlier than the warning is generated. */
10861 fold_call_expr (tree exp, bool ignore)
10863 tree ret = NULL_TREE;
10864 tree fndecl = get_callee_fndecl (exp);
10866 && TREE_CODE (fndecl) == FUNCTION_DECL
10867 && DECL_BUILT_IN (fndecl)
10868 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10869 yet. Defer folding until we see all the arguments
10870 (after inlining). */
10871 && !CALL_EXPR_VA_ARG_PACK (exp))
10873 int nargs = call_expr_nargs (exp);
10875 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10876 instead last argument is __builtin_va_arg_pack (). Defer folding
10877 even in that case, until arguments are finalized. */
10878 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10880 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10882 && TREE_CODE (fndecl2) == FUNCTION_DECL
10883 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10884 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10888 if (avoid_folding_inline_builtin (fndecl))
10891 /* FIXME: Don't use a list in this interface. */
10892 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10893 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10896 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10898 tree *args = CALL_EXPR_ARGP (exp);
10899 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10902 ret = fold_builtin_varargs (fndecl, exp, ignore);
10905 /* Propagate location information from original call to
10906 expansion of builtin. Otherwise things like
10907 maybe_emit_chk_warning, that operate on the expansion
10908 of a builtin, will use the wrong location information. */
10909 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10911 tree realret = ret;
10912 if (TREE_CODE (ret) == NOP_EXPR)
10913 realret = TREE_OPERAND (ret, 0);
10914 if (CAN_HAVE_LOCATION_P (realret)
10915 && !EXPR_HAS_LOCATION (realret))
10916 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10926 /* Conveniently construct a function call expression. FNDECL names the
10927 function to be called and ARGLIST is a TREE_LIST of arguments. */
10930 build_function_call_expr (tree fndecl, tree arglist)
10932 tree fntype = TREE_TYPE (fndecl);
10933 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10934 int n = list_length (arglist);
10935 tree *argarray = (tree *) alloca (n * sizeof (tree));
10938 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10939 argarray[i] = TREE_VALUE (arglist);
10940 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10943 /* Conveniently construct a function call expression. FNDECL names the
10944 function to be called, N is the number of arguments, and the "..."
10945 parameters are the argument expressions. */
10948 build_call_expr (tree fndecl, int n, ...)
10951 tree fntype = TREE_TYPE (fndecl);
10952 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10953 tree *argarray = (tree *) alloca (n * sizeof (tree));
10957 for (i = 0; i < n; i++)
10958 argarray[i] = va_arg (ap, tree);
10960 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10963 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10964 N arguments are passed in the array ARGARRAY. */
10967 fold_builtin_call_array (tree type,
10972 tree ret = NULL_TREE;
10976 if (TREE_CODE (fn) == ADDR_EXPR)
10978 tree fndecl = TREE_OPERAND (fn, 0);
10979 if (TREE_CODE (fndecl) == FUNCTION_DECL
10980 && DECL_BUILT_IN (fndecl))
10982 /* If last argument is __builtin_va_arg_pack (), arguments to this
10983 function are not finalized yet. Defer folding until they are. */
10984 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10986 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10988 && TREE_CODE (fndecl2) == FUNCTION_DECL
10989 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10990 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10991 return build_call_array (type, fn, n, argarray);
10993 if (avoid_folding_inline_builtin (fndecl))
10994 return build_call_array (type, fn, n, argarray);
10995 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10997 tree arglist = NULL_TREE;
10998 for (i = n - 1; i >= 0; i--)
10999 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
11000 ret = targetm.fold_builtin (fndecl, arglist, false);
11003 return build_call_array (type, fn, n, argarray);
11005 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11007 /* First try the transformations that don't require consing up
11009 ret = fold_builtin_n (fndecl, argarray, n, false);
11014 /* If we got this far, we need to build an exp. */
11015 exp = build_call_array (type, fn, n, argarray);
11016 ret = fold_builtin_varargs (fndecl, exp, false);
11017 return ret ? ret : exp;
11021 return build_call_array (type, fn, n, argarray);
11024 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11025 along with N new arguments specified as the "..." parameters. SKIP
11026 is the number of arguments in EXP to be omitted. This function is used
11027 to do varargs-to-varargs transformations. */
11030 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
11032 int oldnargs = call_expr_nargs (exp);
11033 int nargs = oldnargs - skip + n;
11034 tree fntype = TREE_TYPE (fndecl);
11035 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11043 buffer = XALLOCAVEC (tree, nargs);
11045 for (i = 0; i < n; i++)
11046 buffer[i] = va_arg (ap, tree);
11048 for (j = skip; j < oldnargs; j++, i++)
11049 buffer[i] = CALL_EXPR_ARG (exp, j);
11052 buffer = CALL_EXPR_ARGP (exp) + skip;
11054 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
11057 /* Validate a single argument ARG against a tree code CODE representing
11061 validate_arg (const_tree arg, enum tree_code code)
11065 else if (code == POINTER_TYPE)
11066 return POINTER_TYPE_P (TREE_TYPE (arg));
11067 else if (code == INTEGER_TYPE)
11068 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11069 return code == TREE_CODE (TREE_TYPE (arg));
11072 /* This function validates the types of a function call argument list
11073 against a specified list of tree_codes. If the last specifier is a 0,
11074 that represents an ellipses, otherwise the last specifier must be a
11077 This is the GIMPLE version of validate_arglist. Eventually we want to
11078 completely convert builtins.c to work from GIMPLEs and the tree based
11079 validate_arglist will then be removed. */
11082 validate_gimple_arglist (const_gimple call, ...)
11084 enum tree_code code;
11090 va_start (ap, call);
11095 code = va_arg (ap, enum tree_code);
11099 /* This signifies an ellipses, any further arguments are all ok. */
11103 /* This signifies an endlink, if no arguments remain, return
11104 true, otherwise return false. */
11105 res = (i == gimple_call_num_args (call));
11108 /* If no parameters remain or the parameter's code does not
11109 match the specified code, return false. Otherwise continue
11110 checking any remaining arguments. */
11111 arg = gimple_call_arg (call, i++);
11112 if (!validate_arg (arg, code))
11119 /* We need gotos here since we can only have one VA_CLOSE in a
11127 /* This function validates the types of a function call argument list
11128 against a specified list of tree_codes. If the last specifier is a 0,
11129 that represents an ellipses, otherwise the last specifier must be a
11133 validate_arglist (const_tree callexpr, ...)
11135 enum tree_code code;
11138 const_call_expr_arg_iterator iter;
11141 va_start (ap, callexpr);
11142 init_const_call_expr_arg_iterator (callexpr, &iter);
11146 code = va_arg (ap, enum tree_code);
11150 /* This signifies an ellipses, any further arguments are all ok. */
11154 /* This signifies an endlink, if no arguments remain, return
11155 true, otherwise return false. */
11156 res = !more_const_call_expr_args_p (&iter);
11159 /* If no parameters remain or the parameter's code does not
11160 match the specified code, return false. Otherwise continue
11161 checking any remaining arguments. */
11162 arg = next_const_call_expr_arg (&iter);
11163 if (!validate_arg (arg, code))
11170 /* We need gotos here since we can only have one VA_CLOSE in a
11178 /* Default target-specific builtin expander that does nothing. */
11181 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11182 rtx target ATTRIBUTE_UNUSED,
11183 rtx subtarget ATTRIBUTE_UNUSED,
11184 enum machine_mode mode ATTRIBUTE_UNUSED,
11185 int ignore ATTRIBUTE_UNUSED)
11190 /* Returns true is EXP represents data that would potentially reside
11191 in a readonly section. */
11194 readonly_data_expr (tree exp)
11198 if (TREE_CODE (exp) != ADDR_EXPR)
11201 exp = get_base_address (TREE_OPERAND (exp, 0));
11205 /* Make sure we call decl_readonly_section only for trees it
11206 can handle (since it returns true for everything it doesn't
11208 if (TREE_CODE (exp) == STRING_CST
11209 || TREE_CODE (exp) == CONSTRUCTOR
11210 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11211 return decl_readonly_section (exp, 0);
11216 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11217 to the call, and TYPE is its return type.
11219 Return NULL_TREE if no simplification was possible, otherwise return the
11220 simplified form of the call as a tree.
11222 The simplified form may be a constant or other expression which
11223 computes the same value, but in a more efficient manner (including
11224 calls to other builtin functions).
11226 The call may contain arguments which need to be evaluated, but
11227 which are not useful to determine the result of the call. In
11228 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11229 COMPOUND_EXPR will be an argument which must be evaluated.
11230 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11231 COMPOUND_EXPR in the chain will contain the tree for the simplified
11232 form of the builtin function call. */
11235 fold_builtin_strstr (tree s1, tree s2, tree type)
11237 if (!validate_arg (s1, POINTER_TYPE)
11238 || !validate_arg (s2, POINTER_TYPE))
11243 const char *p1, *p2;
11245 p2 = c_getstr (s2);
11249 p1 = c_getstr (s1);
11252 const char *r = strstr (p1, p2);
11256 return build_int_cst (TREE_TYPE (s1), 0);
11258 /* Return an offset into the constant string argument. */
11259 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11260 s1, size_int (r - p1));
11261 return fold_convert (type, tem);
11264 /* The argument is const char *, and the result is char *, so we need
11265 a type conversion here to avoid a warning. */
11267 return fold_convert (type, s1);
11272 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11276 /* New argument list transforming strstr(s1, s2) to
11277 strchr(s1, s2[0]). */
11278 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11282 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11283 the call, and TYPE is its return type.
11285 Return NULL_TREE if no simplification was possible, otherwise return the
11286 simplified form of the call as a tree.
11288 The simplified form may be a constant or other expression which
11289 computes the same value, but in a more efficient manner (including
11290 calls to other builtin functions).
11292 The call may contain arguments which need to be evaluated, but
11293 which are not useful to determine the result of the call. In
11294 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11295 COMPOUND_EXPR will be an argument which must be evaluated.
11296 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11297 COMPOUND_EXPR in the chain will contain the tree for the simplified
11298 form of the builtin function call. */
11301 fold_builtin_strchr (tree s1, tree s2, tree type)
11303 if (!validate_arg (s1, POINTER_TYPE)
11304 || !validate_arg (s2, INTEGER_TYPE))
11310 if (TREE_CODE (s2) != INTEGER_CST)
11313 p1 = c_getstr (s1);
11320 if (target_char_cast (s2, &c))
11323 r = strchr (p1, c);
11326 return build_int_cst (TREE_TYPE (s1), 0);
11328 /* Return an offset into the constant string argument. */
11329 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11330 s1, size_int (r - p1));
11331 return fold_convert (type, tem);
11337 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11338 the call, and TYPE is its return type.
11340 Return NULL_TREE if no simplification was possible, otherwise return the
11341 simplified form of the call as a tree.
11343 The simplified form may be a constant or other expression which
11344 computes the same value, but in a more efficient manner (including
11345 calls to other builtin functions).
11347 The call may contain arguments which need to be evaluated, but
11348 which are not useful to determine the result of the call. In
11349 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11350 COMPOUND_EXPR will be an argument which must be evaluated.
11351 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11352 COMPOUND_EXPR in the chain will contain the tree for the simplified
11353 form of the builtin function call. */
11356 fold_builtin_strrchr (tree s1, tree s2, tree type)
11358 if (!validate_arg (s1, POINTER_TYPE)
11359 || !validate_arg (s2, INTEGER_TYPE))
11366 if (TREE_CODE (s2) != INTEGER_CST)
11369 p1 = c_getstr (s1);
11376 if (target_char_cast (s2, &c))
11379 r = strrchr (p1, c);
11382 return build_int_cst (TREE_TYPE (s1), 0);
11384 /* Return an offset into the constant string argument. */
11385 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11386 s1, size_int (r - p1));
11387 return fold_convert (type, tem);
11390 if (! integer_zerop (s2))
11393 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11397 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11398 return build_call_expr (fn, 2, s1, s2);
11402 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11403 to the call, and TYPE is its return type.
11405 Return NULL_TREE if no simplification was possible, otherwise return the
11406 simplified form of the call as a tree.
11408 The simplified form may be a constant or other expression which
11409 computes the same value, but in a more efficient manner (including
11410 calls to other builtin functions).
11412 The call may contain arguments which need to be evaluated, but
11413 which are not useful to determine the result of the call. In
11414 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11415 COMPOUND_EXPR will be an argument which must be evaluated.
11416 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11417 COMPOUND_EXPR in the chain will contain the tree for the simplified
11418 form of the builtin function call. */
11421 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11423 if (!validate_arg (s1, POINTER_TYPE)
11424 || !validate_arg (s2, POINTER_TYPE))
11429 const char *p1, *p2;
11431 p2 = c_getstr (s2);
11435 p1 = c_getstr (s1);
11438 const char *r = strpbrk (p1, p2);
11442 return build_int_cst (TREE_TYPE (s1), 0);
11444 /* Return an offset into the constant string argument. */
11445 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11446 s1, size_int (r - p1));
11447 return fold_convert (type, tem);
11451 /* strpbrk(x, "") == NULL.
11452 Evaluate and ignore s1 in case it had side-effects. */
11453 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11456 return NULL_TREE; /* Really call strpbrk. */
11458 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11462 /* New argument list transforming strpbrk(s1, s2) to
11463 strchr(s1, s2[0]). */
11464 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11468 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11471 Return NULL_TREE if no simplification was possible, otherwise return the
11472 simplified form of the call as a tree.
11474 The simplified form may be a constant or other expression which
11475 computes the same value, but in a more efficient manner (including
11476 calls to other builtin functions).
11478 The call may contain arguments which need to be evaluated, but
11479 which are not useful to determine the result of the call. In
11480 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11481 COMPOUND_EXPR will be an argument which must be evaluated.
11482 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11483 COMPOUND_EXPR in the chain will contain the tree for the simplified
11484 form of the builtin function call. */
11487 fold_builtin_strcat (tree dst, tree src)
11489 if (!validate_arg (dst, POINTER_TYPE)
11490 || !validate_arg (src, POINTER_TYPE))
11494 const char *p = c_getstr (src);
11496 /* If the string length is zero, return the dst parameter. */
11497 if (p && *p == '\0')
11504 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11505 arguments to the call.
11507 Return NULL_TREE if no simplification was possible, otherwise return the
11508 simplified form of the call as a tree.
11510 The simplified form may be a constant or other expression which
11511 computes the same value, but in a more efficient manner (including
11512 calls to other builtin functions).
11514 The call may contain arguments which need to be evaluated, but
11515 which are not useful to determine the result of the call. In
11516 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11517 COMPOUND_EXPR will be an argument which must be evaluated.
11518 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11519 COMPOUND_EXPR in the chain will contain the tree for the simplified
11520 form of the builtin function call. */
11523 fold_builtin_strncat (tree dst, tree src, tree len)
11525 if (!validate_arg (dst, POINTER_TYPE)
11526 || !validate_arg (src, POINTER_TYPE)
11527 || !validate_arg (len, INTEGER_TYPE))
11531 const char *p = c_getstr (src);
11533 /* If the requested length is zero, or the src parameter string
11534 length is zero, return the dst parameter. */
11535 if (integer_zerop (len) || (p && *p == '\0'))
11536 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11538 /* If the requested len is greater than or equal to the string
11539 length, call strcat. */
11540 if (TREE_CODE (len) == INTEGER_CST && p
11541 && compare_tree_int (len, strlen (p)) >= 0)
11543 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11545 /* If the replacement _DECL isn't initialized, don't do the
11550 return build_call_expr (fn, 2, dst, src);
11556 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11559 Return NULL_TREE if no simplification was possible, otherwise return the
11560 simplified form of the call as a tree.
11562 The simplified form may be a constant or other expression which
11563 computes the same value, but in a more efficient manner (including
11564 calls to other builtin functions).
11566 The call may contain arguments which need to be evaluated, but
11567 which are not useful to determine the result of the call. In
11568 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11569 COMPOUND_EXPR will be an argument which must be evaluated.
11570 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11571 COMPOUND_EXPR in the chain will contain the tree for the simplified
11572 form of the builtin function call. */
11575 fold_builtin_strspn (tree s1, tree s2)
11577 if (!validate_arg (s1, POINTER_TYPE)
11578 || !validate_arg (s2, POINTER_TYPE))
11582 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11584 /* If both arguments are constants, evaluate at compile-time. */
11587 const size_t r = strspn (p1, p2);
11588 return size_int (r);
11591 /* If either argument is "", return NULL_TREE. */
11592 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11593 /* Evaluate and ignore both arguments in case either one has
11595 return omit_two_operands (size_type_node, size_zero_node,
11601 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11604 Return NULL_TREE if no simplification was possible, otherwise return the
11605 simplified form of the call as a tree.
11607 The simplified form may be a constant or other expression which
11608 computes the same value, but in a more efficient manner (including
11609 calls to other builtin functions).
11611 The call may contain arguments which need to be evaluated, but
11612 which are not useful to determine the result of the call. In
11613 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11614 COMPOUND_EXPR will be an argument which must be evaluated.
11615 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11616 COMPOUND_EXPR in the chain will contain the tree for the simplified
11617 form of the builtin function call. */
11620 fold_builtin_strcspn (tree s1, tree s2)
11622 if (!validate_arg (s1, POINTER_TYPE)
11623 || !validate_arg (s2, POINTER_TYPE))
11627 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11629 /* If both arguments are constants, evaluate at compile-time. */
11632 const size_t r = strcspn (p1, p2);
11633 return size_int (r);
11636 /* If the first argument is "", return NULL_TREE. */
11637 if (p1 && *p1 == '\0')
11639 /* Evaluate and ignore argument s2 in case it has
11641 return omit_one_operand (size_type_node,
11642 size_zero_node, s2);
11645 /* If the second argument is "", return __builtin_strlen(s1). */
11646 if (p2 && *p2 == '\0')
11648 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11650 /* If the replacement _DECL isn't initialized, don't do the
11655 return build_call_expr (fn, 1, s1);
11661 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11662 to the call. IGNORE is true if the value returned
11663 by the builtin will be ignored. UNLOCKED is true is true if this
11664 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11665 the known length of the string. Return NULL_TREE if no simplification
11669 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11671 /* If we're using an unlocked function, assume the other unlocked
11672 functions exist explicitly. */
11673 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11674 : implicit_built_in_decls[BUILT_IN_FPUTC];
11675 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11676 : implicit_built_in_decls[BUILT_IN_FWRITE];
11678 /* If the return value is used, don't do the transformation. */
11682 /* Verify the arguments in the original call. */
11683 if (!validate_arg (arg0, POINTER_TYPE)
11684 || !validate_arg (arg1, POINTER_TYPE))
11688 len = c_strlen (arg0, 0);
11690 /* Get the length of the string passed to fputs. If the length
11691 can't be determined, punt. */
11693 || TREE_CODE (len) != INTEGER_CST)
11696 switch (compare_tree_int (len, 1))
11698 case -1: /* length is 0, delete the call entirely . */
11699 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11701 case 0: /* length is 1, call fputc. */
11703 const char *p = c_getstr (arg0);
11708 return build_call_expr (fn_fputc, 2,
11709 build_int_cst (NULL_TREE, p[0]), arg1);
11715 case 1: /* length is greater than 1, call fwrite. */
11717 /* If optimizing for size keep fputs. */
11718 if (optimize_function_for_size_p (cfun))
11720 /* New argument list transforming fputs(string, stream) to
11721 fwrite(string, 1, len, stream). */
11723 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11728 gcc_unreachable ();
11733 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11734 produced. False otherwise. This is done so that we don't output the error
11735 or warning twice or three times. */
11738 fold_builtin_next_arg (tree exp, bool va_start_p)
11740 tree fntype = TREE_TYPE (current_function_decl);
11741 int nargs = call_expr_nargs (exp);
11744 if (TYPE_ARG_TYPES (fntype) == 0
11745 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11746 == void_type_node))
11748 error ("%<va_start%> used in function with fixed args");
11754 if (va_start_p && (nargs != 2))
11756 error ("wrong number of arguments to function %<va_start%>");
11759 arg = CALL_EXPR_ARG (exp, 1);
11761 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11762 when we checked the arguments and if needed issued a warning. */
11767 /* Evidently an out of date version of <stdarg.h>; can't validate
11768 va_start's second argument, but can still work as intended. */
11769 warning (0, "%<__builtin_next_arg%> called without an argument");
11772 else if (nargs > 1)
11774 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11777 arg = CALL_EXPR_ARG (exp, 0);
11780 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11781 or __builtin_next_arg (0) the first time we see it, after checking
11782 the arguments and if needed issuing a warning. */
11783 if (!integer_zerop (arg))
11785 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11787 /* Strip off all nops for the sake of the comparison. This
11788 is not quite the same as STRIP_NOPS. It does more.
11789 We must also strip off INDIRECT_EXPR for C++ reference
11791 while (CONVERT_EXPR_P (arg)
11792 || TREE_CODE (arg) == INDIRECT_REF)
11793 arg = TREE_OPERAND (arg, 0);
11794 if (arg != last_parm)
11796 /* FIXME: Sometimes with the tree optimizers we can get the
11797 not the last argument even though the user used the last
11798 argument. We just warn and set the arg to be the last
11799 argument so that we will get wrong-code because of
11801 warning (0, "second parameter of %<va_start%> not last named argument");
11804 /* Undefined by C99 7.15.1.4p4 (va_start):
11805 "If the parameter parmN is declared with the register storage
11806 class, with a function or array type, or with a type that is
11807 not compatible with the type that results after application of
11808 the default argument promotions, the behavior is undefined."
11810 else if (DECL_REGISTER (arg))
11811 warning (0, "undefined behaviour when second parameter of "
11812 "%<va_start%> is declared with %<register%> storage");
11814 /* We want to verify the second parameter just once before the tree
11815 optimizers are run and then avoid keeping it in the tree,
11816 as otherwise we could warn even for correct code like:
11817 void foo (int i, ...)
11818 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11820 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11822 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11828 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11829 ORIG may be null if this is a 2-argument call. We don't attempt to
11830 simplify calls with more than 3 arguments.
11832 Return NULL_TREE if no simplification was possible, otherwise return the
11833 simplified form of the call as a tree. If IGNORED is true, it means that
11834 the caller does not use the returned value of the function. */
11837 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11840 const char *fmt_str = NULL;
11842 /* Verify the required arguments in the original call. We deal with two
11843 types of sprintf() calls: 'sprintf (str, fmt)' and
11844 'sprintf (dest, "%s", orig)'. */
11845 if (!validate_arg (dest, POINTER_TYPE)
11846 || !validate_arg (fmt, POINTER_TYPE))
11848 if (orig && !validate_arg (orig, POINTER_TYPE))
11851 /* Check whether the format is a literal string constant. */
11852 fmt_str = c_getstr (fmt);
11853 if (fmt_str == NULL)
11857 retval = NULL_TREE;
11859 if (!init_target_chars ())
11862 /* If the format doesn't contain % args or %%, use strcpy. */
11863 if (strchr (fmt_str, target_percent) == NULL)
11865 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11870 /* Don't optimize sprintf (buf, "abc", ptr++). */
11874 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11875 'format' is known to contain no % formats. */
11876 call = build_call_expr (fn, 2, dest, fmt);
11878 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11881 /* If the format is "%s", use strcpy if the result isn't used. */
11882 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11885 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11890 /* Don't crash on sprintf (str1, "%s"). */
11894 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11897 retval = c_strlen (orig, 1);
11898 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11901 call = build_call_expr (fn, 2, dest, orig);
11904 if (call && retval)
11906 retval = fold_convert
11907 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11909 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11915 /* Expand a call EXP to __builtin_object_size. */
11918 expand_builtin_object_size (tree exp)
11921 int object_size_type;
11922 tree fndecl = get_callee_fndecl (exp);
11924 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11926 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11928 expand_builtin_trap ();
11932 ost = CALL_EXPR_ARG (exp, 1);
11935 if (TREE_CODE (ost) != INTEGER_CST
11936 || tree_int_cst_sgn (ost) < 0
11937 || compare_tree_int (ost, 3) > 0)
11939 error ("%Klast argument of %D is not integer constant between 0 and 3",
11941 expand_builtin_trap ();
11945 object_size_type = tree_low_cst (ost, 0);
11947 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11950 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11951 FCODE is the BUILT_IN_* to use.
11952 Return NULL_RTX if we failed; the caller should emit a normal call,
11953 otherwise try to get the result in TARGET, if convenient (and in
11954 mode MODE if that's convenient). */
11957 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11958 enum built_in_function fcode)
11960 tree dest, src, len, size;
11962 if (!validate_arglist (exp,
11964 fcode == BUILT_IN_MEMSET_CHK
11965 ? INTEGER_TYPE : POINTER_TYPE,
11966 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11969 dest = CALL_EXPR_ARG (exp, 0);
11970 src = CALL_EXPR_ARG (exp, 1);
11971 len = CALL_EXPR_ARG (exp, 2);
11972 size = CALL_EXPR_ARG (exp, 3);
11974 if (! host_integerp (size, 1))
11977 if (host_integerp (len, 1) || integer_all_onesp (size))
11981 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11983 warning_at (tree_nonartificial_location (exp),
11984 0, "%Kcall to %D will always overflow destination buffer",
11985 exp, get_callee_fndecl (exp));
11990 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11991 mem{cpy,pcpy,move,set} is available. */
11994 case BUILT_IN_MEMCPY_CHK:
11995 fn = built_in_decls[BUILT_IN_MEMCPY];
11997 case BUILT_IN_MEMPCPY_CHK:
11998 fn = built_in_decls[BUILT_IN_MEMPCPY];
12000 case BUILT_IN_MEMMOVE_CHK:
12001 fn = built_in_decls[BUILT_IN_MEMMOVE];
12003 case BUILT_IN_MEMSET_CHK:
12004 fn = built_in_decls[BUILT_IN_MEMSET];
12013 fn = build_call_expr (fn, 3, dest, src, len);
12014 STRIP_TYPE_NOPS (fn);
12015 while (TREE_CODE (fn) == COMPOUND_EXPR)
12017 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12019 fn = TREE_OPERAND (fn, 1);
12021 if (TREE_CODE (fn) == CALL_EXPR)
12022 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12023 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12025 else if (fcode == BUILT_IN_MEMSET_CHK)
12029 unsigned int dest_align
12030 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12032 /* If DEST is not a pointer type, call the normal function. */
12033 if (dest_align == 0)
12036 /* If SRC and DEST are the same (and not volatile), do nothing. */
12037 if (operand_equal_p (src, dest, 0))
12041 if (fcode != BUILT_IN_MEMPCPY_CHK)
12043 /* Evaluate and ignore LEN in case it has side-effects. */
12044 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12045 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12048 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12049 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12052 /* __memmove_chk special case. */
12053 if (fcode == BUILT_IN_MEMMOVE_CHK)
12055 unsigned int src_align
12056 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12058 if (src_align == 0)
12061 /* If src is categorized for a readonly section we can use
12062 normal __memcpy_chk. */
12063 if (readonly_data_expr (src))
12065 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12068 fn = build_call_expr (fn, 4, dest, src, len, size);
12069 STRIP_TYPE_NOPS (fn);
12070 while (TREE_CODE (fn) == COMPOUND_EXPR)
12072 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12074 fn = TREE_OPERAND (fn, 1);
12076 if (TREE_CODE (fn) == CALL_EXPR)
12077 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12078 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12085 /* Emit warning if a buffer overflow is detected at compile time. */
12088 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12092 location_t loc = tree_nonartificial_location (exp);
12096 case BUILT_IN_STRCPY_CHK:
12097 case BUILT_IN_STPCPY_CHK:
12098 /* For __strcat_chk the warning will be emitted only if overflowing
12099 by at least strlen (dest) + 1 bytes. */
12100 case BUILT_IN_STRCAT_CHK:
12101 len = CALL_EXPR_ARG (exp, 1);
12102 size = CALL_EXPR_ARG (exp, 2);
12105 case BUILT_IN_STRNCAT_CHK:
12106 case BUILT_IN_STRNCPY_CHK:
12107 len = CALL_EXPR_ARG (exp, 2);
12108 size = CALL_EXPR_ARG (exp, 3);
12110 case BUILT_IN_SNPRINTF_CHK:
12111 case BUILT_IN_VSNPRINTF_CHK:
12112 len = CALL_EXPR_ARG (exp, 1);
12113 size = CALL_EXPR_ARG (exp, 3);
12116 gcc_unreachable ();
12122 if (! host_integerp (size, 1) || integer_all_onesp (size))
12127 len = c_strlen (len, 1);
12128 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12131 else if (fcode == BUILT_IN_STRNCAT_CHK)
12133 tree src = CALL_EXPR_ARG (exp, 1);
12134 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12136 src = c_strlen (src, 1);
12137 if (! src || ! host_integerp (src, 1))
12139 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12140 exp, get_callee_fndecl (exp));
12143 else if (tree_int_cst_lt (src, size))
12146 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12149 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12150 exp, get_callee_fndecl (exp));
12153 /* Emit warning if a buffer overflow is detected at compile time
12154 in __sprintf_chk/__vsprintf_chk calls. */
12157 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12159 tree dest, size, len, fmt, flag;
12160 const char *fmt_str;
12161 int nargs = call_expr_nargs (exp);
12163 /* Verify the required arguments in the original call. */
12167 dest = CALL_EXPR_ARG (exp, 0);
12168 flag = CALL_EXPR_ARG (exp, 1);
12169 size = CALL_EXPR_ARG (exp, 2);
12170 fmt = CALL_EXPR_ARG (exp, 3);
12172 if (! host_integerp (size, 1) || integer_all_onesp (size))
12175 /* Check whether the format is a literal string constant. */
12176 fmt_str = c_getstr (fmt);
12177 if (fmt_str == NULL)
12180 if (!init_target_chars ())
12183 /* If the format doesn't contain % args or %%, we know its size. */
12184 if (strchr (fmt_str, target_percent) == 0)
12185 len = build_int_cstu (size_type_node, strlen (fmt_str));
12186 /* If the format is "%s" and first ... argument is a string literal,
12188 else if (fcode == BUILT_IN_SPRINTF_CHK
12189 && strcmp (fmt_str, target_percent_s) == 0)
12195 arg = CALL_EXPR_ARG (exp, 4);
12196 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12199 len = c_strlen (arg, 1);
12200 if (!len || ! host_integerp (len, 1))
12206 if (! tree_int_cst_lt (len, size))
12207 warning_at (tree_nonartificial_location (exp),
12208 0, "%Kcall to %D will always overflow destination buffer",
12209 exp, get_callee_fndecl (exp));
12212 /* Emit warning if a free is called with address of a variable. */
12215 maybe_emit_free_warning (tree exp)
12217 tree arg = CALL_EXPR_ARG (exp, 0);
12220 if (TREE_CODE (arg) != ADDR_EXPR)
12223 arg = get_base_address (TREE_OPERAND (arg, 0));
12224 if (arg == NULL || INDIRECT_REF_P (arg))
12227 if (SSA_VAR_P (arg))
12228 warning_at (tree_nonartificial_location (exp),
12229 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12231 warning_at (tree_nonartificial_location (exp),
12232 0, "%Kattempt to free a non-heap object", exp);
12235 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12239 fold_builtin_object_size (tree ptr, tree ost)
12241 tree ret = NULL_TREE;
12242 int object_size_type;
12244 if (!validate_arg (ptr, POINTER_TYPE)
12245 || !validate_arg (ost, INTEGER_TYPE))
12250 if (TREE_CODE (ost) != INTEGER_CST
12251 || tree_int_cst_sgn (ost) < 0
12252 || compare_tree_int (ost, 3) > 0)
12255 object_size_type = tree_low_cst (ost, 0);
12257 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12258 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12259 and (size_t) 0 for types 2 and 3. */
12260 if (TREE_SIDE_EFFECTS (ptr))
12261 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12263 if (TREE_CODE (ptr) == ADDR_EXPR)
12264 ret = build_int_cstu (size_type_node,
12265 compute_builtin_object_size (ptr, object_size_type));
12267 else if (TREE_CODE (ptr) == SSA_NAME)
12269 unsigned HOST_WIDE_INT bytes;
12271 /* If object size is not known yet, delay folding until
12272 later. Maybe subsequent passes will help determining
12274 bytes = compute_builtin_object_size (ptr, object_size_type);
12275 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12277 ret = build_int_cstu (size_type_node, bytes);
12282 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12283 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12284 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12291 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12292 DEST, SRC, LEN, and SIZE are the arguments to the call.
12293 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12294 code of the builtin. If MAXLEN is not NULL, it is maximum length
12295 passed as third argument. */
12298 fold_builtin_memory_chk (tree fndecl,
12299 tree dest, tree src, tree len, tree size,
12300 tree maxlen, bool ignore,
12301 enum built_in_function fcode)
12305 if (!validate_arg (dest, POINTER_TYPE)
12306 || !validate_arg (src,
12307 (fcode == BUILT_IN_MEMSET_CHK
12308 ? INTEGER_TYPE : POINTER_TYPE))
12309 || !validate_arg (len, INTEGER_TYPE)
12310 || !validate_arg (size, INTEGER_TYPE))
12313 /* If SRC and DEST are the same (and not volatile), return DEST
12314 (resp. DEST+LEN for __mempcpy_chk). */
12315 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12317 if (fcode != BUILT_IN_MEMPCPY_CHK)
12318 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12321 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12322 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12326 if (! host_integerp (size, 1))
12329 if (! integer_all_onesp (size))
12331 if (! host_integerp (len, 1))
12333 /* If LEN is not constant, try MAXLEN too.
12334 For MAXLEN only allow optimizing into non-_ocs function
12335 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12336 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12338 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12340 /* (void) __mempcpy_chk () can be optimized into
12341 (void) __memcpy_chk (). */
12342 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12346 return build_call_expr (fn, 4, dest, src, len, size);
12354 if (tree_int_cst_lt (size, maxlen))
12359 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12360 mem{cpy,pcpy,move,set} is available. */
12363 case BUILT_IN_MEMCPY_CHK:
12364 fn = built_in_decls[BUILT_IN_MEMCPY];
12366 case BUILT_IN_MEMPCPY_CHK:
12367 fn = built_in_decls[BUILT_IN_MEMPCPY];
12369 case BUILT_IN_MEMMOVE_CHK:
12370 fn = built_in_decls[BUILT_IN_MEMMOVE];
12372 case BUILT_IN_MEMSET_CHK:
12373 fn = built_in_decls[BUILT_IN_MEMSET];
12382 return build_call_expr (fn, 3, dest, src, len);
12385 /* Fold a call to the __st[rp]cpy_chk builtin.
12386 DEST, SRC, and SIZE are the arguments to the call.
12387 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12388 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12389 strings passed as second argument. */
12392 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12393 tree maxlen, bool ignore,
12394 enum built_in_function fcode)
12398 if (!validate_arg (dest, POINTER_TYPE)
12399 || !validate_arg (src, POINTER_TYPE)
12400 || !validate_arg (size, INTEGER_TYPE))
12403 /* If SRC and DEST are the same (and not volatile), return DEST. */
12404 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12405 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12407 if (! host_integerp (size, 1))
12410 if (! integer_all_onesp (size))
12412 len = c_strlen (src, 1);
12413 if (! len || ! host_integerp (len, 1))
12415 /* If LEN is not constant, try MAXLEN too.
12416 For MAXLEN only allow optimizing into non-_ocs function
12417 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12418 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12420 if (fcode == BUILT_IN_STPCPY_CHK)
12425 /* If return value of __stpcpy_chk is ignored,
12426 optimize into __strcpy_chk. */
12427 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12431 return build_call_expr (fn, 3, dest, src, size);
12434 if (! len || TREE_SIDE_EFFECTS (len))
12437 /* If c_strlen returned something, but not a constant,
12438 transform __strcpy_chk into __memcpy_chk. */
12439 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12443 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12444 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12445 build_call_expr (fn, 4,
12446 dest, src, len, size));
12452 if (! tree_int_cst_lt (maxlen, size))
12456 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12457 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12458 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12462 return build_call_expr (fn, 2, dest, src);
12465 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12466 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12467 length passed as third argument. */
12470 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12475 if (!validate_arg (dest, POINTER_TYPE)
12476 || !validate_arg (src, POINTER_TYPE)
12477 || !validate_arg (len, INTEGER_TYPE)
12478 || !validate_arg (size, INTEGER_TYPE))
12481 if (! host_integerp (size, 1))
12484 if (! integer_all_onesp (size))
12486 if (! host_integerp (len, 1))
12488 /* If LEN is not constant, try MAXLEN too.
12489 For MAXLEN only allow optimizing into non-_ocs function
12490 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12491 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12497 if (tree_int_cst_lt (size, maxlen))
12501 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12502 fn = built_in_decls[BUILT_IN_STRNCPY];
12506 return build_call_expr (fn, 3, dest, src, len);
12509 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12510 are the arguments to the call. */
12513 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12518 if (!validate_arg (dest, POINTER_TYPE)
12519 || !validate_arg (src, POINTER_TYPE)
12520 || !validate_arg (size, INTEGER_TYPE))
12523 p = c_getstr (src);
12524 /* If the SRC parameter is "", return DEST. */
12525 if (p && *p == '\0')
12526 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12528 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12531 /* If __builtin_strcat_chk is used, assume strcat is available. */
12532 fn = built_in_decls[BUILT_IN_STRCAT];
12536 return build_call_expr (fn, 2, dest, src);
12539 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12543 fold_builtin_strncat_chk (tree fndecl,
12544 tree dest, tree src, tree len, tree size)
12549 if (!validate_arg (dest, POINTER_TYPE)
12550 || !validate_arg (src, POINTER_TYPE)
12551 || !validate_arg (size, INTEGER_TYPE)
12552 || !validate_arg (size, INTEGER_TYPE))
12555 p = c_getstr (src);
12556 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12557 if (p && *p == '\0')
12558 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12559 else if (integer_zerop (len))
12560 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12562 if (! host_integerp (size, 1))
12565 if (! integer_all_onesp (size))
12567 tree src_len = c_strlen (src, 1);
12569 && host_integerp (src_len, 1)
12570 && host_integerp (len, 1)
12571 && ! tree_int_cst_lt (len, src_len))
12573 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12574 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12578 return build_call_expr (fn, 3, dest, src, size);
12583 /* If __builtin_strncat_chk is used, assume strncat is available. */
12584 fn = built_in_decls[BUILT_IN_STRNCAT];
12588 return build_call_expr (fn, 3, dest, src, len);
12591 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12592 a normal call should be emitted rather than expanding the function
12593 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12596 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12598 tree dest, size, len, fn, fmt, flag;
12599 const char *fmt_str;
12600 int nargs = call_expr_nargs (exp);
12602 /* Verify the required arguments in the original call. */
12605 dest = CALL_EXPR_ARG (exp, 0);
12606 if (!validate_arg (dest, POINTER_TYPE))
12608 flag = CALL_EXPR_ARG (exp, 1);
12609 if (!validate_arg (flag, INTEGER_TYPE))
12611 size = CALL_EXPR_ARG (exp, 2);
12612 if (!validate_arg (size, INTEGER_TYPE))
12614 fmt = CALL_EXPR_ARG (exp, 3);
12615 if (!validate_arg (fmt, POINTER_TYPE))
12618 if (! host_integerp (size, 1))
12623 if (!init_target_chars ())
12626 /* Check whether the format is a literal string constant. */
12627 fmt_str = c_getstr (fmt);
12628 if (fmt_str != NULL)
12630 /* If the format doesn't contain % args or %%, we know the size. */
12631 if (strchr (fmt_str, target_percent) == 0)
12633 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12634 len = build_int_cstu (size_type_node, strlen (fmt_str));
12636 /* If the format is "%s" and first ... argument is a string literal,
12637 we know the size too. */
12638 else if (fcode == BUILT_IN_SPRINTF_CHK
12639 && strcmp (fmt_str, target_percent_s) == 0)
12645 arg = CALL_EXPR_ARG (exp, 4);
12646 if (validate_arg (arg, POINTER_TYPE))
12648 len = c_strlen (arg, 1);
12649 if (! len || ! host_integerp (len, 1))
12656 if (! integer_all_onesp (size))
12658 if (! len || ! tree_int_cst_lt (len, size))
12662 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12663 or if format doesn't contain % chars or is "%s". */
12664 if (! integer_zerop (flag))
12666 if (fmt_str == NULL)
12668 if (strchr (fmt_str, target_percent) != NULL
12669 && strcmp (fmt_str, target_percent_s))
12673 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12674 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12675 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12679 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12682 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12683 a normal call should be emitted rather than expanding the function
12684 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12685 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12686 passed as second argument. */
12689 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12690 enum built_in_function fcode)
12692 tree dest, size, len, fn, fmt, flag;
12693 const char *fmt_str;
12695 /* Verify the required arguments in the original call. */
12696 if (call_expr_nargs (exp) < 5)
12698 dest = CALL_EXPR_ARG (exp, 0);
12699 if (!validate_arg (dest, POINTER_TYPE))
12701 len = CALL_EXPR_ARG (exp, 1);
12702 if (!validate_arg (len, INTEGER_TYPE))
12704 flag = CALL_EXPR_ARG (exp, 2);
12705 if (!validate_arg (flag, INTEGER_TYPE))
12707 size = CALL_EXPR_ARG (exp, 3);
12708 if (!validate_arg (size, INTEGER_TYPE))
12710 fmt = CALL_EXPR_ARG (exp, 4);
12711 if (!validate_arg (fmt, POINTER_TYPE))
12714 if (! host_integerp (size, 1))
12717 if (! integer_all_onesp (size))
12719 if (! host_integerp (len, 1))
12721 /* If LEN is not constant, try MAXLEN too.
12722 For MAXLEN only allow optimizing into non-_ocs function
12723 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12724 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12730 if (tree_int_cst_lt (size, maxlen))
12734 if (!init_target_chars ())
12737 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12738 or if format doesn't contain % chars or is "%s". */
12739 if (! integer_zerop (flag))
12741 fmt_str = c_getstr (fmt);
12742 if (fmt_str == NULL)
12744 if (strchr (fmt_str, target_percent) != NULL
12745 && strcmp (fmt_str, target_percent_s))
12749 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12751 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12752 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12756 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12759 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12760 FMT and ARG are the arguments to the call; we don't fold cases with
12761 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12763 Return NULL_TREE if no simplification was possible, otherwise return the
12764 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12765 code of the function to be simplified. */
12768 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12769 enum built_in_function fcode)
12771 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12772 const char *fmt_str = NULL;
12774 /* If the return value is used, don't do the transformation. */
12778 /* Verify the required arguments in the original call. */
12779 if (!validate_arg (fmt, POINTER_TYPE))
12782 /* Check whether the format is a literal string constant. */
12783 fmt_str = c_getstr (fmt);
12784 if (fmt_str == NULL)
12787 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12789 /* If we're using an unlocked function, assume the other
12790 unlocked functions exist explicitly. */
12791 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12792 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12796 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12797 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12800 if (!init_target_chars ())
12803 if (strcmp (fmt_str, target_percent_s) == 0
12804 || strchr (fmt_str, target_percent) == NULL)
12808 if (strcmp (fmt_str, target_percent_s) == 0)
12810 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12813 if (!arg || !validate_arg (arg, POINTER_TYPE))
12816 str = c_getstr (arg);
12822 /* The format specifier doesn't contain any '%' characters. */
12823 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12829 /* If the string was "", printf does nothing. */
12830 if (str[0] == '\0')
12831 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12833 /* If the string has length of 1, call putchar. */
12834 if (str[1] == '\0')
12836 /* Given printf("c"), (where c is any one character,)
12837 convert "c"[0] to an int and pass that to the replacement
12839 newarg = build_int_cst (NULL_TREE, str[0]);
12841 call = build_call_expr (fn_putchar, 1, newarg);
12845 /* If the string was "string\n", call puts("string"). */
12846 size_t len = strlen (str);
12847 if ((unsigned char)str[len - 1] == target_newline)
12849 /* Create a NUL-terminated string that's one char shorter
12850 than the original, stripping off the trailing '\n'. */
12851 char *newstr = XALLOCAVEC (char, len);
12852 memcpy (newstr, str, len - 1);
12853 newstr[len - 1] = 0;
12855 newarg = build_string_literal (len, newstr);
12857 call = build_call_expr (fn_puts, 1, newarg);
12860 /* We'd like to arrange to call fputs(string,stdout) here,
12861 but we need stdout and don't have a way to get it yet. */
12866 /* The other optimizations can be done only on the non-va_list variants. */
12867 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12870 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12871 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12873 if (!arg || !validate_arg (arg, POINTER_TYPE))
12876 call = build_call_expr (fn_puts, 1, arg);
12879 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12880 else if (strcmp (fmt_str, target_percent_c) == 0)
12882 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12885 call = build_call_expr (fn_putchar, 1, arg);
12891 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12894 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12895 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12896 more than 3 arguments, and ARG may be null in the 2-argument case.
12898 Return NULL_TREE if no simplification was possible, otherwise return the
12899 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12900 code of the function to be simplified. */
12903 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12904 enum built_in_function fcode)
12906 tree fn_fputc, fn_fputs, call = NULL_TREE;
12907 const char *fmt_str = NULL;
12909 /* If the return value is used, don't do the transformation. */
12913 /* Verify the required arguments in the original call. */
12914 if (!validate_arg (fp, POINTER_TYPE))
12916 if (!validate_arg (fmt, POINTER_TYPE))
12919 /* Check whether the format is a literal string constant. */
12920 fmt_str = c_getstr (fmt);
12921 if (fmt_str == NULL)
12924 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12926 /* If we're using an unlocked function, assume the other
12927 unlocked functions exist explicitly. */
12928 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12929 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12933 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12934 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12937 if (!init_target_chars ())
12940 /* If the format doesn't contain % args or %%, use strcpy. */
12941 if (strchr (fmt_str, target_percent) == NULL)
12943 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12947 /* If the format specifier was "", fprintf does nothing. */
12948 if (fmt_str[0] == '\0')
12950 /* If FP has side-effects, just wait until gimplification is
12952 if (TREE_SIDE_EFFECTS (fp))
12955 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12958 /* When "string" doesn't contain %, replace all cases of
12959 fprintf (fp, string) with fputs (string, fp). The fputs
12960 builtin will take care of special cases like length == 1. */
12962 call = build_call_expr (fn_fputs, 2, fmt, fp);
12965 /* The other optimizations can be done only on the non-va_list variants. */
12966 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12969 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12970 else if (strcmp (fmt_str, target_percent_s) == 0)
12972 if (!arg || !validate_arg (arg, POINTER_TYPE))
12975 call = build_call_expr (fn_fputs, 2, arg, fp);
12978 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12979 else if (strcmp (fmt_str, target_percent_c) == 0)
12981 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12984 call = build_call_expr (fn_fputc, 2, arg, fp);
12989 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12992 /* Initialize format string characters in the target charset. */
12995 init_target_chars (void)
13000 target_newline = lang_hooks.to_target_charset ('\n');
13001 target_percent = lang_hooks.to_target_charset ('%');
13002 target_c = lang_hooks.to_target_charset ('c');
13003 target_s = lang_hooks.to_target_charset ('s');
13004 if (target_newline == 0 || target_percent == 0 || target_c == 0
13008 target_percent_c[0] = target_percent;
13009 target_percent_c[1] = target_c;
13010 target_percent_c[2] = '\0';
13012 target_percent_s[0] = target_percent;
13013 target_percent_s[1] = target_s;
13014 target_percent_s[2] = '\0';
13016 target_percent_s_newline[0] = target_percent;
13017 target_percent_s_newline[1] = target_s;
13018 target_percent_s_newline[2] = target_newline;
13019 target_percent_s_newline[3] = '\0';
13026 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13027 and no overflow/underflow occurred. INEXACT is true if M was not
13028 exactly calculated. TYPE is the tree type for the result. This
13029 function assumes that you cleared the MPFR flags and then
13030 calculated M to see if anything subsequently set a flag prior to
13031 entering this function. Return NULL_TREE if any checks fail. */
13034 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13036 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13037 overflow/underflow occurred. If -frounding-math, proceed iff the
13038 result of calling FUNC was exact. */
13039 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13040 && (!flag_rounding_math || !inexact))
13042 REAL_VALUE_TYPE rr;
13044 real_from_mpfr (&rr, m, type, GMP_RNDN);
13045 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13046 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13047 but the mpft_t is not, then we underflowed in the
13049 if (real_isfinite (&rr)
13050 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13052 REAL_VALUE_TYPE rmode;
13054 real_convert (&rmode, TYPE_MODE (type), &rr);
13055 /* Proceed iff the specified mode can hold the value. */
13056 if (real_identical (&rmode, &rr))
13057 return build_real (type, rmode);
13063 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13064 FUNC on it and return the resulting value as a tree with type TYPE.
13065 If MIN and/or MAX are not NULL, then the supplied ARG must be
13066 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13067 acceptable values, otherwise they are not. The mpfr precision is
13068 set to the precision of TYPE. We assume that function FUNC returns
13069 zero if the result could be calculated exactly within the requested
13073 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13074 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13077 tree result = NULL_TREE;
13081 /* To proceed, MPFR must exactly represent the target floating point
13082 format, which only happens when the target base equals two. */
13083 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13084 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13086 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13088 if (real_isfinite (ra)
13089 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13090 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13092 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13093 const int prec = fmt->p;
13094 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13098 mpfr_init2 (m, prec);
13099 mpfr_from_real (m, ra, GMP_RNDN);
13100 mpfr_clear_flags ();
13101 inexact = func (m, m, rnd);
13102 result = do_mpfr_ckconv (m, type, inexact);
13110 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13111 FUNC on it and return the resulting value as a tree with type TYPE.
13112 The mpfr precision is set to the precision of TYPE. We assume that
13113 function FUNC returns zero if the result could be calculated
13114 exactly within the requested precision. */
13117 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13118 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13120 tree result = NULL_TREE;
13125 /* To proceed, MPFR must exactly represent the target floating point
13126 format, which only happens when the target base equals two. */
13127 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13128 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13129 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13131 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13132 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13134 if (real_isfinite (ra1) && real_isfinite (ra2))
13136 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13137 const int prec = fmt->p;
13138 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13142 mpfr_inits2 (prec, m1, m2, NULL);
13143 mpfr_from_real (m1, ra1, GMP_RNDN);
13144 mpfr_from_real (m2, ra2, GMP_RNDN);
13145 mpfr_clear_flags ();
13146 inexact = func (m1, m1, m2, rnd);
13147 result = do_mpfr_ckconv (m1, type, inexact);
13148 mpfr_clears (m1, m2, NULL);
13155 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13156 FUNC on it and return the resulting value as a tree with type TYPE.
13157 The mpfr precision is set to the precision of TYPE. We assume that
13158 function FUNC returns zero if the result could be calculated
13159 exactly within the requested precision. */
13162 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13163 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13165 tree result = NULL_TREE;
13171 /* To proceed, MPFR must exactly represent the target floating point
13172 format, which only happens when the target base equals two. */
13173 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13174 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13175 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13176 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13178 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13179 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13180 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13182 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13184 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13185 const int prec = fmt->p;
13186 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13190 mpfr_inits2 (prec, m1, m2, m3, NULL);
13191 mpfr_from_real (m1, ra1, GMP_RNDN);
13192 mpfr_from_real (m2, ra2, GMP_RNDN);
13193 mpfr_from_real (m3, ra3, GMP_RNDN);
13194 mpfr_clear_flags ();
13195 inexact = func (m1, m1, m2, m3, rnd);
13196 result = do_mpfr_ckconv (m1, type, inexact);
13197 mpfr_clears (m1, m2, m3, NULL);
13204 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13205 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13206 If ARG_SINP and ARG_COSP are NULL then the result is returned
13207 as a complex value.
13208 The type is taken from the type of ARG and is used for setting the
13209 precision of the calculation and results. */
13212 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13214 tree const type = TREE_TYPE (arg);
13215 tree result = NULL_TREE;
13219 /* To proceed, MPFR must exactly represent the target floating point
13220 format, which only happens when the target base equals two. */
13221 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13222 && TREE_CODE (arg) == REAL_CST
13223 && !TREE_OVERFLOW (arg))
13225 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13227 if (real_isfinite (ra))
13229 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13230 const int prec = fmt->p;
13231 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13232 tree result_s, result_c;
13236 mpfr_inits2 (prec, m, ms, mc, NULL);
13237 mpfr_from_real (m, ra, GMP_RNDN);
13238 mpfr_clear_flags ();
13239 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13240 result_s = do_mpfr_ckconv (ms, type, inexact);
13241 result_c = do_mpfr_ckconv (mc, type, inexact);
13242 mpfr_clears (m, ms, mc, NULL);
13243 if (result_s && result_c)
13245 /* If we are to return in a complex value do so. */
13246 if (!arg_sinp && !arg_cosp)
13247 return build_complex (build_complex_type (type),
13248 result_c, result_s);
13250 /* Dereference the sin/cos pointer arguments. */
13251 arg_sinp = build_fold_indirect_ref (arg_sinp);
13252 arg_cosp = build_fold_indirect_ref (arg_cosp);
13253 /* Proceed if valid pointer type were passed in. */
13254 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13255 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13257 /* Set the values. */
13258 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13260 TREE_SIDE_EFFECTS (result_s) = 1;
13261 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13263 TREE_SIDE_EFFECTS (result_c) = 1;
13264 /* Combine the assignments into a compound expr. */
13265 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13266 result_s, result_c));
13274 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13275 two-argument mpfr order N Bessel function FUNC on them and return
13276 the resulting value as a tree with type TYPE. The mpfr precision
13277 is set to the precision of TYPE. We assume that function FUNC
13278 returns zero if the result could be calculated exactly within the
13279 requested precision. */
13281 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13282 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13283 const REAL_VALUE_TYPE *min, bool inclusive)
13285 tree result = NULL_TREE;
13290 /* To proceed, MPFR must exactly represent the target floating point
13291 format, which only happens when the target base equals two. */
13292 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13293 && host_integerp (arg1, 0)
13294 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13296 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13297 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13300 && real_isfinite (ra)
13301 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13303 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13304 const int prec = fmt->p;
13305 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13309 mpfr_init2 (m, prec);
13310 mpfr_from_real (m, ra, GMP_RNDN);
13311 mpfr_clear_flags ();
13312 inexact = func (m, n, m, rnd);
13313 result = do_mpfr_ckconv (m, type, inexact);
13321 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13322 the pointer *(ARG_QUO) and return the result. The type is taken
13323 from the type of ARG0 and is used for setting the precision of the
13324 calculation and results. */
13327 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13329 tree const type = TREE_TYPE (arg0);
13330 tree result = NULL_TREE;
13335 /* To proceed, MPFR must exactly represent the target floating point
13336 format, which only happens when the target base equals two. */
13337 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13338 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13339 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13341 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13342 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13344 if (real_isfinite (ra0) && real_isfinite (ra1))
13346 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13347 const int prec = fmt->p;
13348 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13353 mpfr_inits2 (prec, m0, m1, NULL);
13354 mpfr_from_real (m0, ra0, GMP_RNDN);
13355 mpfr_from_real (m1, ra1, GMP_RNDN);
13356 mpfr_clear_flags ();
13357 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13358 /* Remquo is independent of the rounding mode, so pass
13359 inexact=0 to do_mpfr_ckconv(). */
13360 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13361 mpfr_clears (m0, m1, NULL);
13364 /* MPFR calculates quo in the host's long so it may
13365 return more bits in quo than the target int can hold
13366 if sizeof(host long) > sizeof(target int). This can
13367 happen even for native compilers in LP64 mode. In
13368 these cases, modulo the quo value with the largest
13369 number that the target int can hold while leaving one
13370 bit for the sign. */
13371 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13372 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13374 /* Dereference the quo pointer argument. */
13375 arg_quo = build_fold_indirect_ref (arg_quo);
13376 /* Proceed iff a valid pointer type was passed in. */
13377 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13379 /* Set the value. */
13380 tree result_quo = fold_build2 (MODIFY_EXPR,
13381 TREE_TYPE (arg_quo), arg_quo,
13382 build_int_cst (NULL, integer_quo));
13383 TREE_SIDE_EFFECTS (result_quo) = 1;
13384 /* Combine the quo assignment with the rem. */
13385 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13386 result_quo, result_rem));
13394 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13395 resulting value as a tree with type TYPE. The mpfr precision is
13396 set to the precision of TYPE. We assume that this mpfr function
13397 returns zero if the result could be calculated exactly within the
13398 requested precision. In addition, the integer pointer represented
13399 by ARG_SG will be dereferenced and set to the appropriate signgam
13403 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13405 tree result = NULL_TREE;
13409 /* To proceed, MPFR must exactly represent the target floating point
13410 format, which only happens when the target base equals two. Also
13411 verify ARG is a constant and that ARG_SG is an int pointer. */
13412 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13413 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13414 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13415 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13417 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13419 /* In addition to NaN and Inf, the argument cannot be zero or a
13420 negative integer. */
13421 if (real_isfinite (ra)
13422 && ra->cl != rvc_zero
13423 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13425 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13426 const int prec = fmt->p;
13427 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13432 mpfr_init2 (m, prec);
13433 mpfr_from_real (m, ra, GMP_RNDN);
13434 mpfr_clear_flags ();
13435 inexact = mpfr_lgamma (m, &sg, m, rnd);
13436 result_lg = do_mpfr_ckconv (m, type, inexact);
13442 /* Dereference the arg_sg pointer argument. */
13443 arg_sg = build_fold_indirect_ref (arg_sg);
13444 /* Assign the signgam value into *arg_sg. */
13445 result_sg = fold_build2 (MODIFY_EXPR,
13446 TREE_TYPE (arg_sg), arg_sg,
13447 build_int_cst (NULL, sg));
13448 TREE_SIDE_EFFECTS (result_sg) = 1;
13449 /* Combine the signgam assignment with the lgamma result. */
13450 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13451 result_sg, result_lg));
13460 The functions below provide an alternate interface for folding
13461 builtin function calls presented as GIMPLE_CALL statements rather
13462 than as CALL_EXPRs. The folded result is still expressed as a
13463 tree. There is too much code duplication in the handling of
13464 varargs functions, and a more intrusive re-factoring would permit
13465 better sharing of code between the tree and statement-based
13466 versions of these functions. */
13468 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13469 along with N new arguments specified as the "..." parameters. SKIP
13470 is the number of arguments in STMT to be omitted. This function is used
13471 to do varargs-to-varargs transformations. */
13474 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13476 int oldnargs = gimple_call_num_args (stmt);
13477 int nargs = oldnargs - skip + n;
13478 tree fntype = TREE_TYPE (fndecl);
13479 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13484 buffer = XALLOCAVEC (tree, nargs);
13486 for (i = 0; i < n; i++)
13487 buffer[i] = va_arg (ap, tree);
13489 for (j = skip; j < oldnargs; j++, i++)
13490 buffer[i] = gimple_call_arg (stmt, j);
13492 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
13495 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13496 a normal call should be emitted rather than expanding the function
13497 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13500 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13502 tree dest, size, len, fn, fmt, flag;
13503 const char *fmt_str;
13504 int nargs = gimple_call_num_args (stmt);
13506 /* Verify the required arguments in the original call. */
13509 dest = gimple_call_arg (stmt, 0);
13510 if (!validate_arg (dest, POINTER_TYPE))
13512 flag = gimple_call_arg (stmt, 1);
13513 if (!validate_arg (flag, INTEGER_TYPE))
13515 size = gimple_call_arg (stmt, 2);
13516 if (!validate_arg (size, INTEGER_TYPE))
13518 fmt = gimple_call_arg (stmt, 3);
13519 if (!validate_arg (fmt, POINTER_TYPE))
13522 if (! host_integerp (size, 1))
13527 if (!init_target_chars ())
13530 /* Check whether the format is a literal string constant. */
13531 fmt_str = c_getstr (fmt);
13532 if (fmt_str != NULL)
13534 /* If the format doesn't contain % args or %%, we know the size. */
13535 if (strchr (fmt_str, target_percent) == 0)
13537 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13538 len = build_int_cstu (size_type_node, strlen (fmt_str));
13540 /* If the format is "%s" and first ... argument is a string literal,
13541 we know the size too. */
13542 else if (fcode == BUILT_IN_SPRINTF_CHK
13543 && strcmp (fmt_str, target_percent_s) == 0)
13549 arg = gimple_call_arg (stmt, 4);
13550 if (validate_arg (arg, POINTER_TYPE))
13552 len = c_strlen (arg, 1);
13553 if (! len || ! host_integerp (len, 1))
13560 if (! integer_all_onesp (size))
13562 if (! len || ! tree_int_cst_lt (len, size))
13566 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13567 or if format doesn't contain % chars or is "%s". */
13568 if (! integer_zerop (flag))
13570 if (fmt_str == NULL)
13572 if (strchr (fmt_str, target_percent) != NULL
13573 && strcmp (fmt_str, target_percent_s))
13577 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13578 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13579 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13583 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13586 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13587 a normal call should be emitted rather than expanding the function
13588 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13589 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13590 passed as second argument. */
13593 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13594 enum built_in_function fcode)
13596 tree dest, size, len, fn, fmt, flag;
13597 const char *fmt_str;
13599 /* Verify the required arguments in the original call. */
13600 if (gimple_call_num_args (stmt) < 5)
13602 dest = gimple_call_arg (stmt, 0);
13603 if (!validate_arg (dest, POINTER_TYPE))
13605 len = gimple_call_arg (stmt, 1);
13606 if (!validate_arg (len, INTEGER_TYPE))
13608 flag = gimple_call_arg (stmt, 2);
13609 if (!validate_arg (flag, INTEGER_TYPE))
13611 size = gimple_call_arg (stmt, 3);
13612 if (!validate_arg (size, INTEGER_TYPE))
13614 fmt = gimple_call_arg (stmt, 4);
13615 if (!validate_arg (fmt, POINTER_TYPE))
13618 if (! host_integerp (size, 1))
13621 if (! integer_all_onesp (size))
13623 if (! host_integerp (len, 1))
13625 /* If LEN is not constant, try MAXLEN too.
13626 For MAXLEN only allow optimizing into non-_ocs function
13627 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13628 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13634 if (tree_int_cst_lt (size, maxlen))
13638 if (!init_target_chars ())
13641 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13642 or if format doesn't contain % chars or is "%s". */
13643 if (! integer_zerop (flag))
13645 fmt_str = c_getstr (fmt);
13646 if (fmt_str == NULL)
13648 if (strchr (fmt_str, target_percent) != NULL
13649 && strcmp (fmt_str, target_percent_s))
13653 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13655 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13656 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13660 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13663 /* Builtins with folding operations that operate on "..." arguments
13664 need special handling; we need to store the arguments in a convenient
13665 data structure before attempting any folding. Fortunately there are
13666 only a few builtins that fall into this category. FNDECL is the
13667 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13668 result of the function call is ignored. */
13671 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13673 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13674 tree ret = NULL_TREE;
13678 case BUILT_IN_SPRINTF_CHK:
13679 case BUILT_IN_VSPRINTF_CHK:
13680 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13683 case BUILT_IN_SNPRINTF_CHK:
13684 case BUILT_IN_VSNPRINTF_CHK:
13685 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13692 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13693 TREE_NO_WARNING (ret) = 1;
13699 /* A wrapper function for builtin folding that prevents warnings for
13700 "statement without effect" and the like, caused by removing the
13701 call node earlier than the warning is generated. */
13704 fold_call_stmt (gimple stmt, bool ignore)
13706 tree ret = NULL_TREE;
13707 tree fndecl = gimple_call_fndecl (stmt);
13709 && TREE_CODE (fndecl) == FUNCTION_DECL
13710 && DECL_BUILT_IN (fndecl)
13711 && !gimple_call_va_arg_pack_p (stmt))
13713 int nargs = gimple_call_num_args (stmt);
13715 if (avoid_folding_inline_builtin (fndecl))
13717 /* FIXME: Don't use a list in this interface. */
13718 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13720 tree arglist = NULL_TREE;
13722 for (i = nargs - 1; i >= 0; i--)
13723 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13724 return targetm.fold_builtin (fndecl, arglist, ignore);
13728 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13730 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13732 for (i = 0; i < nargs; i++)
13733 args[i] = gimple_call_arg (stmt, i);
13734 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13737 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13740 /* Propagate location information from original call to
13741 expansion of builtin. Otherwise things like
13742 maybe_emit_chk_warning, that operate on the expansion
13743 of a builtin, will use the wrong location information. */
13744 if (gimple_has_location (stmt))
13746 tree realret = ret;
13747 if (TREE_CODE (ret) == NOP_EXPR)
13748 realret = TREE_OPERAND (ret, 0);
13749 if (CAN_HAVE_LOCATION_P (realret)
13750 && !EXPR_HAS_LOCATION (realret))
13751 SET_EXPR_LOCATION (realret, gimple_location (stmt));