contrib/gcc-4.7/gcc/builtins.c
/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "realmpfr.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"


#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees; make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strcat (location_t, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
                                  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
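
/* For instance, "__builtin_memcpy", "__sync_fetch_and_add_4" and
   "__atomic_load_8" are all builtin names by this test, while the plain
   library name "memcpy" is not.  */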


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}


/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and
   such that N < M.  Store N in *BITPOSP and return M.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.  */

unsigned int
get_object_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align, inner;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    {
      if (TREE_CODE (exp) == FUNCTION_DECL)
        {
          /* Function addresses can encode extra information besides their
             alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
             allows the low bit to be used as a virtual bit, we know
             that the address itself must be 2-byte aligned.  */
          if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
            align = 2 * BITS_PER_UNIT;
          else
            align = BITS_PER_UNIT;
        }
      else
        align = DECL_ALIGN (exp);
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == INDIRECT_REF)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      struct ptr_info_def *pi;
      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }
      else
        align = BITS_PER_UNIT;
      if (TREE_CODE (addr) == SSA_NAME
          && (pi = SSA_NAME_PTR_INFO (addr)))
        {
          bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
          align = MAX (pi->align * BITS_PER_UNIT, align);
        }
      else if (TREE_CODE (addr) == ADDR_EXPR)
        align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
      bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == TARGET_MEM_REF)
    {
      struct ptr_info_def *pi;
      tree addr = TMR_BASE (exp);
      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }
      else
        align = BITS_PER_UNIT;
      if (TREE_CODE (addr) == SSA_NAME
          && (pi = SSA_NAME_PTR_INFO (addr)))
        {
          bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
          align = MAX (pi->align * BITS_PER_UNIT, align);
        }
      else if (TREE_CODE (addr) == ADDR_EXPR)
        align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
      if (TMR_OFFSET (exp))
        bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
      if (TMR_INDEX (exp) && TMR_STEP (exp))
        {
          unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
          align = MIN (align, (step & -step) * BITS_PER_UNIT);
        }
      else if (TMR_INDEX (exp))
        align = BITS_PER_UNIT;
      if (TMR_INDEX2 (exp))
        align = BITS_PER_UNIT;
    }
  else
    align = BITS_PER_UNIT;

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  inner = ~0U;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
        {
          next_offset = TREE_OPERAND (offset, 0);
          offset = TREE_OPERAND (offset, 1);
        }
      else
        next_offset = NULL;
      if (host_integerp (offset, 1))
        {
          /* Any overflow in calculating offset_bits won't change
             the alignment.  */
          unsigned offset_bits
            = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

          if (offset_bits)
            inner = MIN (inner, (offset_bits & -offset_bits));
        }
      else if (TREE_CODE (offset) == MULT_EXPR
               && host_integerp (TREE_OPERAND (offset, 1), 1))
        {
          /* Any overflow in calculating offset_factor won't change
             the alignment.  */
          unsigned offset_factor
            = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
               * BITS_PER_UNIT);

          if (offset_factor)
            inner = MIN (inner, (offset_factor & -offset_factor));
        }
      else
        {
          inner = MIN (inner, BITS_PER_UNIT);
          break;
        }
      offset = next_offset;
    }

  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);
  bitpos = bitpos & (align - 1);

  *bitposp = bitpos;
  return align;
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  align = get_object_alignment_1 (exp, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
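
/* As an example of this encoding: align == 64 and bitpos == 32 describe
   an address known to be 32 mod 64 (in bits); the largest power-of-two
   alignment that can then be guaranteed is the lowest set bit,
   (32 & -32) == 32 bits, i.e. 4 bytes.  */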

/* Return the alignment of object EXP, also considering its type when we do
   not know of explicit misalignment.  Only handle MEM_REF and TARGET_MEM_REF.

   ??? Note that, in the general case, the type of an expression is not kept
   consistent with misalignment information by the front-end, for example when
   taking the address of a member of a packed structure.  However, in most of
   the cases, expressions have the alignment of their type so we optimistically
   fall back to this alignment when we cannot compute a misalignment.  */

unsigned int
get_object_or_type_alignment (tree exp)
{
  unsigned HOST_WIDE_INT misalign;
  unsigned int align = get_object_alignment_1 (exp, &misalign);

  gcc_assert (TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF);

  if (misalign != 0)
    align = (misalign & -misalign);
  else
    align = MAX (TYPE_ALIGN (TREE_TYPE (exp)), align);

  return align;
}

/* For a pointer valued expression EXP compute values M and N such that
   M divides (EXP - N) and such that N < M.  Store N in *BITPOSP and return M.

   If EXP is not a pointer, 0 is returned.  */

unsigned int
get_pointer_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_1 (TREE_OPERAND (exp, 0), bitposp);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
      if (!pi)
        {
          *bitposp = 0;
          return BITS_PER_UNIT;
        }
      *bitposp = pi->misalign * BITS_PER_UNIT;
      return pi->align * BITS_PER_UNIT;
    }

  *bitposp = 0;
  return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  align = get_pointer_alignment_1 (exp, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
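
/* E.g. for the tree corresponding to (char *) "hello" + 2 this returns
   ssize_int (3), while for "foo\0bar" with a non-constant offset it
   returns NULL_TREE because of the embedded zero byte.  */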

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}
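
/* E.g. for "hello" + 1 this returns a host pointer to "ello"; a
   non-constant or out-of-range offset yields a null pointer.  */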

/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
        ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
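
/* E.g. on a little-endian target, c_readstr ("abcd", SImode) yields the
   constant 0x64636261: STR[0] lands in the least significant byte.
   Because CH is only reloaded while it is nonzero, bytes past a
   terminating zero read as zero rather than running off the end of the
   string.  */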

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = TREE_INT_CST_LOW (cst);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
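
/* E.g. a non-addressable PARM_DECL is returned unchanged, since nothing
   can modify it between the two evaluations, whereas an indirect load
   such as *p is still wrapped in a SAVE_EXPR.  */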

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
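
/* E.g. __builtin_return_address (0) reads the return address of the
   current frame without walking the chain, while a COUNT of 2 follows
   the dynamic chain twice (subject to the target macros above) before
   reading the return-address slot of that frame.  */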

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
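
/* The buffer as laid out above therefore holds:
     word 0:   the frame value from targetm.builtin_setjmp_frame_value,
     word 1:   the address of RECEIVER_LABEL,
     word 2+:  the machine-dependent stack save area.  */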

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
         apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}
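
/* A typical pairing, for the internal exception-handling use these
   builtins are restricted to, is roughly (do_work and handle_unwind
   being placeholder calls):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_work ();
     else
       handle_unwind ();

   with the unwinder eventually calling __builtin_longjmp (buf, 1);
   the second argument must be the constant 1, as asserted above.  */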

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
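
/* E.g. __builtin_prefetch (&a[i + 8]) uses the defaults noted above and
   behaves like __builtin_prefetch (&a[i + 8], 0, 3): a read prefetch
   with maximal temporal locality.  */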

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
        mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
         field into another, see http://gcc.gnu.org/PR23561.
         Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
         memory accessed by the string or memory builtin will fit
         within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
        {
          tree mem_expr = MEM_EXPR (mem);
          HOST_WIDE_INT offset = -1, length = -1;
          tree inner = exp;

          while (TREE_CODE (inner) == ARRAY_REF
                 || CONVERT_EXPR_P (inner)
                 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
                 || TREE_CODE (inner) == SAVE_EXPR)
            inner = TREE_OPERAND (inner, 0);

          gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

          if (MEM_OFFSET_KNOWN_P (mem))
            offset = MEM_OFFSET (mem);

          if (offset >= 0 && len && host_integerp (len, 0))
            length = tree_low_cst (len, 0);

          while (TREE_CODE (inner) == COMPONENT_REF)
            {
              tree field = TREE_OPERAND (inner, 1);
              gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
              gcc_assert (field == TREE_OPERAND (mem_expr, 1));

              /* Bitfields are generally not byte-addressable.  */
              gcc_assert (!DECL_BIT_FIELD (field)
                          || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                               % BITS_PER_UNIT) == 0
                              && host_integerp (DECL_SIZE (field), 0)
                              && (TREE_INT_CST_LOW (DECL_SIZE (field))
                                  % BITS_PER_UNIT) == 0));

              /* If we can prove that the memory starting at XEXP (mem, 0) and
                 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
                 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
                 fields without DECL_SIZE_UNIT like flexible array members.  */
              if (length >= 0
                  && DECL_SIZE_UNIT (field)
                  && host_integerp (DECL_SIZE_UNIT (field), 0))
                {
                  HOST_WIDE_INT size
                    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
                  if (offset <= size
                      && length <= size
                      && offset + length <= size)
                    break;
                }

              if (offset >= 0
                  && host_integerp (DECL_FIELD_OFFSET (field), 0))
                offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                          + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                            / BITS_PER_UNIT;
              else
                {
                  offset = -1;
                  length = -1;
                }

              mem_expr = TREE_OPERAND (mem_expr, 0);
              inner = TREE_OPERAND (inner, 0);
            }

          if (mem_expr == NULL)
            offset = -1;
          if (mem_expr != MEM_EXPR (mem))
            {
              set_mem_expr (mem, mem_expr);
              if (offset >= 0)
                set_mem_offset (mem, offset);
              else
                clear_mem_offset (mem);
            }
        }
      set_mem_alias_set (mem, 0);
      clear_mem_size (mem);
    }

  return mem;
}
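
/* The PR23561 situation mentioned above is, roughly (src being some
   placeholder 4-byte source):

     struct A { char a1[1]; char a2[3]; } a;
     memcpy (a.a1, src, 4);

   where the copy deliberately spills from a.a1 into a.a2, so MEM_EXPR
   must not claim the access is confined to the first field.  */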

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
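
/* Together these implement the classic forwarding idiom (target_fn and
   the argument size 64 being placeholder values):

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*)()) target_fn, args, 64);
     __builtin_return (result);

   copying the incoming argument registers, calling target_fn with
   them, and returning whatever it returned.  */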
1570
1571/* Perform an untyped call and save the state required to perform an
1572 untyped return of whatever value was returned by the given function. */
1573
1574static rtx
1575expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1576{
1577 int size, align, regno;
1578 enum machine_mode mode;
1579 rtx incoming_args, result, reg, dest, src, call_insn;
1580 rtx old_stack_level = 0;
1581 rtx call_fusage = 0;
1582 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1583
1584 arguments = convert_memory_address (Pmode, arguments);
1585
1586 /* Create a block where the return registers can be saved. */
1587 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1588
1589 /* Fetch the arg pointer from the ARGUMENTS block. */
1590 incoming_args = gen_reg_rtx (Pmode);
1591 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1592#ifndef STACK_GROWS_DOWNWARD
1593 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1594 incoming_args, 0, OPTAB_LIB_WIDEN);
1595#endif
1596
1597 /* Push a new argument block and copy the arguments. Do not allow
1598 the (potential) memcpy call below to interfere with our stack
1599 manipulations. */
1600 do_pending_stack_adjust ();
1601 NO_DEFER_POP;
1602
1603 /* Save the stack with nonlocal if available. */
1604#ifdef HAVE_save_stack_nonlocal
1605 if (HAVE_save_stack_nonlocal)
1606 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1607 else
1608#endif
1609 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1610
1611 /* Allocate a block of memory onto the stack and copy the memory
1612 arguments to the outgoing arguments address. We can pass TRUE
1613 as the 4th argument because we just saved the stack pointer
1614 and will restore it right after the call. */
1615 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1616
1617 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1618 may have already set current_function_calls_alloca to true.
1619 current_function_calls_alloca won't be set if argsize is zero,
1620 so we have to guarantee need_drap is true here. */
1621 if (SUPPORTS_STACK_ALIGNMENT)
1622 crtl->need_drap = true;
1623
1624 dest = virtual_outgoing_args_rtx;
1625#ifndef STACK_GROWS_DOWNWARD
1626 if (CONST_INT_P (argsize))
1627 dest = plus_constant (dest, -INTVAL (argsize));
1628 else
1629 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1630#endif
1631 dest = gen_rtx_MEM (BLKmode, dest);
1632 set_mem_align (dest, PARM_BOUNDARY);
1633 src = gen_rtx_MEM (BLKmode, incoming_args);
1634 set_mem_align (src, PARM_BOUNDARY);
1635 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1636
1637 /* Refer to the argument block. */
1638 apply_args_size ();
1639 arguments = gen_rtx_MEM (BLKmode, arguments);
1640 set_mem_align (arguments, PARM_BOUNDARY);
1641
1642 /* Walk past the arg-pointer and structure value address. */
1643 size = GET_MODE_SIZE (Pmode);
1644 if (struct_value)
1645 size += GET_MODE_SIZE (Pmode);
1646
1647 /* Restore each of the registers previously saved. Make USE insns
1648 for each of these registers for use in making the call. */
1649 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1650 if ((mode = apply_args_mode[regno]) != VOIDmode)
1651 {
1652 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1653 if (size % align != 0)
1654 size = CEIL (size, align) * align;
1655 reg = gen_rtx_REG (mode, regno);
1656 emit_move_insn (reg, adjust_address (arguments, mode, size));
1657 use_reg (&call_fusage, reg);
1658 size += GET_MODE_SIZE (mode);
1659 }
1660
1661 /* Restore the structure value address unless this is passed as an
1662 "invisible" first argument. */
1663 size = GET_MODE_SIZE (Pmode);
1664 if (struct_value)
1665 {
1666 rtx value = gen_reg_rtx (Pmode);
1667 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1668 emit_move_insn (struct_value, value);
1669 if (REG_P (struct_value))
1670 use_reg (&call_fusage, struct_value);
1671 size += GET_MODE_SIZE (Pmode);
1672 }
1673
1674 /* All arguments and registers used for the call are set up by now! */
1675 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1676
1677  /* Ensure the address is valid.  A SYMBOL_REF is already valid, so no
1678     action is needed, and we don't want to load it into a register as an
1679     optimization, because prepare_call_address already did so if appropriate.  */
1680 if (GET_CODE (function) != SYMBOL_REF)
1681 function = memory_address (FUNCTION_MODE, function);
1682
1683 /* Generate the actual call instruction and save the return value. */
1684#ifdef HAVE_untyped_call
1685 if (HAVE_untyped_call)
1686 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1687 result, result_vector (1, result)));
1688 else
1689#endif
1690#ifdef HAVE_call_value
1691 if (HAVE_call_value)
1692 {
1693 rtx valreg = 0;
1694
1695 /* Locate the unique return register. It is not possible to
1696 express a call that sets more than one return register using
1697 call_value; use untyped_call for that. In fact, untyped_call
1698 only needs to save the return registers in the given block. */
1699 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1700 if ((mode = apply_result_mode[regno]) != VOIDmode)
1701 {
1702 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1703
1704 valreg = gen_rtx_REG (mode, regno);
1705 }
1706
1707 emit_call_insn (GEN_CALL_VALUE (valreg,
1708 gen_rtx_MEM (FUNCTION_MODE, function),
1709 const0_rtx, NULL_RTX, const0_rtx));
1710
1711 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1712 }
1713 else
1714#endif
1715 gcc_unreachable ();
1716
1717 /* Find the CALL insn we just emitted, and attach the register usage
1718 information. */
1719 call_insn = last_call_insn ();
1720 add_function_usage_to (call_insn, call_fusage);
1721
1722 /* Restore the stack. */
1723#ifdef HAVE_save_stack_nonlocal
1724 if (HAVE_save_stack_nonlocal)
1725 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1726 else
1727#endif
1728 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1729  fixup_args_size_notes (call_insn, get_last_insn (), 0);
1730
1731 OK_DEFER_POP;
1732
1733 /* Return the address of the result block. */
1734 result = copy_addr_to_reg (XEXP (result, 0));
1735 return convert_memory_address (ptr_mode, result);
1736}
1737
1738/* Perform an untyped return. */
1739
1740static void
1741expand_builtin_return (rtx result)
1742{
1743 int size, align, regno;
1744 enum machine_mode mode;
1745 rtx reg;
1746 rtx call_fusage = 0;
1747
1748 result = convert_memory_address (Pmode, result);
1749
1750 apply_result_size ();
1751 result = gen_rtx_MEM (BLKmode, result);
1752
1753#ifdef HAVE_untyped_return
1754 if (HAVE_untyped_return)
1755 {
1756 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1757 emit_barrier ();
1758 return;
1759 }
1760#endif
1761
1762 /* Restore the return value and note that each value is used. */
1763 size = 0;
1764 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1765 if ((mode = apply_result_mode[regno]) != VOIDmode)
1766 {
1767 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1768 if (size % align != 0)
1769 size = CEIL (size, align) * align;
1770 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1771 emit_move_insn (reg, adjust_address (result, mode, size));
1772
1773 push_to_sequence (call_fusage);
1774 emit_use (reg);
1775 call_fusage = get_insns ();
1776 end_sequence ();
1777 size += GET_MODE_SIZE (mode);
1778 }
1779
1780 /* Put the USE insns before the return. */
1781 emit_insn (call_fusage);
1782
1783  /* Return whatever value was restored by jumping directly to the end
1784 of the function. */
1785 expand_naked_return ();
1786}
1787
1788/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1789
1790static enum type_class
1791type_to_class (tree type)
1792{
1793 switch (TREE_CODE (type))
1794 {
1795 case VOID_TYPE: return void_type_class;
1796 case INTEGER_TYPE: return integer_type_class;
1797 case ENUMERAL_TYPE: return enumeral_type_class;
1798 case BOOLEAN_TYPE: return boolean_type_class;
1799 case POINTER_TYPE: return pointer_type_class;
1800 case REFERENCE_TYPE: return reference_type_class;
1801 case OFFSET_TYPE: return offset_type_class;
1802 case REAL_TYPE: return real_type_class;
1803 case COMPLEX_TYPE: return complex_type_class;
1804 case FUNCTION_TYPE: return function_type_class;
1805 case METHOD_TYPE: return method_type_class;
1806 case RECORD_TYPE: return record_type_class;
1807 case UNION_TYPE:
1808 case QUAL_UNION_TYPE: return union_type_class;
1809 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1810 ? string_type_class : array_type_class);
1811 case LANG_TYPE: return lang_type_class;
1812 default: return no_type_class;
1813 }
1814}
1815
1816/* Expand a call EXP to __builtin_classify_type. */
1817
1818static rtx
1819expand_builtin_classify_type (tree exp)
1820{
1821 if (call_expr_nargs (exp))
1822 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1823 return GEN_INT (no_type_class);
1824}
1825
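/* Illustrative sketch, not part of GCC itself: __builtin_classify_type
   lets type-generic macros dispatch on the static type of an
   expression, for example:

     #define IS_POINTER_EXPR(x) \
       (__builtin_classify_type (x) == 5)

   where 5 is assumed to be the value of pointer_type_class in the
   type_class enumeration defined in typeclass.h.  */
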
1826/* This helper macro, meant to be used in mathfn_built_in below,
1827 determines which among a set of three builtin math functions is
1828 appropriate for a given type mode. The `F' and `L' cases are
1829 automatically generated from the `double' case. */
1830#define CASE_MATHFN(BUILT_IN_MATHFN) \
1831 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1832 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1833 fcodel = BUILT_IN_MATHFN##L ; break;
1834/* Similar to above, but appends _R after any F/L suffix. */
1835#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1836 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1837 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1838 fcodel = BUILT_IN_MATHFN##L_R ; break;
1839
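/* For reference, CASE_MATHFN (BUILT_IN_SQRT) above expands to:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   i.e. each case records the double, float and long double variants
   of the function in FCODE, FCODEF and FCODEL respectively.  */
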
1840/* Return the mathematical function equivalent to FN, but operating directly on TYPE,
1841 if available. If IMPLICIT is true use the implicit builtin declaration,
1842 otherwise use the explicit declaration. If we can't do the conversion,
1843 return zero. */
1844
1845static tree
1846mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1847{
1848 enum built_in_function fcode, fcodef, fcodel, fcode2;
1849
1850 switch (fn)
1851 {
1852 CASE_MATHFN (BUILT_IN_ACOS)
1853 CASE_MATHFN (BUILT_IN_ACOSH)
1854 CASE_MATHFN (BUILT_IN_ASIN)
1855 CASE_MATHFN (BUILT_IN_ASINH)
1856 CASE_MATHFN (BUILT_IN_ATAN)
1857 CASE_MATHFN (BUILT_IN_ATAN2)
1858 CASE_MATHFN (BUILT_IN_ATANH)
1859 CASE_MATHFN (BUILT_IN_CBRT)
1860 CASE_MATHFN (BUILT_IN_CEIL)
1861 CASE_MATHFN (BUILT_IN_CEXPI)
1862 CASE_MATHFN (BUILT_IN_COPYSIGN)
1863 CASE_MATHFN (BUILT_IN_COS)
1864 CASE_MATHFN (BUILT_IN_COSH)
1865 CASE_MATHFN (BUILT_IN_DREM)
1866 CASE_MATHFN (BUILT_IN_ERF)
1867 CASE_MATHFN (BUILT_IN_ERFC)
1868 CASE_MATHFN (BUILT_IN_EXP)
1869 CASE_MATHFN (BUILT_IN_EXP10)
1870 CASE_MATHFN (BUILT_IN_EXP2)
1871 CASE_MATHFN (BUILT_IN_EXPM1)
1872 CASE_MATHFN (BUILT_IN_FABS)
1873 CASE_MATHFN (BUILT_IN_FDIM)
1874 CASE_MATHFN (BUILT_IN_FLOOR)
1875 CASE_MATHFN (BUILT_IN_FMA)
1876 CASE_MATHFN (BUILT_IN_FMAX)
1877 CASE_MATHFN (BUILT_IN_FMIN)
1878 CASE_MATHFN (BUILT_IN_FMOD)
1879 CASE_MATHFN (BUILT_IN_FREXP)
1880 CASE_MATHFN (BUILT_IN_GAMMA)
1881 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1882 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1883 CASE_MATHFN (BUILT_IN_HYPOT)
1884 CASE_MATHFN (BUILT_IN_ILOGB)
1885 CASE_MATHFN (BUILT_IN_ICEIL)
1886 CASE_MATHFN (BUILT_IN_IFLOOR)
1887 CASE_MATHFN (BUILT_IN_INF)
1888 CASE_MATHFN (BUILT_IN_IRINT)
1889 CASE_MATHFN (BUILT_IN_IROUND)
1890 CASE_MATHFN (BUILT_IN_ISINF)
1891 CASE_MATHFN (BUILT_IN_J0)
1892 CASE_MATHFN (BUILT_IN_J1)
1893 CASE_MATHFN (BUILT_IN_JN)
1894 CASE_MATHFN (BUILT_IN_LCEIL)
1895 CASE_MATHFN (BUILT_IN_LDEXP)
1896 CASE_MATHFN (BUILT_IN_LFLOOR)
1897 CASE_MATHFN (BUILT_IN_LGAMMA)
1898 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1899 CASE_MATHFN (BUILT_IN_LLCEIL)
1900 CASE_MATHFN (BUILT_IN_LLFLOOR)
1901 CASE_MATHFN (BUILT_IN_LLRINT)
1902 CASE_MATHFN (BUILT_IN_LLROUND)
1903 CASE_MATHFN (BUILT_IN_LOG)
1904 CASE_MATHFN (BUILT_IN_LOG10)
1905 CASE_MATHFN (BUILT_IN_LOG1P)
1906 CASE_MATHFN (BUILT_IN_LOG2)
1907 CASE_MATHFN (BUILT_IN_LOGB)
1908 CASE_MATHFN (BUILT_IN_LRINT)
1909 CASE_MATHFN (BUILT_IN_LROUND)
1910 CASE_MATHFN (BUILT_IN_MODF)
1911 CASE_MATHFN (BUILT_IN_NAN)
1912 CASE_MATHFN (BUILT_IN_NANS)
1913 CASE_MATHFN (BUILT_IN_NEARBYINT)
1914 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1915 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1916 CASE_MATHFN (BUILT_IN_POW)
1917 CASE_MATHFN (BUILT_IN_POWI)
1918 CASE_MATHFN (BUILT_IN_POW10)
1919 CASE_MATHFN (BUILT_IN_REMAINDER)
1920 CASE_MATHFN (BUILT_IN_REMQUO)
1921 CASE_MATHFN (BUILT_IN_RINT)
1922 CASE_MATHFN (BUILT_IN_ROUND)
1923 CASE_MATHFN (BUILT_IN_SCALB)
1924 CASE_MATHFN (BUILT_IN_SCALBLN)
1925 CASE_MATHFN (BUILT_IN_SCALBN)
1926 CASE_MATHFN (BUILT_IN_SIGNBIT)
1927 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1928 CASE_MATHFN (BUILT_IN_SIN)
1929 CASE_MATHFN (BUILT_IN_SINCOS)
1930 CASE_MATHFN (BUILT_IN_SINH)
1931 CASE_MATHFN (BUILT_IN_SQRT)
1932 CASE_MATHFN (BUILT_IN_TAN)
1933 CASE_MATHFN (BUILT_IN_TANH)
1934 CASE_MATHFN (BUILT_IN_TGAMMA)
1935 CASE_MATHFN (BUILT_IN_TRUNC)
1936 CASE_MATHFN (BUILT_IN_Y0)
1937 CASE_MATHFN (BUILT_IN_Y1)
1938 CASE_MATHFN (BUILT_IN_YN)
1939
1940 default:
1941 return NULL_TREE;
1942 }
1943
1944 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1945 fcode2 = fcode;
1946 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1947 fcode2 = fcodef;
1948 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1949 fcode2 = fcodel;
1950 else
1951 return NULL_TREE;
1952
1953 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1954 return NULL_TREE;
1955
1956 return builtin_decl_explicit (fcode2);
1957}
1958
1959/* Like mathfn_built_in_1(), but always use the implicit builtin declarations.  */
1960
1961tree
1962mathfn_built_in (tree type, enum built_in_function fn)
1963{
1964 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1965}
1966
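/* Illustrative sketch, not actual GCC code: a typical use of
   mathfn_built_in maps a double builtin onto the variant for another
   type, e.g.

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which yields the decl for BUILT_IN_SQRTF when it is implicitly
   available, and NULL_TREE otherwise.  */
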
1967/* If errno must be maintained, expand the RTL to check if the result,
1968 TARGET, of a built-in function call, EXP, is NaN, and if so set
1969 errno to EDOM. */
1970
1971static void
1972expand_errno_check (tree exp, rtx target)
1973{
1974 rtx lab = gen_label_rtx ();
1975
1976 /* Test the result; if it is NaN, set errno=EDOM because
1977 the argument was not in the domain. */
1978 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1979 NULL_RTX, NULL_RTX, lab,
1980 /* The jump is very likely. */
1981 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1982
1983#ifdef TARGET_EDOM
1984 /* If this built-in doesn't throw an exception, set errno directly. */
1985 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1986 {
1987#ifdef GEN_ERRNO_RTX
1988 rtx errno_rtx = GEN_ERRNO_RTX;
1989#else
1990 rtx errno_rtx
1991 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1992#endif
1993 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1994 emit_label (lab);
1995 return;
1996 }
1997#endif
1998
1999 /* Make sure the library call isn't expanded as a tail call. */
2000 CALL_EXPR_TAILCALL (exp) = 0;
2001
2002 /* We can't set errno=EDOM directly; let the library call do it.
2003 Pop the arguments right away in case the call gets deleted. */
2004 NO_DEFER_POP;
2005 expand_call (exp, target, 0);
2006 OK_DEFER_POP;
2007 emit_label (lab);
2008}
2009
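/* Illustrative C equivalent of the RTL emitted above (a sketch, not
   the actual expansion).  Only a NaN compares unequal to itself, so:

     double r = result;
     if (!(r == r))
       errno = EDOM;

   The very-likely branch generated above skips the errno store
   whenever the result is an ordinary number.  */
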
2010/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2011 Return NULL_RTX if a normal call should be emitted rather than expanding
2012 the function in-line. EXP is the expression that is a call to the builtin
2013 function; if convenient, the result should be placed in TARGET.
2014 SUBTARGET may be used as the target for computing one of EXP's operands. */
2015
2016static rtx
2017expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2018{
2019 optab builtin_optab;
2020 rtx op0, insns;
2021 tree fndecl = get_callee_fndecl (exp);
2022 enum machine_mode mode;
2023 bool errno_set = false;
2024 tree arg;
2025
2026 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2027 return NULL_RTX;
2028
2029 arg = CALL_EXPR_ARG (exp, 0);
2030
2031 switch (DECL_FUNCTION_CODE (fndecl))
2032 {
2033 CASE_FLT_FN (BUILT_IN_SQRT):
2034 errno_set = ! tree_expr_nonnegative_p (arg);
2035 builtin_optab = sqrt_optab;
2036 break;
2037 CASE_FLT_FN (BUILT_IN_EXP):
2038 errno_set = true; builtin_optab = exp_optab; break;
2039 CASE_FLT_FN (BUILT_IN_EXP10):
2040 CASE_FLT_FN (BUILT_IN_POW10):
2041 errno_set = true; builtin_optab = exp10_optab; break;
2042 CASE_FLT_FN (BUILT_IN_EXP2):
2043 errno_set = true; builtin_optab = exp2_optab; break;
2044 CASE_FLT_FN (BUILT_IN_EXPM1):
2045 errno_set = true; builtin_optab = expm1_optab; break;
2046 CASE_FLT_FN (BUILT_IN_LOGB):
2047 errno_set = true; builtin_optab = logb_optab; break;
2048 CASE_FLT_FN (BUILT_IN_LOG):
2049 errno_set = true; builtin_optab = log_optab; break;
2050 CASE_FLT_FN (BUILT_IN_LOG10):
2051 errno_set = true; builtin_optab = log10_optab; break;
2052 CASE_FLT_FN (BUILT_IN_LOG2):
2053 errno_set = true; builtin_optab = log2_optab; break;
2054 CASE_FLT_FN (BUILT_IN_LOG1P):
2055 errno_set = true; builtin_optab = log1p_optab; break;
2056 CASE_FLT_FN (BUILT_IN_ASIN):
2057 builtin_optab = asin_optab; break;
2058 CASE_FLT_FN (BUILT_IN_ACOS):
2059 builtin_optab = acos_optab; break;
2060 CASE_FLT_FN (BUILT_IN_TAN):
2061 builtin_optab = tan_optab; break;
2062 CASE_FLT_FN (BUILT_IN_ATAN):
2063 builtin_optab = atan_optab; break;
2064 CASE_FLT_FN (BUILT_IN_FLOOR):
2065 builtin_optab = floor_optab; break;
2066 CASE_FLT_FN (BUILT_IN_CEIL):
2067 builtin_optab = ceil_optab; break;
2068 CASE_FLT_FN (BUILT_IN_TRUNC):
2069 builtin_optab = btrunc_optab; break;
2070 CASE_FLT_FN (BUILT_IN_ROUND):
2071 builtin_optab = round_optab; break;
2072 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2073 builtin_optab = nearbyint_optab;
2074 if (flag_trapping_math)
2075 break;
2076      /* Else fall through and expand as rint.  */
2077 CASE_FLT_FN (BUILT_IN_RINT):
2078 builtin_optab = rint_optab; break;
2079 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2080 builtin_optab = significand_optab; break;
2081 default:
2082 gcc_unreachable ();
2083 }
2084
2085 /* Make a suitable register to place result in. */
2086 mode = TYPE_MODE (TREE_TYPE (exp));
2087
2088 if (! flag_errno_math || ! HONOR_NANS (mode))
2089 errno_set = false;
2090
2091 /* Before working hard, check whether the instruction is available. */
2092 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2093 && (!errno_set || !optimize_insn_for_size_p ()))
2094 {
2095 target = gen_reg_rtx (mode);
2096
2097 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2098 need to expand the argument again. This way, we will not perform
2099	 side-effects more than once.  */
2100 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2101
2102 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2103
2104 start_sequence ();
2105
2106 /* Compute into TARGET.
2107 Set TARGET to wherever the result comes back. */
2108 target = expand_unop (mode, builtin_optab, op0, target, 0);
2109
2110 if (target != 0)
2111 {
2112 if (errno_set)
2113 expand_errno_check (exp, target);
2114
2115 /* Output the entire sequence. */
2116 insns = get_insns ();
2117 end_sequence ();
2118 emit_insn (insns);
2119 return target;
2120 }
2121
2122 /* If we were unable to expand via the builtin, stop the sequence
2123	 (without outputting the insns) and call the library function
2124 with the stabilized argument list. */
2125 end_sequence ();
2126 }
2127
2128 return expand_call (exp, target, target == const0_rtx);
2129}
2130
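/* Illustrative sketch, not actual GCC output: with -fno-math-errno on
   a target whose sqrt_optab has a DFmode handler, a call such as

     double y = __builtin_sqrt (x);

   is expanded by the function above into a single square-root
   instruction; if the optab expansion fails, the generated sequence
   is discarded and an ordinary libcall to sqrt is emitted instead.  */
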
2131/* Expand a call to the builtin binary math functions (pow and atan2).
2132 Return NULL_RTX if a normal call should be emitted rather than expanding the
2133 function in-line. EXP is the expression that is a call to the builtin
2134 function; if convenient, the result should be placed in TARGET.
2135 SUBTARGET may be used as the target for computing one of EXP's
2136 operands. */
2137
2138static rtx
2139expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2140{
2141 optab builtin_optab;
2142 rtx op0, op1, insns;
2143 int op1_type = REAL_TYPE;
2144 tree fndecl = get_callee_fndecl (exp);
2145 tree arg0, arg1;
2146 enum machine_mode mode;
2147 bool errno_set = true;
2148
2149 switch (DECL_FUNCTION_CODE (fndecl))
2150 {
2151 CASE_FLT_FN (BUILT_IN_SCALBN):
2152 CASE_FLT_FN (BUILT_IN_SCALBLN):
2153 CASE_FLT_FN (BUILT_IN_LDEXP):
2154 op1_type = INTEGER_TYPE;
2155 default:
2156 break;
2157 }
2158
2159 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2160 return NULL_RTX;
2161
2162 arg0 = CALL_EXPR_ARG (exp, 0);
2163 arg1 = CALL_EXPR_ARG (exp, 1);
2164
2165 switch (DECL_FUNCTION_CODE (fndecl))
2166 {
2167 CASE_FLT_FN (BUILT_IN_POW):
2168 builtin_optab = pow_optab; break;
2169 CASE_FLT_FN (BUILT_IN_ATAN2):
2170 builtin_optab = atan2_optab; break;
2171 CASE_FLT_FN (BUILT_IN_SCALB):
2172 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2173 return 0;
2174 builtin_optab = scalb_optab; break;
2175 CASE_FLT_FN (BUILT_IN_SCALBN):
2176 CASE_FLT_FN (BUILT_IN_SCALBLN):
2177 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2178 return 0;
2179 /* Fall through... */
2180 CASE_FLT_FN (BUILT_IN_LDEXP):
2181 builtin_optab = ldexp_optab; break;
2182 CASE_FLT_FN (BUILT_IN_FMOD):
2183 builtin_optab = fmod_optab; break;
2184 CASE_FLT_FN (BUILT_IN_REMAINDER):
2185 CASE_FLT_FN (BUILT_IN_DREM):
2186 builtin_optab = remainder_optab; break;
2187 default:
2188 gcc_unreachable ();
2189 }
2190
2191 /* Make a suitable register to place result in. */
2192 mode = TYPE_MODE (TREE_TYPE (exp));
2193
2194 /* Before working hard, check whether the instruction is available. */
2195 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2196 return NULL_RTX;
2197
2198 target = gen_reg_rtx (mode);
2199
2200 if (! flag_errno_math || ! HONOR_NANS (mode))
2201 errno_set = false;
2202
2203 if (errno_set && optimize_insn_for_size_p ())
2204 return 0;
2205
2206 /* Always stabilize the argument list. */
2207 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2208 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2209
2210 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2211 op1 = expand_normal (arg1);
2212
2213 start_sequence ();
2214
2215 /* Compute into TARGET.
2216 Set TARGET to wherever the result comes back. */
2217 target = expand_binop (mode, builtin_optab, op0, op1,
2218 target, 0, OPTAB_DIRECT);
2219
2220 /* If we were unable to expand via the builtin, stop the sequence
2221	 (without outputting the insns) and call the library function
2222 with the stabilized argument list. */
2223 if (target == 0)
2224 {
2225 end_sequence ();
2226 return expand_call (exp, target, target == const0_rtx);
2227 }
2228
2229 if (errno_set)
2230 expand_errno_check (exp, target);
2231
2232 /* Output the entire sequence. */
2233 insns = get_insns ();
2234 end_sequence ();
2235 emit_insn (insns);
2236
2237 return target;
2238}
2239
2240/* Expand a call to the builtin trinary math functions (fma).
2241 Return NULL_RTX if a normal call should be emitted rather than expanding the
2242 function in-line. EXP is the expression that is a call to the builtin
2243 function; if convenient, the result should be placed in TARGET.
2244 SUBTARGET may be used as the target for computing one of EXP's
2245 operands. */
2246
2247static rtx
2248expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2249{
2250 optab builtin_optab;
2251 rtx op0, op1, op2, insns;
2252 tree fndecl = get_callee_fndecl (exp);
2253 tree arg0, arg1, arg2;
2254 enum machine_mode mode;
2255
2256 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2257 return NULL_RTX;
2258
2259 arg0 = CALL_EXPR_ARG (exp, 0);
2260 arg1 = CALL_EXPR_ARG (exp, 1);
2261 arg2 = CALL_EXPR_ARG (exp, 2);
2262
2263 switch (DECL_FUNCTION_CODE (fndecl))
2264 {
2265 CASE_FLT_FN (BUILT_IN_FMA):
2266 builtin_optab = fma_optab; break;
2267 default:
2268 gcc_unreachable ();
2269 }
2270
2271 /* Make a suitable register to place result in. */
2272 mode = TYPE_MODE (TREE_TYPE (exp));
2273
2274 /* Before working hard, check whether the instruction is available. */
2275 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2276 return NULL_RTX;
2277
2278 target = gen_reg_rtx (mode);
2279
2280 /* Always stabilize the argument list. */
2281 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2282 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2283 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2284
2285 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2286 op1 = expand_normal (arg1);
2287 op2 = expand_normal (arg2);
2288
2289 start_sequence ();
2290
2291 /* Compute into TARGET.
2292 Set TARGET to wherever the result comes back. */
2293 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2294 target, 0);
2295
2296 /* If we were unable to expand via the builtin, stop the sequence
2297	 (without outputting the insns) and call the library function
2298 with the stabilized argument list. */
2299 if (target == 0)
2300 {
2301 end_sequence ();
2302 return expand_call (exp, target, target == const0_rtx);
2303 }
2304
2305 /* Output the entire sequence. */
2306 insns = get_insns ();
2307 end_sequence ();
2308 emit_insn (insns);
2309
2310 return target;
2311}
2312
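/* Illustrative sketch, not actual GCC output: on a target providing
   fma_optab for the mode, the function above turns

     double d = __builtin_fma (a, b, c);

   (a * b + c computed with a single rounding) into one fused
   multiply-add instruction; otherwise it falls back to calling the
   fma library function.  */
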
2313/* Expand a call to the builtin sin and cos math functions.
2314 Return NULL_RTX if a normal call should be emitted rather than expanding the
2315 function in-line. EXP is the expression that is a call to the builtin
2316 function; if convenient, the result should be placed in TARGET.
2317 SUBTARGET may be used as the target for computing one of EXP's
2318 operands. */
2319
2320static rtx
2321expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2322{
2323 optab builtin_optab;
2324 rtx op0, insns;
2325 tree fndecl = get_callee_fndecl (exp);
2326 enum machine_mode mode;
2327 tree arg;
2328
2329 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2330 return NULL_RTX;
2331
2332 arg = CALL_EXPR_ARG (exp, 0);
2333
2334 switch (DECL_FUNCTION_CODE (fndecl))
2335 {
2336 CASE_FLT_FN (BUILT_IN_SIN):
2337 CASE_FLT_FN (BUILT_IN_COS):
2338 builtin_optab = sincos_optab; break;
2339 default:
2340 gcc_unreachable ();
2341 }
2342
2343 /* Make a suitable register to place result in. */
2344 mode = TYPE_MODE (TREE_TYPE (exp));
2345
2346  /* Check if the sincos insn is available; otherwise fall back
2347     to the sin or cos insn.  */
2348 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2349 switch (DECL_FUNCTION_CODE (fndecl))
2350 {
2351 CASE_FLT_FN (BUILT_IN_SIN):
2352 builtin_optab = sin_optab; break;
2353 CASE_FLT_FN (BUILT_IN_COS):
2354 builtin_optab = cos_optab; break;
2355 default:
2356 gcc_unreachable ();
2357 }
2358
2359 /* Before working hard, check whether the instruction is available. */
2360 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2361 {
2362 target = gen_reg_rtx (mode);
2363
2364 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2365 need to expand the argument again. This way, we will not perform
2366	 side-effects more than once.  */
2367 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2368
2369 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2370
2371 start_sequence ();
2372
2373 /* Compute into TARGET.
2374 Set TARGET to wherever the result comes back. */
2375 if (builtin_optab == sincos_optab)
2376 {
2377 int result;
2378
2379 switch (DECL_FUNCTION_CODE (fndecl))
2380 {
2381 CASE_FLT_FN (BUILT_IN_SIN):
2382 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2383 break;
2384 CASE_FLT_FN (BUILT_IN_COS):
2385 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2386 break;
2387 default:
2388 gcc_unreachable ();
2389 }
2390 gcc_assert (result);
2391 }
2392 else
2393 {
2394 target = expand_unop (mode, builtin_optab, op0, target, 0);
2395 }
2396
2397 if (target != 0)
2398 {
2399 /* Output the entire sequence. */
2400 insns = get_insns ();
2401 end_sequence ();
2402 emit_insn (insns);
2403 return target;
2404 }
2405
2406 /* If we were unable to expand via the builtin, stop the sequence
2407	 (without outputting the insns) and call the library function
2408 with the stabilized argument list. */
2409 end_sequence ();
2410 }
2411
2412 target = expand_call (exp, target, target == const0_rtx);
2413
2414 return target;
2415}
2416
2417/* Given an interclass math builtin decl FNDECL and its argument ARG,
2418   return an RTL instruction code that implements the functionality.
2419   If that isn't possible or available, return CODE_FOR_nothing.  */
2420
2421static enum insn_code
2422interclass_mathfn_icode (tree arg, tree fndecl)
2423{
2424 bool errno_set = false;
2425 optab builtin_optab = 0;
2426 enum machine_mode mode;
2427
2428 switch (DECL_FUNCTION_CODE (fndecl))
2429 {
2430 CASE_FLT_FN (BUILT_IN_ILOGB):
2431 errno_set = true; builtin_optab = ilogb_optab; break;
2432 CASE_FLT_FN (BUILT_IN_ISINF):
2433 builtin_optab = isinf_optab; break;
2434 case BUILT_IN_ISNORMAL:
2435 case BUILT_IN_ISFINITE:
2436 CASE_FLT_FN (BUILT_IN_FINITE):
2437 case BUILT_IN_FINITED32:
2438 case BUILT_IN_FINITED64:
2439 case BUILT_IN_FINITED128:
2440 case BUILT_IN_ISINFD32:
2441 case BUILT_IN_ISINFD64:
2442 case BUILT_IN_ISINFD128:
2443 /* These builtins have no optabs (yet). */
2444 break;
2445 default:
2446 gcc_unreachable ();
2447 }
2448
2449 /* There's no easy way to detect the case we need to set EDOM. */
2450 if (flag_errno_math && errno_set)
2451 return CODE_FOR_nothing;
2452
2453 /* Optab mode depends on the mode of the input argument. */
2454 mode = TYPE_MODE (TREE_TYPE (arg));
2455
2456 if (builtin_optab)
2457 return optab_handler (builtin_optab, mode);
2458 return CODE_FOR_nothing;
2459}
2460
2461/* Expand a call to one of the builtin math functions that operate on
2462   a floating point argument and output an integer result (ilogb, isinf,
2463 isnan, etc).
2464 Return 0 if a normal call should be emitted rather than expanding the
2465 function in-line. EXP is the expression that is a call to the builtin
2466 function; if convenient, the result should be placed in TARGET. */
2467
2468static rtx
2469expand_builtin_interclass_mathfn (tree exp, rtx target)
2470{
2471 enum insn_code icode = CODE_FOR_nothing;
2472 rtx op0;
2473 tree fndecl = get_callee_fndecl (exp);
2474 enum machine_mode mode;
2475 tree arg;
2476
2477 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2478 return NULL_RTX;
2479
2480 arg = CALL_EXPR_ARG (exp, 0);
2481 icode = interclass_mathfn_icode (arg, fndecl);
2482 mode = TYPE_MODE (TREE_TYPE (arg));
2483
2484 if (icode != CODE_FOR_nothing)
2485 {
2486 struct expand_operand ops[1];
2487 rtx last = get_last_insn ();
2488 tree orig_arg = arg;
2489
2490 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2491 need to expand the argument again. This way, we will not perform
2492	 side-effects more than once.  */
2493 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2494
2495 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2496
2497 if (mode != GET_MODE (op0))
2498 op0 = convert_to_mode (mode, op0, 0);
2499
2500 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2501 if (maybe_legitimize_operands (icode, 0, 1, ops)
2502 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2503 return ops[0].value;
2504
2505 delete_insns_since (last);
2506 CALL_EXPR_ARG (exp, 0) = orig_arg;
2507 }
2508
2509 return NULL_RTX;
2510}
2511
2512/* Expand a call to the builtin sincos math function.
2513 Return NULL_RTX if a normal call should be emitted rather than expanding the
2514 function in-line. EXP is the expression that is a call to the builtin
2515 function. */
2516
2517static rtx
2518expand_builtin_sincos (tree exp)
2519{
2520 rtx op0, op1, op2, target1, target2;
2521 enum machine_mode mode;
2522 tree arg, sinp, cosp;
2523 int result;
2524 location_t loc = EXPR_LOCATION (exp);
2525 tree alias_type, alias_off;
2526
2527 if (!validate_arglist (exp, REAL_TYPE,
2528 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2529 return NULL_RTX;
2530
2531 arg = CALL_EXPR_ARG (exp, 0);
2532 sinp = CALL_EXPR_ARG (exp, 1);
2533 cosp = CALL_EXPR_ARG (exp, 2);
2534
2535 /* Make a suitable register to place result in. */
2536 mode = TYPE_MODE (TREE_TYPE (arg));
2537
2538 /* Check if sincos insn is available, otherwise emit the call. */
2539 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2540 return NULL_RTX;
2541
2542 target1 = gen_reg_rtx (mode);
2543 target2 = gen_reg_rtx (mode);
2544
2545 op0 = expand_normal (arg);
2546 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2547 alias_off = build_int_cst (alias_type, 0);
2548 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2549 sinp, alias_off));
2550 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2551 cosp, alias_off));
2552
2553 /* Compute into target1 and target2.
2554 Set TARGET to wherever the result comes back. */
2555 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2556 gcc_assert (result);
2557
2558 /* Move target1 and target2 to the memory locations indicated
2559 by op1 and op2. */
2560 emit_move_insn (op1, target1);
2561 emit_move_insn (op2, target2);
2562
2563 return const0_rtx;
2564}
2565
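/* Illustrative sketch, not actual GCC output: the expansion above
   services calls of the form

     double s, c;
     sincos (x, &s, &c);

   computing both values with a single sincos-style instruction and
   then storing them through the two pointer arguments.  */
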
2566/* Expand a call to the internal cexpi builtin to the sincos math function.
2567 EXP is the expression that is a call to the builtin function; if convenient,
2568 the result should be placed in TARGET. */
2569
2570static rtx
2571expand_builtin_cexpi (tree exp, rtx target)
2572{
2573 tree fndecl = get_callee_fndecl (exp);
2574 tree arg, type;
2575 enum machine_mode mode;
2576 rtx op0, op1, op2;
2577 location_t loc = EXPR_LOCATION (exp);
2578
2579 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2580 return NULL_RTX;
2581
2582 arg = CALL_EXPR_ARG (exp, 0);
2583 type = TREE_TYPE (arg);
2584 mode = TYPE_MODE (TREE_TYPE (arg));
2585
2586 /* Try expanding via a sincos optab, fall back to emitting a libcall
2587     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2588     is only generated from sincos or cexp, or when either of them is available.  */
2589 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2590 {
2591 op1 = gen_reg_rtx (mode);
2592 op2 = gen_reg_rtx (mode);
2593
2594 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2595
2596 /* Compute into op1 and op2. */
2597 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2598 }
2599 else if (TARGET_HAS_SINCOS)
2600 {
2601 tree call, fn = NULL_TREE;
2602 tree top1, top2;
2603 rtx op1a, op2a;
2604
2605 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2606 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2607 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2608 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2609 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2610 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2611 else
2612 gcc_unreachable ();
2613
2614 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2615 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2616 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2617 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2618 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2619 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2620
2621 /* Make sure not to fold the sincos call again. */
2622 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2623 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2624 call, 3, arg, top1, top2));
2625 }
2626 else
2627 {
2628 tree call, fn = NULL_TREE, narg;
2629 tree ctype = build_complex_type (type);
2630
2631 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2632 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2633 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2634 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2635 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2636 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2637 else
2638 gcc_unreachable ();
2639
2640      /* If we don't have a decl for cexp, create one.  This is the
2641	 friendliest fallback if the user calls __builtin_cexpi
2642	 on a target without full C99 function support.  */
2643 if (fn == NULL_TREE)
2644 {
2645 tree fntype;
2646 const char *name = NULL;
2647
2648 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2649 name = "cexpf";
2650 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2651 name = "cexp";
2652 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2653 name = "cexpl";
2654
2655 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2656 fn = build_fn_decl (name, fntype);
2657 }
2658
2659 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2660 build_real (type, dconst0), arg);
2661
2662 /* Make sure not to fold the cexp call again. */
2663 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2664 return expand_expr (build_call_nary (ctype, call, 1, narg),
2665 target, VOIDmode, EXPAND_NORMAL);
2666 }
2667
2668 /* Now build the proper return type. */
2669 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2670 make_tree (TREE_TYPE (arg), op2),
2671 make_tree (TREE_TYPE (arg), op1)),
2672 target, VOIDmode, EXPAND_NORMAL);
2673}
2674
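/* Illustrative note, a sketch rather than GCC documentation:
   __builtin_cexpi (x) denotes cos (x) + i*sin (x), which is why the
   last fallback above can legitimately rewrite it as

     cexp (0.0 + x*i)

   using the complex exponential; the first two strategies compute the
   same pair of values via the sincos machinery instead.  */
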
2675/* Conveniently construct a function call expression. FNDECL names the
2676 function to be called, N is the number of arguments, and the "..."
2677   parameters are the argument expressions.  Unlike build_call_expr,
2678   this doesn't fold the call; hence it will always return a CALL_EXPR.  */
2679
2680static tree
2681build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2682{
2683 va_list ap;
2684 tree fntype = TREE_TYPE (fndecl);
2685 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2686
2687 va_start (ap, n);
2688 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2689 va_end (ap);
2690 SET_EXPR_LOCATION (fn, loc);
2691 return fn;
2692}
2693
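/* Illustrative usage, following the pattern used later in this file:

     tree call = build_call_nofold_loc (loc, fn, 2, dst, src);
     expand_expr (call, target, mode, EXPAND_NORMAL);

   The unfolded CALL_EXPR is what the expanders below need when they
   deliberately lower one builtin into a call to another.  */
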
2694/* Expand a call to one of the builtin rounding functions gcc defines
2695 as an extension (lfloor and lceil). As these are gcc extensions we
2696 do not need to worry about setting errno to EDOM.
2697 If expanding via optab fails, lower expression to (int)(floor(x)).
2698 EXP is the expression that is a call to the builtin function;
2699 if convenient, the result should be placed in TARGET. */
2700
2701static rtx
2702expand_builtin_int_roundingfn (tree exp, rtx target)
2703{
2704 convert_optab builtin_optab;
2705 rtx op0, insns, tmp;
2706 tree fndecl = get_callee_fndecl (exp);
2707 enum built_in_function fallback_fn;
2708 tree fallback_fndecl;
2709 enum machine_mode mode;
2710 tree arg;
2711
2712 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2713 gcc_unreachable ();
2714
2715 arg = CALL_EXPR_ARG (exp, 0);
2716
2717 switch (DECL_FUNCTION_CODE (fndecl))
2718 {
2719 CASE_FLT_FN (BUILT_IN_ICEIL):
2720 CASE_FLT_FN (BUILT_IN_LCEIL):
2721 CASE_FLT_FN (BUILT_IN_LLCEIL):
2722 builtin_optab = lceil_optab;
2723 fallback_fn = BUILT_IN_CEIL;
2724 break;
2725
2726 CASE_FLT_FN (BUILT_IN_IFLOOR):
2727 CASE_FLT_FN (BUILT_IN_LFLOOR):
2728 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2729 builtin_optab = lfloor_optab;
2730 fallback_fn = BUILT_IN_FLOOR;
2731 break;
2732
2733 default:
2734 gcc_unreachable ();
2735 }
2736
2737 /* Make a suitable register to place result in. */
2738 mode = TYPE_MODE (TREE_TYPE (exp));
2739
2740 target = gen_reg_rtx (mode);
2741
2742 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2743 need to expand the argument again. This way, we will not perform
2744     side-effects more than once.  */
2745 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2746
2747 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2748
2749 start_sequence ();
2750
2751 /* Compute into TARGET. */
2752 if (expand_sfix_optab (target, op0, builtin_optab))
2753 {
2754 /* Output the entire sequence. */
2755 insns = get_insns ();
2756 end_sequence ();
2757 emit_insn (insns);
2758 return target;
2759 }
2760
2761 /* If we were unable to expand via the builtin, stop the sequence
2762 (without outputting the insns). */
2763 end_sequence ();
2764
2765 /* Fall back to floating point rounding optab. */
2766 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2767
2768 /* For non-C99 targets we may end up without a fallback fndecl here
2769 if the user called __builtin_lfloor directly. In this case emit
2770 a call to the floor/ceil variants nevertheless. This should result
2771     in the best user experience on targets without full C99 support.  */
2772 if (fallback_fndecl == NULL_TREE)
2773 {
2774 tree fntype;
2775 const char *name = NULL;
2776
2777 switch (DECL_FUNCTION_CODE (fndecl))
2778 {
2779 case BUILT_IN_ICEIL:
2780 case BUILT_IN_LCEIL:
2781 case BUILT_IN_LLCEIL:
2782 name = "ceil";
2783 break;
2784 case BUILT_IN_ICEILF:
2785 case BUILT_IN_LCEILF:
2786 case BUILT_IN_LLCEILF:
2787 name = "ceilf";
2788 break;
2789 case BUILT_IN_ICEILL:
2790 case BUILT_IN_LCEILL:
2791 case BUILT_IN_LLCEILL:
2792 name = "ceill";
2793 break;
2794 case BUILT_IN_IFLOOR:
2795 case BUILT_IN_LFLOOR:
2796 case BUILT_IN_LLFLOOR:
2797 name = "floor";
2798 break;
2799 case BUILT_IN_IFLOORF:
2800 case BUILT_IN_LFLOORF:
2801 case BUILT_IN_LLFLOORF:
2802 name = "floorf";
2803 break;
2804 case BUILT_IN_IFLOORL:
2805 case BUILT_IN_LFLOORL:
2806 case BUILT_IN_LLFLOORL:
2807 name = "floorl";
2808 break;
2809 default:
2810 gcc_unreachable ();
2811 }
2812
2813 fntype = build_function_type_list (TREE_TYPE (arg),
2814 TREE_TYPE (arg), NULL_TREE);
2815 fallback_fndecl = build_fn_decl (name, fntype);
2816 }
2817
2818 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2819
2820 tmp = expand_normal (exp);
2821
2822 /* Truncate the result of floating point optab to integer
2823 via expand_fix (). */
2824 target = gen_reg_rtx (mode);
2825 expand_fix (target, tmp, 0);
2826
2827 return target;
2828}
2829
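/* Illustrative sketch, not actual GCC output: when lfloor_optab has no
   handler for the mode, the function above effectively rewrites

     long l = __builtin_lfloor (x);

   as

     long l = (long) floor (x);

   with expand_fix performing the final float-to-integer truncation.  */
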
2830/* Expand a call to one of the builtin math functions doing integer
2831 conversion (lrint).
2832 Return 0 if a normal call should be emitted rather than expanding the
2833 function in-line. EXP is the expression that is a call to the builtin
2834 function; if convenient, the result should be placed in TARGET. */
2835
2836static rtx
2837expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2838{
2839 convert_optab builtin_optab;
2840 rtx op0, insns;
2841 tree fndecl = get_callee_fndecl (exp);
2842 tree arg;
2843 enum machine_mode mode;
2844 enum built_in_function fallback_fn = BUILT_IN_NONE;
2845
2846 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2847 gcc_unreachable ();
2848
2849 arg = CALL_EXPR_ARG (exp, 0);
2850
2851 switch (DECL_FUNCTION_CODE (fndecl))
2852 {
2853 CASE_FLT_FN (BUILT_IN_IRINT):
2854 fallback_fn = BUILT_IN_LRINT;
2855 /* FALLTHRU */
2856 CASE_FLT_FN (BUILT_IN_LRINT):
2857 CASE_FLT_FN (BUILT_IN_LLRINT):
2858 builtin_optab = lrint_optab;
2859 break;
2860
2861 CASE_FLT_FN (BUILT_IN_IROUND):
2862 fallback_fn = BUILT_IN_LROUND;
2863 /* FALLTHRU */
2864 CASE_FLT_FN (BUILT_IN_LROUND):
2865 CASE_FLT_FN (BUILT_IN_LLROUND):
2866 builtin_optab = lround_optab;
2867 break;
2868
2869 default:
2870 gcc_unreachable ();
2871 }
2872
2873 /* There's no easy way to detect the case we need to set EDOM. */
2874 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2875 return NULL_RTX;
2876
2877 /* Make a suitable register to place result in. */
2878 mode = TYPE_MODE (TREE_TYPE (exp));
2879
2880  /* If errno need not be maintained, try expanding inline via the optab.  */
2881 if (!flag_errno_math)
2882 {
2883 target = gen_reg_rtx (mode);
2884
2885 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2886 need to expand the argument again. This way, we will not perform
2887	 side-effects more than once.  */
2888 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2889
2890 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2891
2892 start_sequence ();
2893
2894 if (expand_sfix_optab (target, op0, builtin_optab))
2895 {
2896 /* Output the entire sequence. */
2897 insns = get_insns ();
2898 end_sequence ();
2899 emit_insn (insns);
2900 return target;
2901 }
2902
2903 /* If we were unable to expand via the builtin, stop the sequence
2904	 (without outputting the insns) and call the library function
2905 with the stabilized argument list. */
2906 end_sequence ();
2907 }
2908
2909 if (fallback_fn != BUILT_IN_NONE)
2910 {
2911      /* Fall back to rounding to long int.  Use implicit_p 0: for non-C99
2912 targets, (int) round (x) should never be transformed into
2913 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2914 a call to lround in the hope that the target provides at least some
2915	 C99 functions.  This should result in the best user experience on
2916	 targets without full C99 support.  */
2917 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2918 fallback_fn, 0);
2919
2920 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2921 fallback_fndecl, 1, arg);
2922
2923 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2924 return convert_to_mode (mode, target, 0);
2925 }
2926
2927 target = expand_call (exp, target, target == const0_rtx);
2928
2929 return target;
2930}
2931
2932/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2933 a normal call should be emitted rather than expanding the function
2934 in-line. EXP is the expression that is a call to the builtin
2935 function; if convenient, the result should be placed in TARGET. */
2936
2937static rtx
2938expand_builtin_powi (tree exp, rtx target)
2939{
2940 tree arg0, arg1;
2941 rtx op0, op1;
2942 enum machine_mode mode;
2943 enum machine_mode mode2;
2944
2945 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2946 return NULL_RTX;
2947
2948 arg0 = CALL_EXPR_ARG (exp, 0);
2949 arg1 = CALL_EXPR_ARG (exp, 1);
2950 mode = TYPE_MODE (TREE_TYPE (exp));
2951
2952 /* Emit a libcall to libgcc. */
2953
2954 /* Mode of the 2nd argument must match that of an int. */
2955 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2956
2957 if (target == NULL_RTX)
2958 target = gen_reg_rtx (mode);
2959
2960 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2961 if (GET_MODE (op0) != mode)
2962 op0 = convert_to_mode (mode, op0, 0);
2963 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2964 if (GET_MODE (op1) != mode2)
2965 op1 = convert_to_mode (mode2, op1, 0);
2966
2967 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2968 target, LCT_CONST, mode, 2,
2969 op0, mode, op1, mode2);
2970
2971 return target;
2972}
2973
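/* Illustrative sketch, not actual GCC output: __builtin_powi raises a
   floating point value to an integer power, e.g.

     double d = __builtin_powi (x, 3);

   computes x cubed, and the expansion above always forwards it to the
   libgcc helper selected via powi_optab (assumed to be the
   __powidf2-style routines).  */
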
2974/* Expand expression EXP which is a call to the strlen builtin. Return
2975   NULL_RTX if we failed (the caller should then emit a normal call); otherwise
2976 try to get the result in TARGET, if convenient. */
2977
2978static rtx
2979expand_builtin_strlen (tree exp, rtx target,
2980 enum machine_mode target_mode)
2981{
2982 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2983 return NULL_RTX;
2984 else
2985 {
2986 struct expand_operand ops[4];
2987 rtx pat;
2988 tree len;
2989 tree src = CALL_EXPR_ARG (exp, 0);
2990 rtx src_reg, before_strlen;
2991 enum machine_mode insn_mode = target_mode;
2992 enum insn_code icode = CODE_FOR_nothing;
2993 unsigned int align;
2994
2995 /* If the length can be computed at compile-time, return it. */
2996 len = c_strlen (src, 0);
2997 if (len)
2998 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2999
3000	/* If the length can be computed at compile-time and is a constant
3001	   integer, but there are side-effects in src, evaluate
3002 src for side-effects, then return len.
3003 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3004 can be optimized into: i++; x = 3; */
3005 len = c_strlen (src, 1);
3006 if (len && TREE_CODE (len) == INTEGER_CST)
3007 {
3008 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3009 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3010 }
3011
3012 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3013
3014 /* If SRC is not a pointer type, don't do this operation inline. */
3015 if (align == 0)
3016 return NULL_RTX;
3017
3018 /* Bail out if we can't compute strlen in the right mode. */
3019 while (insn_mode != VOIDmode)
3020 {
3021 icode = optab_handler (strlen_optab, insn_mode);
3022 if (icode != CODE_FOR_nothing)
3023 break;
3024
3025 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3026 }
3027 if (insn_mode == VOIDmode)
3028 return NULL_RTX;
3029
3030 /* Make a place to hold the source address. We will not expand
3031 the actual source until we are sure that the expansion will
3032 not fail -- there are trees that cannot be expanded twice. */
3033 src_reg = gen_reg_rtx (Pmode);
3034
3035 /* Mark the beginning of the strlen sequence so we can emit the
3036 source operand later. */
3037 before_strlen = get_last_insn ();
3038
3039 create_output_operand (&ops[0], target, insn_mode);
3040 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3041 create_integer_operand (&ops[2], 0);
3042 create_integer_operand (&ops[3], align);
3043 if (!maybe_expand_insn (icode, 4, ops))
3044 return NULL_RTX;
3045
3046 /* Now that we are assured of success, expand the source. */
3047 start_sequence ();
3048 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3049 if (pat != src_reg)
3050 {
3051#ifdef POINTERS_EXTEND_UNSIGNED
3052 if (GET_MODE (pat) != Pmode)
3053 pat = convert_to_mode (Pmode, pat,
3054 POINTERS_EXTEND_UNSIGNED);
3055#endif
3056 emit_move_insn (src_reg, pat);
3057 }
3058 pat = get_insns ();
3059 end_sequence ();
3060
3061 if (before_strlen)
3062 emit_insn_after (pat, before_strlen);
3063 else
3064 emit_insn_before (pat, get_insns ());
3065
3066 /* Return the value in the proper mode for this function. */
3067 if (GET_MODE (ops[0].value) == target_mode)
3068 target = ops[0].value;
3069 else if (target != 0)
3070 convert_move (target, ops[0].value, 0);
3071 else
3072 target = convert_to_mode (target_mode, ops[0].value, 0);
3073
3074 return target;
3075 }
3076}
3077
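/* Illustrative sketch, not actual GCC output: the constant-length path
   above means a call such as

     size_t n = __builtin_strlen ("hello");

   expands to the immediate constant 5, with no call and no scan at
   run time; only non-constant strings reach the strlen_optab path.  */
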
3078/* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3079 bytes from constant string DATA + OFFSET and return it as target
3080 constant. */
3081
3082static rtx
3083builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3084 enum machine_mode mode)
3085{
3086 const char *str = (const char *) data;
3087
3088 gcc_assert (offset >= 0
3089 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3090 <= strlen (str) + 1));
3091
3092 return c_readstr (str + offset, mode);
3093}
3094
3095/* Expand a call EXP to the memcpy builtin.
3096   Return NULL_RTX if we failed (the caller should then emit a normal call);
3097   otherwise try to get the result in TARGET, if convenient (and in
3098 mode MODE if that's convenient). */
3099
3100static rtx
3101expand_builtin_memcpy (tree exp, rtx target)
3102{
3103 if (!validate_arglist (exp,
3104 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3105 return NULL_RTX;
3106 else
3107 {
3108 tree dest = CALL_EXPR_ARG (exp, 0);
3109 tree src = CALL_EXPR_ARG (exp, 1);
3110 tree len = CALL_EXPR_ARG (exp, 2);
3111 const char *src_str;
3112 unsigned int src_align = get_pointer_alignment (src);
3113 unsigned int dest_align = get_pointer_alignment (dest);
3114 rtx dest_mem, src_mem, dest_addr, len_rtx;
3115 HOST_WIDE_INT expected_size = -1;
3116 unsigned int expected_align = 0;
3117
3118 /* If DEST is not a pointer type, call the normal function. */
3119 if (dest_align == 0)
3120 return NULL_RTX;
3121
3122	/* If SRC is not a pointer type, don't do this
3123 operation in-line. */
3124 if (src_align == 0)
3125 return NULL_RTX;
3126
3127 if (currently_expanding_gimple_stmt)
3128 stringop_block_profile (currently_expanding_gimple_stmt,
3129 &expected_align, &expected_size);
3130
3131 if (expected_align < dest_align)
3132 expected_align = dest_align;
3133 dest_mem = get_memory_rtx (dest, len);
3134 set_mem_align (dest_mem, dest_align);
3135 len_rtx = expand_normal (len);
3136 src_str = c_getstr (src);
3137
3138 /* If SRC is a string constant and block move would be done
3139 by pieces, we can avoid loading the string from memory
3140	   and instead store only the computed constants.  */
3141 if (src_str
3142 && CONST_INT_P (len_rtx)
3143 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3144 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3145 CONST_CAST (char *, src_str),
3146 dest_align, false))
3147 {
3148 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3149 builtin_memcpy_read_str,
3150 CONST_CAST (char *, src_str),
3151 dest_align, false, 0);
3152 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3153 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3154 return dest_mem;
3155 }
3156
3157 src_mem = get_memory_rtx (src, len);
3158 set_mem_align (src_mem, src_align);
3159
3160 /* Copy word part most expediently. */
3161 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3162 CALL_EXPR_TAILCALL (exp)
3163 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3164 expected_align, expected_size);
3165
3166 if (dest_addr == 0)
3167 {
3168 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3169 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3170 }
3171 return dest_addr;
3172 }
3173}
3174
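/* Illustrative sketch, not actual GCC output: the store-by-pieces path
   above means a copy from a string literal such as

     char buf[8];
     __builtin_memcpy (buf, "abcdefg", 8);

   can be emitted as a handful of immediate stores, without ever
   loading the literal from memory at run time.  */
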
3175/* Expand a call EXP to the mempcpy builtin.
3176 Return NULL_RTX if we failed; the caller should emit a normal call,
3177 otherwise try to get the result in TARGET, if convenient (and in
3178 mode MODE if that's convenient). If ENDP is 0 return the
3179 destination pointer, if ENDP is 1 return the end pointer ala
3180 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3181 stpcpy. */
3182
3183static rtx
3184expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3185{
3186 if (!validate_arglist (exp,
3187 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3188 return NULL_RTX;
3189 else
3190 {
3191 tree dest = CALL_EXPR_ARG (exp, 0);
3192 tree src = CALL_EXPR_ARG (exp, 1);
3193 tree len = CALL_EXPR_ARG (exp, 2);
3194 return expand_builtin_mempcpy_args (dest, src, len,
3195 target, mode, /*endp=*/ 1);
3196 }
3197}
3198
3199/* Helper function to do the actual work for expand_builtin_mempcpy. The
3200 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3201 so that this can also be called without constructing an actual CALL_EXPR.
3202 The other arguments and return value are the same as for
3203 expand_builtin_mempcpy. */
3204
3205static rtx
3206expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3207 rtx target, enum machine_mode mode, int endp)
3208{
3209 /* If return value is ignored, transform mempcpy into memcpy. */
3210 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3211 {
3212 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3213 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3214 dest, src, len);
3215 return expand_expr (result, target, mode, EXPAND_NORMAL);
3216 }
3217 else
3218 {
3219 const char *src_str;
3220 unsigned int src_align = get_pointer_alignment (src);
3221 unsigned int dest_align = get_pointer_alignment (dest);
3222 rtx dest_mem, src_mem, len_rtx;
3223
3224 /* If either SRC or DEST is not a pointer type, don't do this
3225 operation in-line. */
3226 if (dest_align == 0 || src_align == 0)
3227 return NULL_RTX;
3228
3229 /* If LEN is not constant, call the normal function. */
3230 if (! host_integerp (len, 1))
3231 return NULL_RTX;
3232
3233 len_rtx = expand_normal (len);
3234 src_str = c_getstr (src);
3235
3236 /* If SRC is a string constant and block move would be done
3237 by pieces, we can avoid loading the string from memory
3238	   and instead store only the computed constants.  */
3239 if (src_str
3240 && CONST_INT_P (len_rtx)
3241 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3242 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3243 CONST_CAST (char *, src_str),
3244 dest_align, false))
3245 {
3246 dest_mem = get_memory_rtx (dest, len);
3247 set_mem_align (dest_mem, dest_align);
3248 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3249 builtin_memcpy_read_str,
3250 CONST_CAST (char *, src_str),
3251 dest_align, false, endp);
3252 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3253 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3254 return dest_mem;
3255 }
3256
3257 if (CONST_INT_P (len_rtx)
3258 && can_move_by_pieces (INTVAL (len_rtx),
3259 MIN (dest_align, src_align)))
3260 {
3261 dest_mem = get_memory_rtx (dest, len);
3262 set_mem_align (dest_mem, dest_align);
3263 src_mem = get_memory_rtx (src, len);
3264 set_mem_align (src_mem, src_align);
3265 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3266 MIN (dest_align, src_align), endp);
3267 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3268 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3269 return dest_mem;
3270 }
3271
3272 return NULL_RTX;
3273 }
3274}
3275
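/* Summary of the ENDP convention used above and below, for a copy of
   N bytes from SRC to DEST:

     endp == 0   return DEST           (memcpy-style)
     endp == 1   return DEST + N       (mempcpy-style)
     endp == 2   return DEST + N - 1   (stpcpy-style, i.e. the address
                                        of the copied NUL terminator)  */
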
3276#ifndef HAVE_movstr
3277# define HAVE_movstr 0
3278# define CODE_FOR_movstr CODE_FOR_nothing
3279#endif
3280
3281/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3282 we failed, the caller should emit a normal call, otherwise try to
3283 get the result in TARGET, if convenient. If ENDP is 0 return the
3284 destination pointer, if ENDP is 1 return the end pointer ala
3285 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3286 stpcpy. */
3287
3288static rtx
3289expand_movstr (tree dest, tree src, rtx target, int endp)
3290{
3291 struct expand_operand ops[3];
3292 rtx dest_mem;
3293 rtx src_mem;
3294
3295 if (!HAVE_movstr)
3296 return NULL_RTX;
3297
3298 dest_mem = get_memory_rtx (dest, NULL);
3299 src_mem = get_memory_rtx (src, NULL);
3300 if (!endp)
3301 {
3302 target = force_reg (Pmode, XEXP (dest_mem, 0));
3303 dest_mem = replace_equiv_address (dest_mem, target);
3304 }
3305
3306 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3307 create_fixed_operand (&ops[1], dest_mem);
3308 create_fixed_operand (&ops[2], src_mem);
3309 expand_insn (CODE_FOR_movstr, 3, ops);
3310
3311 if (endp && target != const0_rtx)
3312 {
3313 target = ops[0].value;
3314 /* movstr is supposed to set end to the address of the NUL
3315 terminator. If the caller requested a mempcpy-like return value,
3316 adjust it. */
3317 if (endp == 1)
3318 {
3319 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), target), 1);
3320 emit_move_insn (target, force_operand (tem, NULL_RTX));
3321 }
3322 }
3323 return target;
3324}
3325
3326/* Expand expression EXP, which is a call to the strcpy builtin. Return
3327   NULL_RTX if we failed (the caller should then emit a normal call); otherwise
3328 try to get the result in TARGET, if convenient (and in mode MODE if that's
3329 convenient). */
3330
3331static rtx
3332expand_builtin_strcpy (tree exp, rtx target)
3333{
3334 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3335 {
3336 tree dest = CALL_EXPR_ARG (exp, 0);
3337 tree src = CALL_EXPR_ARG (exp, 1);
3338 return expand_builtin_strcpy_args (dest, src, target);
3339 }
3340 return NULL_RTX;
3341}
3342
3343/* Helper function to do the actual work for expand_builtin_strcpy. The
3344 arguments to the builtin_strcpy call DEST and SRC are broken out
3345 so that this can also be called without constructing an actual CALL_EXPR.
3346 The other arguments and return value are the same as for
3347 expand_builtin_strcpy. */
3348
3349static rtx
3350expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3351{
3352 return expand_movstr (dest, src, target, /*endp=*/0);
3353}
3354
3355/* Expand a call EXP to the stpcpy builtin.
3356 Return NULL_RTX if we failed; the caller should then emit a normal
3357 call. Otherwise try to get the result in TARGET, if convenient
3358 (and in mode MODE if that's convenient). */
3359
3360static rtx
3361expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3362{
3363 tree dst, src;
3364 location_t loc = EXPR_LOCATION (exp);
3365
3366 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3367 return NULL_RTX;
3368
3369 dst = CALL_EXPR_ARG (exp, 0);
3370 src = CALL_EXPR_ARG (exp, 1);
3371
3372 /* If return value is ignored, transform stpcpy into strcpy. */
3373 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3374 {
3375 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3376 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3377 return expand_expr (result, target, mode, EXPAND_NORMAL);
3378 }
3379 else
3380 {
3381 tree len, lenp1;
3382 rtx ret;
3383
3384 /* Ensure we get an actual string whose length can be evaluated at
3385 compile-time, not an expression containing a string. This is
3386 because the latter will potentially produce pessimized code
3387 when used to compute the return value. */
3388 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3389 return expand_movstr (dst, src, target, /*endp=*/2);
3390
3391 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3392 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3393 target, mode, /*endp=*/2);
3394
3395 if (ret)
3396 return ret;
3397
3398 if (TREE_CODE (len) == INTEGER_CST)
3399 {
3400 rtx len_rtx = expand_normal (len);
3401
3402 if (CONST_INT_P (len_rtx))
3403 {
3404 ret = expand_builtin_strcpy_args (dst, src, target);
3405
3406 if (ret)
3407 {
3408 if (! target)
3409 {
3410 if (mode != VOIDmode)
3411 target = gen_reg_rtx (mode);
3412 else
3413 target = gen_reg_rtx (GET_MODE (ret));
3414 }
3415 if (GET_MODE (target) != GET_MODE (ret))
3416 ret = gen_lowpart (GET_MODE (target), ret);
3417
3418 ret = plus_constant (ret, INTVAL (len_rtx));
3419 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3420 gcc_assert (ret);
3421
3422 return target;
3423 }
3424 }
3425 }
3426
3427 return expand_movstr (dst, src, target, /*endp=*/2);
3428 }
3429}
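
/* Illustrative sketch (assumed user code, not from the original
 source): when SRC is a string literal the expansion above rewrites

 stpcpy (buf, "hello");

 as the equivalent of

 (char *) mempcpy (buf, "hello", 6) - 1;

 i.e. a fixed-length copy of strlen ("hello") + 1 bytes whose
 ENDP == 2 return value points at the copied NUL terminator. */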
3430
3431/* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3432 bytes from constant string DATA + OFFSET and return it as target
3433 constant. */
3434
3435rtx
3436builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3437 enum machine_mode mode)
3438{
3439 const char *str = (const char *) data;
3440
3441 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3442 return const0_rtx;
3443
3444 return c_readstr (str + offset, mode);
3445}
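
/* Illustrative note (not part of the original source): store_by_pieces
 calls the routine above once per chunk it stores. With DATA == "ab"
 and a 4-byte MODE, for example:

 offset 0  ->  the constant for 'a' 'b' '\0' '\0'
 offset 4  ->  const0_rtx (the offset is past the string)

 so the trailing zero padding that strncpy requires falls out of the
 callback for free. */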
3446
3447/* Expand expression EXP, which is a call to the strncpy builtin. Return
3448 NULL_RTX if we failed; the caller should then emit a normal call. */
3449
3450static rtx
3451expand_builtin_strncpy (tree exp, rtx target)
3452{
3453 location_t loc = EXPR_LOCATION (exp);
3454
3455 if (validate_arglist (exp,
3456 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3457 {
3458 tree dest = CALL_EXPR_ARG (exp, 0);
3459 tree src = CALL_EXPR_ARG (exp, 1);
3460 tree len = CALL_EXPR_ARG (exp, 2);
3461 tree slen = c_strlen (src, 1);
3462
3463 /* LEN and the length of SRC must both be compile-time constants. */
3464 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3465 return NULL_RTX;
3466
3467 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3468
3469 /* We're required to pad with trailing zeros if the requested
3470 len is greater than strlen(s2)+1. In that case try to
3471 use store_by_pieces; if that fails, punt. */
3472 if (tree_int_cst_lt (slen, len))
3473 {
3474 unsigned int dest_align = get_pointer_alignment (dest);
3475 const char *p = c_getstr (src);
3476 rtx dest_mem;
3477
3478 if (!p || dest_align == 0 || !host_integerp (len, 1)
3479 || !can_store_by_pieces (tree_low_cst (len, 1),
3480 builtin_strncpy_read_str,
3481 CONST_CAST (char *, p),
3482 dest_align, false))
3483 return NULL_RTX;
3484
3485 dest_mem = get_memory_rtx (dest, len);
3486 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3487 builtin_strncpy_read_str,
3488 CONST_CAST (char *, p), dest_align, false, 0);
3489 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3490 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3491 return dest_mem;
3492 }
3493 }
3494 return NULL_RTX;
3495}
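
/* Illustrative example (assumed user code, not from the original
 source) of the padding rule handled above:

 char buf[6];
 strncpy (buf, "ab", 6);   stores 'a' 'b' '\0' '\0' '\0' '\0'

 Because LEN (6) exceeds strlen (SRC) + 1 (3), the inline expansion
 must emit the trailing zeros itself, hence the can_store_by_pieces
 check before committing to it. */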
3496
3497/* Callback routine for store_by_pieces. Return a target constant
3498 consisting of GET_MODE_SIZE (MODE) copies of the single byte
3499 referenced by DATA. OFFSET is ignored. */
3500
3501rtx
3502builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3503 enum machine_mode mode)
3504{
3505 const char *c = (const char *) data;
3506 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3507
3508 memset (p, *c, GET_MODE_SIZE (mode));
3509
3510 return c_readstr (p, mode);
3511}
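
/* Illustrative note (not part of the original source): for a constant
 fill byte the routine above materializes the repeated pattern
 directly; e.g. *DATA == 0x41 with a 4-byte MODE yields the constant
 0x41414141 for store_by_pieces to emit. */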
3512
3513/* Callback routine for store_by_pieces. Return the RTL of a register
3514 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3515 char value given in the RTL register DATA. For example, if MODE is
3516 4 bytes wide, return the RTL for 0x01010101*DATA. */
3517
3518static rtx
3519builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3520 enum machine_mode mode)
3521{
3522 rtx target, coeff;
3523 size_t size;
3524 char *p;
3525
3526 size = GET_MODE_SIZE (mode);
3527 if (size == 1)
3528 return (rtx) data;
3529
3530 p = XALLOCAVEC (char, size);
3531 memset (p, 1, size);
3532 coeff = c_readstr (p, mode);
3533
3534 target = convert_to_mode (mode, (rtx) data, 1);
3535 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3536 return force_reg (mode, target);
3537}
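
/* Illustrative sketch (not from the original source) of the multiply
 trick used above, written as plain C for a 4-byte mode:

 unsigned int
 broadcast4 (unsigned char c)
 {
   return (unsigned int) c * 0x01010101u;
 }

 so e.g. c == 0x41 yields 0x41414141. c_readstr on a buffer of 0x01
 bytes produces the 0x01...01 coefficient and expand_mult emits the
 multiplication. */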
3538
3539/* Expand expression EXP, which is a call to the memset builtin. Return
3540 NULL_RTX if we failed; the caller should then emit a normal call.
3541 Otherwise try to get the result in TARGET, if convenient (and in
3542 mode MODE if that's convenient). */
3543
3544static rtx
3545expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3546{
3547 if (!validate_arglist (exp,
3548 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3549 return NULL_RTX;
3550 else
3551 {
3552 tree dest = CALL_EXPR_ARG (exp, 0);
3553 tree val = CALL_EXPR_ARG (exp, 1);
3554 tree len = CALL_EXPR_ARG (exp, 2);
3555 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3556 }
3557}
3558
3559/* Helper function to do the actual work for expand_builtin_memset. The
3560 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3561 so that this can also be called without constructing an actual CALL_EXPR.
3562 The other arguments and return value are the same as for
3563 expand_builtin_memset. */
3564
3565static rtx
3566expand_builtin_memset_args (tree dest, tree val, tree len,
3567 rtx target, enum machine_mode mode, tree orig_exp)
3568{
3569 tree fndecl, fn;
3570 enum built_in_function fcode;
3571 enum machine_mode val_mode;
3572 char c;
3573 unsigned int dest_align;
3574 rtx dest_mem, dest_addr, len_rtx;
3575 HOST_WIDE_INT expected_size = -1;
3576 unsigned int expected_align = 0;
3577
3578 dest_align = get_pointer_alignment (dest);
3579
3580 /* If DEST is not a pointer type, don't do this operation in-line. */
3581 if (dest_align == 0)
3582 return NULL_RTX;
3583
3584 if (currently_expanding_gimple_stmt)
3585 stringop_block_profile (currently_expanding_gimple_stmt,
3586 &expected_align, &expected_size);
3587
3588 if (expected_align < dest_align)
3589 expected_align = dest_align;
3590
3591 /* If the LEN parameter is zero, return DEST. */
3592 if (integer_zerop (len))
3593 {
3594 /* Evaluate and ignore VAL in case it has side-effects. */
3595 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3596 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3597 }
3598
3599 /* Stabilize the arguments in case we fail. */
3600 dest = builtin_save_expr (dest);
3601 val = builtin_save_expr (val);
3602 len = builtin_save_expr (len);
3603
3604 len_rtx = expand_normal (len);
3605 dest_mem = get_memory_rtx (dest, len);
3606 val_mode = TYPE_MODE (unsigned_char_type_node);
3607
3608 if (TREE_CODE (val) != INTEGER_CST)
3609 {
3610 rtx val_rtx;
3611
3612 val_rtx = expand_normal (val);
3613 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3614
3615 /* Assume that we can memset by pieces if we can store
3616 the coefficients by pieces (in the required modes).
3617 We can't pass builtin_memset_gen_str as that emits RTL. */
3618 c = 1;
3619 if (host_integerp (len, 1)
3620 && can_store_by_pieces (tree_low_cst (len, 1),
3621 builtin_memset_read_str, &c, dest_align,
3622 true))
3623 {
3624 val_rtx = force_reg (val_mode, val_rtx);
3625 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3626 builtin_memset_gen_str, val_rtx, dest_align,
3627 true, 0);
3628 }
3629 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3630 dest_align, expected_align,
3631 expected_size))
3632 goto do_libcall;
3633
3634 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3635 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3636 return dest_mem;
3637 }
3638
3639 if (target_char_cast (val, &c))
3640 goto do_libcall;
3641
3642 if (c)
3643 {
3644 if (host_integerp (len, 1)
3645 && can_store_by_pieces (tree_low_cst (len, 1),
3646 builtin_memset_read_str, &c, dest_align,
3647 true))
3648 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3649 builtin_memset_read_str, &c, dest_align, true, 0);
3650 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3651 gen_int_mode (c, val_mode),
3652 dest_align, expected_align,
3653 expected_size))
3654 goto do_libcall;
3655
3656 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3657 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3658 return dest_mem;
3659 }
3660
3661 set_mem_align (dest_mem, dest_align);
3662 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3663 CALL_EXPR_TAILCALL (orig_exp)
3664 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3665 expected_align, expected_size);
3666
3667 if (dest_addr == 0)
3668 {
3669 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3670 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3671 }
3672
3673 return dest_addr;
3674
3675 do_libcall:
3676 fndecl = get_callee_fndecl (orig_exp);
3677 fcode = DECL_FUNCTION_CODE (fndecl);
3678 if (fcode == BUILT_IN_MEMSET)
3679 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3680 dest, val, len);
3681 else if (fcode == BUILT_IN_BZERO)
3682 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3683 dest, len);
3684 else
3685 gcc_unreachable ();
3686 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3687 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3688 return expand_call (fn, target, target == const0_rtx);
3689}
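
/* Illustrative summary (not part of the original source) of the
 strategy cascade above, for assumed user-level calls:

 memset (p, 0x41, 16)  constant nonzero byte: store_by_pieces of
                       0x4141... words, else a setmem pattern
 memset (p, x, 16)     variable byte: broadcast X via
                       builtin_memset_gen_str, else setmem
 memset (p, 0, n)      zero fill: clear_storage_hints
 otherwise             do_libcall emits a real memset/bzero call. */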
3690
3691/* Expand expression EXP, which is a call to the bzero builtin. Return
3692 NULL_RTX if we failed; the caller should then emit a normal call. */
3693
3694static rtx
3695expand_builtin_bzero (tree exp)
3696{
3697 tree dest, size;
3698 location_t loc = EXPR_LOCATION (exp);
3699
3700 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3701 return NULL_RTX;
3702
3703 dest = CALL_EXPR_ARG (exp, 0);
3704 size = CALL_EXPR_ARG (exp, 1);
3705
3706 /* New argument list transforming bzero(ptr x, int y) to
3707 memset(ptr x, int 0, size_t y). This is done this way
3708 so that if it isn't expanded inline, we fall back to
3709 calling bzero instead of memset. */
3710
3711 return expand_builtin_memset_args (dest, integer_zero_node,
3712 fold_convert_loc (loc,
3713 size_type_node, size),
3714 const0_rtx, VOIDmode, exp);
3715}
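
/* Illustrative example (assumed user code, not from the original
 source):

 bzero (p, n);

 is expanded exactly like memset (p, 0, (size_t) n), except that
 ORIG_EXP still names bzero, so if inline expansion fails the
 library call emitted by do_libcall is to bzero itself. */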
3716
3717/* Expand expression EXP, which is a call to the memcmp built-in function.
3718 Return NULL_RTX if we failed and the caller should emit a normal call,
3719 otherwise try to get the result in TARGET, if convenient (and in mode
3720 MODE, if that's convenient). */
3721
3722static rtx
3723expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3724 ATTRIBUTE_UNUSED enum machine_mode mode)
3725{
3726 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3727
3728 if (!validate_arglist (exp,
3729 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3730 return NULL_RTX;
3731
3732 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3733 implementing memcmp because it stops as soon as it encounters
3734 a matching pair of zero bytes; memcmp must compare all LEN bytes. */
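
/* Illustrative example (not part of the original source):

 memcmp ("a\0x", "a\0y", 3)   must return nonzero (byte 2 differs)

 whereas a cmpstrn-style comparison would stop at the matching NUL
 bytes at index 1 and wrongly report equality. */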
3735#if defined HAVE_cmpmemsi
3736 {
3737 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3738 rtx result;
3739 rtx insn;
3740 tree arg1 = CALL_EXPR_ARG (exp, 0);
3741 tree arg2 = CALL_EXPR_ARG (exp, 1);
3742 tree len = CALL_EXPR_ARG (exp, 2);
3743
3744 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3745 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3746 enum machine_mode insn_mode;
3747
3748 if (HAVE_cmpmemsi)
3749 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3750 else
3751 return NULL_RTX;
3752
3753 /* If either alignment is unknown, punt; the caller will emit a normal call. */
3754 if (arg1_align == 0 || arg2_align == 0)
3755 return NULL_RTX;
3756
3757 /* Make a place to write the result of the instruction. */
3758 result = target;
3759 if (! (result != 0
3760 && REG_P (result) && GET_MODE (result) == insn_mode
3761 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3762 result = gen_reg_rtx (insn_mode);
3763
3764 arg1_rtx = get_memory_rtx (arg1, len);
3765 arg2_rtx = get_memory_rtx (arg2, len);
3766 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3767
3768 /* Set MEM_SIZE as appropriate. */
3769 if (CONST_INT_P (arg3_rtx))
3770 {
3771 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3772 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3773 }
3774
3775 if (HAVE_cmpmemsi)
3776 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3777 GEN_INT (MIN (arg1_align, arg2_align)));
3778 else
3779 gcc_unreachable ();
3780
3781 if (insn)
3782 emit_insn (insn);
3783 else
3784 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3785 TYPE_MODE (integer_type_node), 3,
3786 XEXP (arg1_rtx, 0), Pmode,
3787 XEXP (arg2_rtx, 0), Pmode,
3788 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3789 TYPE_UNSIGNED (sizetype)),
3790 TYPE_MODE (sizetype));
3791
3792 /* Return the value in the proper mode for this function. */
3793 mode = TYPE_MODE (TREE_TYPE (exp));
3794 if (GET_MODE (result) == mode)
3795 return result;
3796 else if (target != 0)
3797 {
3798 convert_move (target, result, 0);