1 ;; Predicate definitions for IA-32 and x86-64.
2 ;; Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
3 ;; Free Software Foundation, Inc.
5 ;; This file is part of GCC.
7 ;; GCC is free software; you can redistribute it and/or modify
8 ;; it under the terms of the GNU General Public License as published by
9 ;; the Free Software Foundation; either version 3, or (at your option)
12 ;; GCC is distributed in the hope that it will be useful,
13 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
14 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 ;; GNU General Public License for more details.
17 ;; You should have received a copy of the GNU General Public License
18 ;; along with GCC; see the file COPYING3. If not see
19 ;; <http://www.gnu.org/licenses/>.
;; Return nonzero if OP is either an i387 or an SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))
;; Return nonzero if OP is an i387 fp register.
;; Unlike any_fp_register_operand above, SSE registers do not match.
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "FP_REGNO_P (REGNO (op))")))
;; Return nonzero if OP is a non-fp register_operand, i.e. a REG whose
;; number is neither an i387 nor an SSE fp register.  Only bare REGs
;; match; SUBREGs are rejected by the match_code.
(define_predicate "register_and_not_any_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))
;; Return nonzero if OP is a register operand other than an i387 fp
;; register.  SSE registers are still accepted here.
(define_predicate "register_and_not_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "FP_REGNO_P (REGNO (op))"))))
;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))
46 ;; True if the operand is a Q_REGS class register.
47 (define_predicate "q_regs_operand"
48 (match_operand 0 "register_operand")
50 if (GET_CODE (op) == SUBREG)
52 return ANY_QI_REG_P (op);
55 ;; Match an SI or HImode register for a zero_extract.
56 (define_special_predicate "ext_register_operand"
57 (match_operand 0 "register_operand")
59 if ((!TARGET_64BIT || GET_MODE (op) != DImode)
60 && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
62 if (GET_CODE (op) == SUBREG)
65 /* Be careful to accept only registers having upper parts. */
66 return REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) < 4;
;; Return true if op is the AX register.
;; Hard register number 0 is %ax/%eax/%rax.
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == 0")))
;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))
;; Return true if op is a register operand other than the xmm0 register.
;; Note that a SUBREG of xmm0 passes the GET_CODE test and so matches.
(define_predicate "reg_not_xmm0_operand"
  (and (match_operand 0 "register_operand")
       (match_test "GET_CODE (op) != REG
		    || REGNO (op) != FIRST_SSE_REG")))
;; As above, but allow nonimmediate operands (i.e. memory as well).
(define_predicate "nonimm_not_xmm0_operand"
  (and (match_operand 0 "nonimmediate_operand")
       (match_test "GET_CODE (op) != REG
		    || REGNO (op) != FIRST_SSE_REG")))
91 ;; Return 1 if VALUE can be stored in a sign extended immediate field.
92 (define_predicate "x86_64_immediate_operand"
93 (match_code "const_int,symbol_ref,label_ref,const")
96 return immediate_operand (op, mode);
98 switch (GET_CODE (op))
101 /* CONST_DOUBLES never match, since HOST_BITS_PER_WIDE_INT is known
102 to be at least 32 and this all acceptable constants are
103 represented as CONST_INT. */
104 if (HOST_BITS_PER_WIDE_INT == 32)
108 HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
109 return trunc_int_for_mode (val, SImode) == val;
114 /* For certain code models, the symbolic references are known to fit.
115 in CM_SMALL_PIC model we know it fits if it is local to the shared
116 library. Don't count TLS SYMBOL_REFs here, since they should fit
117 only if inside of UNSPEC handled below. */
118 /* TLS symbols are not constant. */
119 if (SYMBOL_REF_TLS_MODEL (op))
121 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
122 || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));
125 /* For certain code models, the code is near as well. */
126 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
127 || ix86_cmodel == CM_KERNEL);
130 /* We also may accept the offsetted memory references in certain
132 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
133 switch (XINT (XEXP (op, 0), 1))
135 case UNSPEC_GOTPCREL:
137 case UNSPEC_GOTNTPOFF:
144 if (GET_CODE (XEXP (op, 0)) == PLUS)
146 rtx op1 = XEXP (XEXP (op, 0), 0);
147 rtx op2 = XEXP (XEXP (op, 0), 1);
148 HOST_WIDE_INT offset;
150 if (ix86_cmodel == CM_LARGE)
152 if (!CONST_INT_P (op2))
154 offset = trunc_int_for_mode (INTVAL (op2), DImode);
155 switch (GET_CODE (op1))
158 /* TLS symbols are not constant. */
159 if (SYMBOL_REF_TLS_MODEL (op1))
161 /* For CM_SMALL assume that latest object is 16MB before
162 end of 31bits boundary. We may also accept pretty
163 large negative constants knowing that all objects are
164 in the positive half of address space. */
165 if ((ix86_cmodel == CM_SMALL
166 || (ix86_cmodel == CM_MEDIUM
167 && !SYMBOL_REF_FAR_ADDR_P (op1)))
168 && offset < 16*1024*1024
169 && trunc_int_for_mode (offset, SImode) == offset)
171 /* For CM_KERNEL we know that all object resist in the
172 negative half of 32bits address space. We may not
173 accept negative offsets, since they may be just off
174 and we may accept pretty large positive ones. */
175 if (ix86_cmodel == CM_KERNEL
177 && trunc_int_for_mode (offset, SImode) == offset)
182 /* These conditions are similar to SYMBOL_REF ones, just the
183 constraints for code models differ. */
184 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
185 && offset < 16*1024*1024
186 && trunc_int_for_mode (offset, SImode) == offset)
188 if (ix86_cmodel == CM_KERNEL
190 && trunc_int_for_mode (offset, SImode) == offset)
195 switch (XINT (op1, 1))
200 && trunc_int_for_mode (offset, SImode) == offset)
218 ;; Return 1 if VALUE can be stored in the zero extended immediate field.
219 (define_predicate "x86_64_zext_immediate_operand"
220 (match_code "const_double,const_int,symbol_ref,label_ref,const")
222 switch (GET_CODE (op))
225 if (HOST_BITS_PER_WIDE_INT == 32)
226 return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
231 if (HOST_BITS_PER_WIDE_INT == 32)
232 return INTVAL (op) >= 0;
234 return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);
237 /* For certain code models, the symbolic references are known to fit. */
238 /* TLS symbols are not constant. */
239 if (SYMBOL_REF_TLS_MODEL (op))
241 return (ix86_cmodel == CM_SMALL
242 || (ix86_cmodel == CM_MEDIUM
243 && !SYMBOL_REF_FAR_ADDR_P (op)));
246 /* For certain code models, the code is near as well. */
247 return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;
250 /* We also may accept the offsetted memory references in certain
252 if (GET_CODE (XEXP (op, 0)) == PLUS)
254 rtx op1 = XEXP (XEXP (op, 0), 0);
255 rtx op2 = XEXP (XEXP (op, 0), 1);
257 if (ix86_cmodel == CM_LARGE)
259 switch (GET_CODE (op1))
262 /* TLS symbols are not constant. */
263 if (SYMBOL_REF_TLS_MODEL (op1))
265 /* For small code model we may accept pretty large positive
266 offsets, since one bit is available for free. Negative
267 offsets are limited by the size of NULL pointer area
268 specified by the ABI. */
269 if ((ix86_cmodel == CM_SMALL
270 || (ix86_cmodel == CM_MEDIUM
271 && !SYMBOL_REF_FAR_ADDR_P (op1)))
273 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
274 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
276 /* ??? For the kernel, we may accept adjustment of
277 -0x10000000, since we know that it will just convert
278 negative address space to positive, but perhaps this
279 is not worthwhile. */
283 /* These conditions are similar to SYMBOL_REF ones, just the
284 constraints for code models differ. */
285 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
287 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
288 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
;; Return nonzero if OP is a general operand representable on x86_64,
;; i.e. any general_operand whose immediate form fits in a sign-extended
;; 32-bit field.  On 32-bit targets this is plain general_operand.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))
;; Return nonzero if OP is general operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (ior (match_operand 0 "x86_64_immediate_operand")
	      (match_operand 0 "x86_64_zext_immediate_operand")))
    (match_operand 0 "general_operand")))
;; Return nonzero if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))
;; Return nonzero if OP is nonmemory operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (ior (match_operand 0 "x86_64_immediate_operand")
	      (match_operand 0 "x86_64_zext_immediate_operand")))
    (match_operand 0 "nonmemory_operand")))
335 ;; Return true when operand is PIC expression that can be computed by lea
337 (define_predicate "pic_32bit_operand"
338 (match_code "const,symbol_ref,label_ref")
342 /* Rule out relocations that translate into 64bit constants. */
343 if (TARGET_64BIT && GET_CODE (op) == CONST)
346 if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
348 if (GET_CODE (op) == UNSPEC
349 && (XINT (op, 1) == UNSPEC_GOTOFF
350 || XINT (op, 1) == UNSPEC_GOT))
353 return symbolic_operand (op, mode);
;; Return nonzero if OP is nonmemory operand acceptable by movabs patterns.
;; Under 64-bit PIC, arbitrary symbolic constants are not valid, so only
;; registers and CONST_DOUBLEs of at most 8 bytes are allowed there.
(define_predicate "x86_64_movabs_operand"
  (if_then_else (match_test "!TARGET_64BIT || !flag_pic")
    (match_operand 0 "nonmemory_operand")
    (ior (match_operand 0 "register_operand")
	 (and (match_operand 0 "const_double_operand")
	      (match_test "GET_MODE_SIZE (mode) <= 8")))))
365 ;; Returns nonzero if OP is either a symbol reference or a sum of a symbol
366 ;; reference and a constant.
367 (define_predicate "symbolic_operand"
368 (match_code "symbol_ref,label_ref,const")
370 switch (GET_CODE (op))
378 if (GET_CODE (op) == SYMBOL_REF
379 || GET_CODE (op) == LABEL_REF
380 || (GET_CODE (op) == UNSPEC
381 && (XINT (op, 1) == UNSPEC_GOT
382 || XINT (op, 1) == UNSPEC_GOTOFF
383 || XINT (op, 1) == UNSPEC_GOTPCREL)))
385 if (GET_CODE (op) != PLUS
386 || !CONST_INT_P (XEXP (op, 1)))
390 if (GET_CODE (op) == SYMBOL_REF
391 || GET_CODE (op) == LABEL_REF)
393 /* Only @GOTOFF gets offsets. */
394 if (GET_CODE (op) != UNSPEC
395 || XINT (op, 1) != UNSPEC_GOTOFF)
398 op = XVECEXP (op, 0, 0);
399 if (GET_CODE (op) == SYMBOL_REF
400 || GET_CODE (op) == LABEL_REF)
409 ;; Return true if the operand contains a @GOT or @GOTOFF reference.
410 (define_predicate "pic_symbolic_operand"
416 if (GET_CODE (op) == UNSPEC
417 && XINT (op, 1) == UNSPEC_GOTPCREL)
419 if (GET_CODE (op) == PLUS
420 && GET_CODE (XEXP (op, 0)) == UNSPEC
421 && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL)
426 if (GET_CODE (op) == UNSPEC)
428 if (GET_CODE (op) != PLUS
429 || !CONST_INT_P (XEXP (op, 1)))
432 if (GET_CODE (op) == UNSPEC
433 && XINT (op, 1) != UNSPEC_MACHOPIC_OFFSET)
439 ;; Return true if OP is a symbolic operand that resolves locally.
440 (define_predicate "local_symbolic_operand"
441 (match_code "const,label_ref,symbol_ref")
443 if (GET_CODE (op) == CONST
444 && GET_CODE (XEXP (op, 0)) == PLUS
445 && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
446 op = XEXP (XEXP (op, 0), 0);
448 if (GET_CODE (op) == LABEL_REF)
451 if (GET_CODE (op) != SYMBOL_REF)
454 if (SYMBOL_REF_TLS_MODEL (op) != 0)
457 if (SYMBOL_REF_LOCAL_P (op))
460 /* There is, however, a not insubstantial body of code in the rest of
461 the compiler that assumes it can just stick the results of
462 ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done. */
463 /* ??? This is a hack. Should update the body of the compiler to
464 always create a DECL an invoke targetm.encode_section_info. */
465 if (strncmp (XSTR (op, 0), internal_label_prefix,
466 internal_label_prefix_len) == 0)
;; Test for a legitimate @GOTOFF operand.
;;
;; VxWorks does not impose a fixed gap between segments; the run-time
;; gap can be different from the object-file gap.  We therefore can't
;; use @GOTOFF unless we are absolutely sure that the symbol is in the
;; same segment as the GOT.  Unfortunately, the flexibility of linker
;; scripts means that we can't be sure of that in general, so assume
;; that @GOTOFF is never valid on VxWorks.
(define_predicate "gotoff_operand"
  (and (match_test "!TARGET_VXWORKS_RTP")
       (match_operand 0 "local_symbolic_operand")))
;; Test for various thread-local symbols.
(define_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op) != 0")))
;; Match exactly the symbol returned by ix86_tls_module_base ().
(define_predicate "tls_modbase_operand"
  (and (match_code "symbol_ref")
       (match_test "op == ix86_tls_module_base ()")))
;; True for any register, or for the thread pointer (an UNSPEC_TP unspec).
(define_predicate "tp_or_register_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "unspec")
	    (match_test "XINT (op, 1) == UNSPEC_TP"))))
498 ;; Test for a pc-relative call operand
499 (define_predicate "constant_call_address_operand"
500 (match_code "symbol_ref")
502 if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
504 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
509 ;; True for any non-virtual or eliminable register. Used in places where
510 ;; instantiation of such a register may cause the pattern to not be recognized.
511 (define_predicate "register_no_elim_operand"
512 (match_operand 0 "register_operand")
514 if (GET_CODE (op) == SUBREG)
515 op = SUBREG_REG (op);
516 return !(op == arg_pointer_rtx
517 || op == frame_pointer_rtx
518 || IN_RANGE (REGNO (op),
519 FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
522 ;; P6 processors will jump to the address after the decrement when %esp
523 ;; is used as a call operand, so they will execute return address as a code.
524 ;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.
526 (define_predicate "call_register_no_elim_operand"
527 (match_operand 0 "register_operand")
529 if (GET_CODE (op) == SUBREG)
530 op = SUBREG_REG (op);
532 if (!TARGET_64BIT && op == stack_pointer_rtx)
535 return register_no_elim_operand (op, mode);
538 ;; Similarly, but include the stack pointer. This is used to prevent esp
539 ;; from being used as an index reg.
540 (define_predicate "index_register_operand"
541 (match_operand 0 "register_operand")
543 if (GET_CODE (op) == SUBREG)
544 op = SUBREG_REG (op);
545 if (reload_in_progress || reload_completed)
546 return REG_OK_FOR_INDEX_STRICT_P (op);
548 return REG_OK_FOR_INDEX_NONSTRICT_P (op);
;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))
;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))
;; Test for a valid operand for a call instruction:
;; a pc-relative symbolic address, a call-safe register, or memory.
(define_predicate "call_insn_operand"
  (ior (match_operand 0 "constant_call_address_operand")
       (ior (match_operand 0 "call_register_no_elim_operand")
	    (match_operand 0 "memory_operand"))))
;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_predicate "sibcall_insn_operand"
  (ior (match_operand 0 "constant_call_address_operand")
       (match_operand 0 "register_no_elim_operand")))
574 ;; Match exactly zero.
575 (define_predicate "const0_operand"
576 (match_code "const_int,const_double,const_vector")
578 if (mode == VOIDmode)
579 mode = GET_MODE (op);
580 return op == CONST0_RTX (mode);
;; Match exactly one.
(define_predicate "const1_operand"
  (and (match_code "const_int")
       (match_test "op == const1_rtx")))
;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))
593 ;; Match 2, 4, or 8. Used for leal multiplicands.
594 (define_predicate "const248_operand"
595 (match_code "const_int")
597 HOST_WIDE_INT i = INTVAL (op);
598 return i == 2 || i == 4 || i == 8;
;; Match 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (match_test "op == const0_rtx || op == const1_rtx")))

;; Match 0 to 3.
(define_predicate "const_0_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 3)")))

;; Match 0 to 7.
(define_predicate "const_0_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 7)")))

;; Match 0 to 15.
(define_predicate "const_0_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))

;; Match 0 to 31.
(define_predicate "const_0_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 31)")))

;; Match 0 to 63.
(define_predicate "const_0_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 63)")))

;; Match 0 to 255.
(define_predicate "const_0_to_255_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 255)")))
636 ;; Match (0 to 255) * 8
637 (define_predicate "const_0_to_255_mul_8_operand"
638 (match_code "const_int")
640 unsigned HOST_WIDE_INT val = INTVAL (op);
641 return val <= 255*8 && val % 8 == 0;
;; Return nonzero if OP is CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 31)")))

;; Return nonzero if OP is CONST_INT >= 1 and <= 63 (a valid operand
;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 63)")))
;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 2, 3)")))

;; Match 4 or 5.
(define_predicate "const_4_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 5)")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 7)")))

;; Match 6 or 7.
(define_predicate "const_6_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 6, 7)")))

;; Match 8 to 11.
(define_predicate "const_8_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 11)")))

;; Match 12 to 15.
(define_predicate "const_12_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 15)")))

;; Match exactly one bit in 2-bit mask.
(define_predicate "const_pow2_1_to_2_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 1 || INTVAL (op) == 2")))
691 ;; Match exactly one bit in 4-bit mask.
692 (define_predicate "const_pow2_1_to_8_operand"
693 (match_code "const_int")
695 unsigned int log = exact_log2 (INTVAL (op));
699 ;; Match exactly one bit in 8-bit mask.
700 (define_predicate "const_pow2_1_to_128_operand"
701 (match_code "const_int")
703 unsigned int log = exact_log2 (INTVAL (op));
707 ;; Match exactly one bit in 16-bit mask.
708 (define_predicate "const_pow2_1_to_32768_operand"
709 (match_code "const_int")
711 unsigned int log = exact_log2 (INTVAL (op));
715 ;; True if this is a constant appropriate for an increment or decrement.
716 (define_predicate "incdec_operand"
717 (match_code "const_int")
719 /* On Pentium4, the inc and dec operations causes extra dependency on flag
720 registers, since carry flag is not set. */
721 if (!TARGET_USE_INCDEC && !optimize_size)
723 return op == const1_rtx || op == constm1_rtx;
;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (match_test "op == const1_rtx || op == constm1_rtx"))))
;; True if OP is acceptable as operand of DImode shift expander.
;; Only registers can be shifted in DImode on 32-bit targets.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))
;; True if OP is acceptable as the shifted input of a DImode ashift
;; expander; on 32-bit targets only registers and +/-1 constants.
(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))
743 ;; Return true if OP is a vector load from the constant pool with just
744 ;; the first element nonzero.
745 (define_predicate "zero_extended_scalar_load_operand"
749 op = maybe_get_pool_constant (op);
752 if (GET_CODE (op) != CONST_VECTOR)
755 (GET_MODE_SIZE (GET_MODE (op)) /
756 GET_MODE_SIZE (GET_MODE_INNER (GET_MODE (op))));
757 for (n_elts--; n_elts > 0; n_elts--)
759 rtx elt = CONST_VECTOR_ELT (op, n_elts);
760 if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
766 /* Return true if operand is a vector constant that is all ones. */
767 (define_predicate "vector_all_ones_operand"
768 (match_code "const_vector")
770 int nunits = GET_MODE_NUNITS (mode);
772 if (GET_CODE (op) == CONST_VECTOR
773 && CONST_VECTOR_NUNITS (op) == nunits)
776 for (i = 0; i < nunits; ++i)
778 rtx x = CONST_VECTOR_ELT (op, i);
779 if (x != constm1_rtx)
;; Return 1 when OP is operand acceptable for standard SSE move.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))
793 ;; Return 1 when OP is nonimmediate or standard SSE constant.
794 (define_predicate "nonimmediate_or_sse_const_operand"
795 (match_operand 0 "general_operand")
797 if (nonimmediate_operand (op, mode))
799 if (standard_sse_constant_p (op) > 0)
;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))
;; Return true if OP is a valid address, and does not contain
;; a segment override.
811 (define_special_predicate "no_seg_address_operand"
812 (match_operand 0 "address_operand")
814 struct ix86_address parts;
817 ok = ix86_decompose_address (op, &parts);
819 return parts.seg == SEG_DEFAULT;
822 ;; Return nonzero if the rtx is known to be at least 32 bits aligned.
823 (define_predicate "aligned_operand"
824 (match_operand 0 "general_operand")
826 struct ix86_address parts;
829 /* Registers and immediate operands are always "aligned". */
830 if (GET_CODE (op) != MEM)
833 /* All patterns using aligned_operand on memory operands ends up
834 in promoting memory operand to 64bit and thus causing memory mismatch. */
835 if (TARGET_MEMORY_MISMATCH_STALL && !optimize_size)
838 /* Don't even try to do any aligned optimizations with volatiles. */
839 if (MEM_VOLATILE_P (op))
842 if (MEM_ALIGN (op) >= 32)
847 /* Pushes and pops are only valid on the stack pointer. */
848 if (GET_CODE (op) == PRE_DEC
849 || GET_CODE (op) == POST_INC)
852 /* Decode the address. */
853 ok = ix86_decompose_address (op, &parts);
856 /* Look for some component that isn't known to be aligned. */
859 if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
864 if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
869 if (!CONST_INT_P (parts.disp)
870 || (INTVAL (parts.disp) & 3) != 0)
874 /* Didn't find one -- this must be an aligned address. */
878 ;; Returns 1 if OP is memory operand with a displacement.
879 (define_predicate "memory_displacement_operand"
880 (match_operand 0 "memory_operand")
882 struct ix86_address parts;
885 ok = ix86_decompose_address (XEXP (op, 0), &parts);
887 return parts.disp != NULL_RTX;
890 ;; Returns 1 if OP is memory operand with a displacement only.
891 (define_predicate "memory_displacement_only_operand"
892 (match_operand 0 "memory_operand")
894 struct ix86_address parts;
897 ok = ix86_decompose_address (XEXP (op, 0), &parts);
900 if (parts.base || parts.index)
903 return parts.disp != NULL_RTX;
906 ;; Returns 1 if OP is memory operand which will need zero or
907 ;; one register at most, not counting stack pointer or frame pointer.
908 (define_predicate "cmpxchg8b_pic_memory_operand"
909 (match_operand 0 "memory_operand")
911 struct ix86_address parts;
914 ok = ix86_decompose_address (XEXP (op, 0), &parts);
916 if (parts.base == NULL_RTX
917 || parts.base == arg_pointer_rtx
918 || parts.base == frame_pointer_rtx
919 || parts.base == hard_frame_pointer_rtx
920 || parts.base == stack_pointer_rtx)
923 if (parts.index == NULL_RTX
924 || parts.index == arg_pointer_rtx
925 || parts.index == frame_pointer_rtx
926 || parts.index == hard_frame_pointer_rtx
927 || parts.index == stack_pointer_rtx)
;; Returns 1 if OP is a memory operand that cannot be represented
;; by the modRM array.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op) != 0")))
940 ;; Return 1 if OP is a comparison operator that can be issued by fcmov.
941 (define_predicate "fcmov_comparison_operator"
942 (match_operand 0 "comparison_operator")
944 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
945 enum rtx_code code = GET_CODE (op);
947 if (inmode == CCFPmode || inmode == CCFPUmode)
949 enum rtx_code second_code, bypass_code;
950 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
951 if (bypass_code != UNKNOWN || second_code != UNKNOWN)
953 code = ix86_fp_compare_code_to_integer (code);
955 /* i387 supports just limited amount of conditional codes. */
958 case LTU: case GTU: case LEU: case GEU:
959 if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
960 || inmode == CCCmode)
963 case ORDERED: case UNORDERED:
;; Return 1 if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs.
;;
;; ??? It would seem that we have a lot of uses of this predicate that pass
;; it the wrong mode.  We got away with this because the old function didn't
;; check the mode at all.  Mirror that for now by calling this a special
;; predicate.
(define_special_predicate "sse_comparison_operator"
  (match_code "eq,lt,le,unordered,ne,unge,ungt,ordered"))
;; Return 1 if OP is a comparison operator that can be issued by
;; avx predicate generation instructions
(define_predicate "avx_comparison_float_operator"
  (match_code "ne,eq,ge,gt,le,lt,unordered,ordered,uneq,unge,ungt,unle,unlt,ltgt"))
;; Return 1 if OP is a comparison operator that can be issued by sse predicate
;; generation instructions
(define_predicate "sse5_comparison_float_operator"
  (and (match_test "TARGET_SSE5")
       (match_code "ne,eq,ge,gt,le,lt,unordered,ordered,uneq,unge,ungt,unle,unlt,ltgt")))
;; Match any signed integer comparison code.
(define_predicate "ix86_comparison_int_operator"
  (match_code "ne,eq,ge,gt,le,lt"))
;; Match any unsigned integer comparison code (plus eq/ne).
(define_predicate "ix86_comparison_uns_operator"
  (match_code "ne,eq,geu,gtu,leu,ltu"))
;; Match the comparison codes usable with the bit-test (bt) patterns.
(define_predicate "bt_comparison_operator"
  (match_code "ne,eq"))
1003 ;; Return 1 if OP is a valid comparison operator in valid mode.
1004 (define_predicate "ix86_comparison_operator"
1005 (match_operand 0 "comparison_operator")
1007 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
1008 enum rtx_code code = GET_CODE (op);
1010 if (inmode == CCFPmode || inmode == CCFPUmode)
1012 enum rtx_code second_code, bypass_code;
1013 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
1014 return (bypass_code == UNKNOWN && second_code == UNKNOWN);
1021 if (inmode == CCmode || inmode == CCGCmode
1022 || inmode == CCGOCmode || inmode == CCNOmode)
1025 case LTU: case GTU: case LEU: case GEU:
1026 if (inmode == CCmode || inmode == CCCmode)
1029 case ORDERED: case UNORDERED:
1030 if (inmode == CCmode)
1034 if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
1042 ;; Return 1 if OP is a valid comparison operator testing carry flag to be set.
1043 (define_predicate "ix86_carry_flag_operator"
1044 (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
1046 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
1047 enum rtx_code code = GET_CODE (op);
1049 if (!REG_P (XEXP (op, 0))
1050 || REGNO (XEXP (op, 0)) != FLAGS_REG
1051 || XEXP (op, 1) != const0_rtx)
1054 if (inmode == CCFPmode || inmode == CCFPUmode)
1056 enum rtx_code second_code, bypass_code;
1057 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
1058 if (bypass_code != UNKNOWN || second_code != UNKNOWN)
1060 code = ix86_fp_compare_code_to_integer (code);
1062 else if (inmode == CCCmode)
1063 return code == LTU || code == GTU;
1064 else if (inmode != CCmode)
;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into a register.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_code "const_double")
       (match_operand 0 "general_operand")))
;; Return true if this is a valid binary floating-point operation.
(define_predicate "binary_fp_operator"
  (match_code "plus,minus,mult,div"))
;; Return true if this is a multiply operation.
(define_predicate "mult_operator"
  (match_code "mult"))
1085 ;; Return true if this is a division operation.
1086 (define_predicate "div_operator"
;; Return true if this is a float extend operation.
(define_predicate "float_operator"
  (match_code "float"))
;; Return true for ARITHMETIC_P.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
	       mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))
;; Return true for COMMUTATIVE_P.
(define_predicate "commutative_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))
;; Return 1 if OP is a binary operator that can be promoted to wider mode.
;; Multiplies are only promotable when the tuning says HImode imul is slow.
(define_predicate "promotable_binary_operator"
  (ior (match_code "plus,and,ior,xor,ashift")
       (and (match_code "mult")
	    (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))
1108 ;; To avoid problems when jump re-emits comparisons like testqi_ext_ccno_0,
1109 ;; re-recognize the operand to avoid a copy_to_mode_reg that will fail.
1111 ;; ??? It seems likely that this will only work because cmpsi is an
1112 ;; expander, and no actual insns use this.
1114 (define_predicate "cmpsi_operand"
1115 (ior (match_operand 0 "nonimmediate_operand")
1116 (and (match_code "and")
1117 (match_code "zero_extract" "0")
1118 (match_code "const_int" "1")
1119 (match_code "const_int" "01")
1120 (match_code "const_int" "02")
1121 (match_test "INTVAL (XEXP (XEXP (op, 0), 1)) == 8")
1122 (match_test "INTVAL (XEXP (XEXP (op, 0), 2)) == 8")
;; Match a COMPARE rtx.
(define_predicate "compare_operator"
  (match_code "compare"))
;; Match an ABS or NEG rtx.
(define_predicate "absneg_operator"
  (match_code "abs,neg"))
;; Return 1 if OP is misaligned memory operand
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "MEM_ALIGN (op) < GET_MODE_ALIGNMENT (mode)")))
1136 ;; Return 1 if OP is a vzeroall operation, known to be a PARALLEL.
1137 (define_predicate "vzeroall_operation"
1138 (match_code "parallel")
1140 int nregs = TARGET_64BIT ? 16 : 8;
1142 if (XVECLEN (op, 0) != nregs + 1)