1 /* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /* This module is essentially the "combiner" phase of the U. of Arizona
23 Portable Optimizer, but redone to work on our list-structured
24 representation for RTL instead of their string representation.
26 The LOG_LINKS of each insn identify the most recent assignment
27 to each REG used in the insn. It is a list of previous insns,
28 each of which contains a SET for a REG that is used in this insn
29 and not used or set in between. LOG_LINKs never cross basic blocks.
30 They were set up by the preceding pass (lifetime analysis).
32 We try to combine each pair of insns joined by a logical link.
33 We also try to combine triples of insns A, B and C when
34 C has a link back to B and B has a link back to A.
36 LOG_LINKS does not have links for use of the CC0. They don't
37 need to, because the insn that sets the CC0 is always immediately
38 before the insn that tests it. So we always regard a branch
39 insn as having a logical link to the preceding insn. The same is true
40 for an insn explicitly using CC0.
42 We check (with use_crosses_set_p) to avoid combining in such a way
43 as to move a computation to a place where its value would be different.
45 Combination is done by mathematically substituting the previous
46 insn(s) values for the regs they set into the expressions in
47 the later insns that refer to these regs. If the result is a valid insn
48 for our target machine, according to the machine description,
49 we install it, delete the earlier insns, and update the data flow
50 information (LOG_LINKS and REG_NOTES) for what we did.
52 There are a few exceptions where the dataflow information created by
53 flow.c aren't completely updated:
55 - reg_live_length is not updated
56 - reg_n_refs is not adjusted in the rare case when a register is
57 no longer required in a computation
58 - there are extremely rare cases (see distribute_regnotes) when a
60 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
61 removed because there is no way to know which register it was
64 To simplify substitution, we combine only when the earlier insn(s)
65 consist of only a single assignment. To simplify updating afterward,
66 we never combine when a subroutine call appears in the middle.
68 Since we do not represent assignments to CC0 explicitly except when that
69 is all an insn does, there is no LOG_LINKS entry in an insn that uses
70 the condition code for the insn that set the condition code.
71 Fortunately, these two insns must be consecutive.
72 Therefore, every JUMP_INSN is taken to have an implicit logical link
73 to the preceding insn. This is not quite right, since non-jumps can
74 also use the condition code; but in practice such insns would not
79 #include "coretypes.h"
86 #include "hard-reg-set.h"
87 #include "basic-block.h"
88 #include "insn-config.h"
90 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
92 #include "insn-attr.h"
98 #include "insn-codes.h"
99 #include "rtlhooks-def.h"
100 /* Include output.h for dump_file. */
104 #include "tree-pass.h"
106 /* Number of attempts to combine instructions in this function. */
108 static int combine_attempts;
110 /* Number of attempts that got as far as substitution in this function. */
112 static int combine_merges;
114 /* Number of instructions combined with added SETs in this function. */
116 static int combine_extras;
118 /* Number of instructions combined in this function. */
120 static int combine_successes;
122 /* Totals over entire compilation. */
124 static int total_attempts, total_merges, total_extras, total_successes;
127 /* Vector mapping INSN_UIDs to cuids.
128 The cuids are like uids but increase monotonically always.
129 Combine always uses cuids so that it can compare them.
130 But actually renumbering the uids, which we used to do,
131 proves to be a bad idea because it makes it hard to compare
132 the dumps produced by earlier passes with those from later passes. */
134 static int *uid_cuid;
135 static int max_uid_cuid;
137 /* Get the cuid of an insn. */
139 #define INSN_CUID(INSN) \
140 (INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
142 /* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
143 BITS_PER_WORD would invoke undefined behavior. Work around it. */
145 #define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
146 (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1)
148 /* Maximum register number, which is the size of the tables below. */
150 static unsigned int combine_max_regno;
153 /* Record last point of death of (hard or pseudo) register n. */
156 /* Record last point of modification of (hard or pseudo) register n. */
159 /* The next group of fields allows the recording of the last value assigned
160 to (hard or pseudo) register n. We use this information to see if an
161 operation being processed is redundant given a prior operation performed
162 on the register. For example, an `and' with a constant is redundant if
163 all the zero bits are already known to be turned off.
165 We use an approach similar to that used by cse, but change it in the
168 (1) We do not want to reinitialize at each label.
169 (2) It is useful, but not critical, to know the actual value assigned
170 to a register. Often just its form is helpful.
172 Therefore, we maintain the following fields:
174 last_set_value the last value assigned
175 last_set_label records the value of label_tick when the
176 register was assigned
177 last_set_table_tick records the value of label_tick when a
178 value using the register is assigned
179 last_set_invalid set to nonzero when it is not valid
180 to use the value of this register in some
183 To understand the usage of these tables, it is important to understand
184 the distinction between the value in last_set_value being valid and
185 the register being validly contained in some other expression in the
188 (The next two parameters are out of date).
190 reg_stat[i].last_set_value is valid if it is nonzero, and either
191 reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.
193 Register I may validly appear in any expression returned for the value
194 of another register if reg_n_sets[i] is 1. It may also appear in the
195 value for register J if reg_stat[j].last_set_invalid is zero, or
196 reg_stat[i].last_set_label < reg_stat[j].last_set_label.
198 If an expression is found in the table containing a register which may
199 not validly appear in an expression, the register is replaced by
200 something that won't match, (clobber (const_int 0)). */
202 /* Record last value assigned to (hard or pseudo) register n. */
206 /* Record the value of label_tick when an expression involving register n
207 is placed in last_set_value. */
209 int last_set_table_tick;
211 /* Record the value of label_tick when the value for register n is placed in
216 /* These fields are maintained in parallel with last_set_value and are
217 used to store the mode in which the register was last set, the bits
218 that were known to be zero when it was last set, and the number of
219 sign bits copies it was known to have when it was last set. */
221 unsigned HOST_WIDE_INT last_set_nonzero_bits;
222 char last_set_sign_bit_copies;
223 ENUM_BITFIELD(machine_mode) last_set_mode : 8;
225 /* Set nonzero if references to register n in expressions should not be
226 used. last_set_invalid is set nonzero when this register is being
227 assigned to and last_set_table_tick == label_tick. */
229 char last_set_invalid;
231 /* Some registers that are set more than once and used in more than one
232 basic block are nevertheless always set in similar ways. For example,
233 a QImode register may be loaded from memory in two places on a machine
234 where byte loads zero extend.
236 We record in the following fields if a register has some leading bits
237 that are always equal to the sign bit, and what we know about the
238 nonzero bits of a register, specifically which bits are known to be
241 If an entry is zero, it means that we don't know anything special. */
243 unsigned char sign_bit_copies;
245 unsigned HOST_WIDE_INT nonzero_bits;
248 static struct reg_stat *reg_stat;
250 /* Record the cuid of the last insn that invalidated memory
251 (anything that writes memory, and subroutine calls, but not pushes). */
253 static int mem_last_set;
255 /* Record the cuid of the last CALL_INSN
256 so we can tell whether a potential combination crosses any calls. */
258 static int last_call_cuid;
260 /* When `subst' is called, this is the insn that is being modified
261 (by combining in a previous insn). The PATTERN of this insn
262 is still the old pattern partially modified and it should not be
263 looked at, but this may be used to examine the successors of the insn
264 to judge whether a simplification is valid. */
266 static rtx subst_insn;
268 /* This is the lowest CUID that `subst' is currently dealing with.
269 get_last_value will not return a value if the register was set at or
270 after this CUID. If not for this mechanism, we could get confused if
271 I2 or I1 in try_combine were an insn that used the old value of a register
272 to obtain a new value. In that case, we might erroneously get the
273 new value of the register when we wanted the old one. */
275 static int subst_low_cuid;
277 /* This contains any hard registers that are used in newpat; reg_dead_at_p
278 must consider all these registers to be always live. */
280 static HARD_REG_SET newpat_used_regs;
282 /* This is an insn to which a LOG_LINKS entry has been added. If this
283 insn is the earlier than I2 or I3, combine should rescan starting at
286 static rtx added_links_insn;
288 /* Basic block in which we are performing combines. */
289 static basic_block this_basic_block;
291 /* A bitmap indicating which blocks had registers go dead at entry.
292 After combine, we'll need to re-do global life analysis with
293 those blocks as starting points. */
294 static sbitmap refresh_blocks;
296 /* The following array records the insn_rtx_cost for every insn
297 in the instruction stream. */
299 static int *uid_insn_cost;
301 /* Length of the currently allocated uid_insn_cost array. */
303 static int last_insn_cost;
305 /* Incremented for each label. */
307 static int label_tick;
309 /* Mode used to compute significance in reg_stat[].nonzero_bits. It is the
310 largest integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
312 static enum machine_mode nonzero_bits_mode;
314 /* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
315 be safely used. It is zero while computing them and after combine has
316 completed. This former test prevents propagating values based on
317 previously set values, which can be incorrect if a variable is modified
320 static int nonzero_sign_valid;
323 /* Record one modification to rtl structure
324 to be undone by storing old_contents into *where.
325 is_int is 1 if the contents are an int. */
331 union {rtx r; int i;} old_contents;
332 union {rtx *r; int *i;} where;
335 /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
336 num_undo says how many are currently recorded.
338 other_insn is nonzero if we have modified some other insn in the process
339 of working on subst_insn. It must be verified too. */
348 static struct undobuf undobuf;
350 /* Number of times the pseudo being substituted for
351 was found and replaced. */
353 static int n_occurrences;
355 static rtx reg_nonzero_bits_for_combine (rtx, enum machine_mode, rtx,
357 unsigned HOST_WIDE_INT,
358 unsigned HOST_WIDE_INT *);
359 static rtx reg_num_sign_bit_copies_for_combine (rtx, enum machine_mode, rtx,
361 unsigned int, unsigned int *);
362 static void do_SUBST (rtx *, rtx);
363 static void do_SUBST_INT (int *, int);
364 static void init_reg_last (void);
365 static void setup_incoming_promotions (void);
366 static void set_nonzero_bits_and_sign_copies (rtx, rtx, void *);
367 static int cant_combine_insn_p (rtx);
368 static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *);
369 static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *);
370 static int contains_muldiv (rtx);
371 static rtx try_combine (rtx, rtx, rtx, int *);
372 static void undo_all (void);
373 static void undo_commit (void);
374 static rtx *find_split_point (rtx *, rtx);
375 static rtx subst (rtx, rtx, rtx, int, int);
376 static rtx combine_simplify_rtx (rtx, enum machine_mode, int);
377 static rtx simplify_if_then_else (rtx);
378 static rtx simplify_set (rtx);
379 static rtx simplify_logical (rtx);
380 static rtx expand_compound_operation (rtx);
381 static rtx expand_field_assignment (rtx);
382 static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
383 rtx, unsigned HOST_WIDE_INT, int, int, int);
384 static rtx extract_left_shift (rtx, int);
385 static rtx make_compound_operation (rtx, enum rtx_code);
386 static int get_pos_from_mask (unsigned HOST_WIDE_INT,
387 unsigned HOST_WIDE_INT *);
388 static rtx force_to_mode (rtx, enum machine_mode,
389 unsigned HOST_WIDE_INT, rtx, int);
390 static rtx if_then_else_cond (rtx, rtx *, rtx *);
391 static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
392 static int rtx_equal_for_field_assignment_p (rtx, rtx);
393 static rtx make_field_assignment (rtx);
394 static rtx apply_distributive_law (rtx);
395 static rtx distribute_and_simplify_rtx (rtx, int);
396 static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
397 unsigned HOST_WIDE_INT);
398 static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
399 HOST_WIDE_INT, enum machine_mode, int *);
400 static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
402 static int recog_for_combine (rtx *, rtx, rtx *);
403 static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
404 static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
405 static void update_table_tick (rtx);
406 static void record_value_for_reg (rtx, rtx, rtx);
407 static void check_promoted_subreg (rtx, rtx);
408 static void record_dead_and_set_regs_1 (rtx, rtx, void *);
409 static void record_dead_and_set_regs (rtx);
410 static int get_last_value_validate (rtx *, rtx, int, int);
411 static rtx get_last_value (rtx);
412 static int use_crosses_set_p (rtx, int);
413 static void reg_dead_at_p_1 (rtx, rtx, void *);
414 static int reg_dead_at_p (rtx, rtx);
415 static void move_deaths (rtx, rtx, int, rtx, rtx *);
416 static int reg_bitfield_target_p (rtx, rtx);
417 static void distribute_notes (rtx, rtx, rtx, rtx, rtx, rtx);
418 static void distribute_links (rtx);
419 static void mark_used_regs_combine (rtx);
420 static int insn_cuid (rtx);
421 static void record_promoted_value (rtx, rtx);
422 static int unmentioned_reg_p_1 (rtx *, void *);
423 static bool unmentioned_reg_p (rtx, rtx);
426 /* It is not safe to use ordinary gen_lowpart in combine.
427 See comments in gen_lowpart_for_combine. */
428 #undef RTL_HOOKS_GEN_LOWPART
429 #define RTL_HOOKS_GEN_LOWPART gen_lowpart_for_combine
431 /* Our implementation of gen_lowpart never emits a new pseudo. */
432 #undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
433 #define RTL_HOOKS_GEN_LOWPART_NO_EMIT gen_lowpart_for_combine
435 #undef RTL_HOOKS_REG_NONZERO_REG_BITS
436 #define RTL_HOOKS_REG_NONZERO_REG_BITS reg_nonzero_bits_for_combine
438 #undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
439 #define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES reg_num_sign_bit_copies_for_combine
441 static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;
444 /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
445 insn. The substitution can be undone by undo_all. If INTO is already
446 set to NEWVAL, do not record this change. Because computing NEWVAL might
447 also call SUBST, we have to compute it before we put anything into
/* Record that *INTO is to be replaced by NEWVAL, pushing an undo record
   onto undobuf so the substitution can be reverted by undo_all.
   NOTE(review): this extract is incomplete -- the embedded line numbers
   jump (452-455, 482-496, ...), so the opening brace, the declarations of
   `buf'/`oldval', the early `return', and the closing brace are missing
   from this view.  Comments below annotate only what is visible.  */
451 do_SUBST (rtx *into, rtx newval)
456 if (oldval == newval)
459 /* We'd like to catch as many invalid transformations here as
460 possible. Unfortunately, there are way too many mode changes
461 that are perfectly valid, so we'd waste too much effort for
462 little gain doing the checks here. Focus on catching invalid
463 transformations involving integer constants. */
464 if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
465 && GET_CODE (newval) == CONST_INT)
467 /* Sanity check that we're replacing oldval with a CONST_INT
468 that is a valid sign-extension for the original mode. */
469 gcc_assert (INTVAL (newval)
470 == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));
472 /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
473 CONST_INT is not valid, because after the replacement, the
474 original mode would be gone. Unfortunately, we can't tell
475 when do_SUBST is called to replace the operand thereof, so we
476 perform this test on oldval instead, checking whether an
477 invalid replacement took place before we got here. */
478 gcc_assert (!(GET_CODE (oldval) == SUBREG
479 && GET_CODE (SUBREG_REG (oldval)) == CONST_INT));
480 gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
481 && GET_CODE (XEXP (oldval, 0)) == CONST_INT));
/* Reuse an undo record from the free list when one exists; otherwise
   allocate a fresh one.  */
485 buf = undobuf.frees, undobuf.frees = buf->next;
487 buf = xmalloc (sizeof (struct undo));
/* Save the old rtx value so undo_all can restore *INTO.  */
491 buf->old_contents.r = oldval;
/* Link the record onto the head of the pending-undos list.  */
494 buf->next = undobuf.undos, undobuf.undos = buf;
497 #define SUBST(INTO, NEWVAL) do_SUBST(&(INTO), (NEWVAL))
499 /* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
500 for the value of a HOST_WIDE_INT value (including CONST_INT) is
/* Integer counterpart of do_SUBST: record that the int at *INTO is to be
   replaced by NEWVAL, with an undo record pushed for undo_all.
   NOTE(review): extract is incomplete (lines 505-508, 510-512, ... are
   missing): braces, the `buf'/`oldval' declarations, the early `return',
   and the store of NEWVAL into *INTO are not visible here.  */
504 do_SUBST_INT (int *into, int newval)
509 if (oldval == newval)
/* Reuse a freed undo record if available, else allocate a new one.  */
513 buf = undobuf.frees, undobuf.frees = buf->next;
515 buf = xmalloc (sizeof (struct undo));
/* Save the old int value (union member .i, cf. the rtx case above).  */
519 buf->old_contents.i = oldval;
522 buf->next = undobuf.undos, undobuf.undos = buf;
525 #define SUBST_INT(INTO, NEWVAL) do_SUBST_INT(&(INTO), (NEWVAL))
527 /* Subroutine of try_combine. Determine whether the combine replacement
528 patterns NEWPAT and NEWI2PAT are cheaper according to insn_rtx_cost
529 that the original instruction sequence I1, I2 and I3. Note that I1
530 and/or NEWI2PAT may be NULL_RTX. This function returns false, if the
531 costs of all instructions can be estimated, and the replacements are
532 more expensive than the original sequence. */
/* Decide whether replacing insns I1 (may be NULL), I2 and I3 with the new
   patterns NEWPAT / NEWI2PAT (NEWI2PAT may be NULL) is acceptable on cost
   grounds, per the header comment above: reject only when every cost could
   be estimated (all > 0) and the replacement is strictly more expensive.
   On acceptance, the uid_insn_cost cache is updated in place.
   NOTE(review): extract is incomplete -- the `static bool' return-type
   line, braces, several `if'/`else' lines (546-548, 553-555, 562-563,
   592, 594-599, ...), and the final `return' are missing from view.  */
535 combine_validate_cost (rtx i1, rtx i2, rtx i3, rtx newpat, rtx newi2pat)
537 int i1_cost, i2_cost, i3_cost;
538 int new_i2_cost, new_i3_cost;
539 int old_cost, new_cost;
541 /* Lookup the original insn_rtx_costs.  A cost of 0 means "unknown";
   UIDs beyond the cached range are likewise treated as unknown. */
542 i2_cost = INSN_UID (i2) <= last_insn_cost
543 ? uid_insn_cost[INSN_UID (i2)] : 0;
544 i3_cost = INSN_UID (i3) <= last_insn_cost
545 ? uid_insn_cost[INSN_UID (i3)] : 0;
549 i1_cost = INSN_UID (i1) <= last_insn_cost
550 ? uid_insn_cost[INSN_UID (i1)] : 0;
/* old_cost is the sum only when every component is known; any unknown
   component forces old_cost to 0 (= "cannot compare").  */
551 old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0)
552 ? i1_cost + i2_cost + i3_cost : 0;
556 old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
560 /* Calculate the replacement insn_rtx_costs. */
561 new_i3_cost = insn_rtx_cost (newpat);
564 new_i2_cost = insn_rtx_cost (newi2pat);
565 new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
566 ? new_i2_cost + new_i3_cost : 0;
570 new_cost = new_i3_cost;
/* If combine also modified another insn, fold its old/new costs into
   the comparison -- but only when both are known.  */
574 if (undobuf.other_insn)
576 int old_other_cost, new_other_cost;
578 old_other_cost = (INSN_UID (undobuf.other_insn) <= last_insn_cost
579 ? uid_insn_cost[INSN_UID (undobuf.other_insn)] : 0);
580 new_other_cost = insn_rtx_cost (PATTERN (undobuf.other_insn));
581 if (old_other_cost > 0 && new_other_cost > 0)
583 old_cost += old_other_cost;
584 new_cost += new_other_cost;
590 /* Disallow this recombination if both new_cost and old_cost are
591 greater than zero, and new_cost is greater than old cost. */
593 && new_cost > old_cost)
/* Dump-file diagnostics for a rejected combination (3-insn form,
   then the 2-insn form; the surrounding if/else lines are missing
   from this extract).  */
600 "rejecting combination of insns %d, %d and %d\n",
601 INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
602 fprintf (dump_file, "original costs %d + %d + %d = %d\n",
603 i1_cost, i2_cost, i3_cost, old_cost);
608 "rejecting combination of insns %d and %d\n",
609 INSN_UID (i2), INSN_UID (i3));
610 fprintf (dump_file, "original costs %d + %d = %d\n",
611 i2_cost, i3_cost, old_cost);
616 fprintf (dump_file, "replacement costs %d + %d = %d\n",
617 new_i2_cost, new_i3_cost, new_cost);
620 fprintf (dump_file, "replacement cost %d\n", new_cost);
626 /* Update the uid_insn_cost array with the replacement costs. */
627 uid_insn_cost[INSN_UID (i2)] = new_i2_cost;
628 uid_insn_cost[INSN_UID (i3)] = new_i3_cost;
/* I1 is consumed by the combination; zero its cached cost.  */
630 uid_insn_cost[INSN_UID (i1)] = 0;
635 /* Main entry point for combiner. F is the first insn of the function.
636 NREGS is the first unused pseudo-reg number.
638 Return nonzero if the combiner has turned an indirect jump
639 instruction into a direct jump. */
/* Main entry point of the combine pass (see comment above): scan all insns
   of function F, gather nonzero-bits/sign-copy information, then repeatedly
   try to merge each insn with the insns its LOG_LINKS point at (pairs,
   triples, CC0 chains, and REG_EQUAL-note variants).  Returns nonzero if an
   indirect jump was turned into a direct jump.
   NOTE(review): extract is incomplete -- braces, variable declarations
   (insn, next, prev, i, j, set, note, ...), label/loop framing lines and
   many `if'/`else' lines are missing (see gaps in the embedded numbering).
   Comments annotate only the visible statements.  */
641 combine_instructions (rtx f, unsigned int nregs)
649 rtx links, nextlinks;
650 sbitmap_iterator sbi;
652 int new_direct_jump_p = 0;
/* Reset per-function statistics counters.  */
654 combine_attempts = 0;
657 combine_successes = 0;
659 combine_max_regno = nregs;
/* Install combine's rtl_hooks (gen_lowpart etc.) for the duration of
   the pass; restored to general_rtl_hooks at the end.  */
661 rtl_hooks = combine_rtl_hooks;
663 reg_stat = xcalloc (nregs, sizeof (struct reg_stat));
665 init_recog_no_volatile ();
667 /* Compute maximum uid value so uid_cuid can be allocated. */
669 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
670 if (INSN_UID (insn) > i)
673 uid_cuid = xmalloc ((i + 1) * sizeof (int));
676 nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
678 /* Don't use reg_stat[].nonzero_bits when computing it. This can cause
679 problems when, for example, we have j <<= 1 in a loop. */
681 nonzero_sign_valid = 0;
683 /* Compute the mapping from uids to cuids.
684 Cuids are numbers assigned to insns, like uids,
685 except that cuids increase monotonically through the code.
687 Scan all SETs and see if we can deduce anything about what
688 bits are known to be zero for some registers and how many copies
689 of the sign bit are known to exist for those registers.
691 Also set any known values so that we can use it while searching
692 for what bits are known to be set. */
696 setup_incoming_promotions ();
698 refresh_blocks = sbitmap_alloc (last_basic_block);
699 sbitmap_zero (refresh_blocks);
701 /* Allocate array of current insn_rtx_costs. */
702 uid_insn_cost = xcalloc (max_uid_cuid + 1, sizeof (int));
703 last_insn_cost = max_uid_cuid;
/* First scan: assign cuids, record known register facts, and cache
   each insn's rtx cost.  */
705 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
707 uid_cuid[INSN_UID (insn)] = ++i;
713 note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
715 record_dead_and_set_regs (insn);
/* REG_INC notes modify a register too; treat them as clobbers for
   the nonzero-bits/sign-copies tracking.  */
718 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
719 if (REG_NOTE_KIND (links) == REG_INC)
720 set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
724 /* Record the current insn_rtx_cost of this instruction. */
725 if (NONJUMP_INSN_P (insn))
726 uid_insn_cost[INSN_UID (insn)] = insn_rtx_cost (PATTERN (insn));
728 fprintf(dump_file, "insn_cost %d: %d\n",
729 INSN_UID (insn), uid_insn_cost[INSN_UID (insn)]);
/* The tables are now complete and may be consulted.  */
736 nonzero_sign_valid = 1;
738 /* Now scan all the insns in forward order. */
744 setup_incoming_promotions ();
746 FOR_EACH_BB (this_basic_block)
748 for (insn = BB_HEAD (this_basic_block);
749 insn != NEXT_INSN (BB_END (this_basic_block));
750 insn = next ? next : NEXT_INSN (insn))
757 else if (INSN_P (insn))
759 /* See if we know about function return values before this
760 insn based upon SUBREG flags. */
761 check_promoted_subreg (insn, PATTERN (insn));
763 /* Try this insn with each insn it links back to. */
765 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
766 if ((next = try_combine (insn, XEXP (links, 0),
767 NULL_RTX, &new_direct_jump_p)) != 0)
770 /* Try each sequence of three linked insns ending with this one. */
772 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
774 rtx link = XEXP (links, 0);
776 /* If the linked insn has been replaced by a note, then there
777 is no point in pursuing this chain any further. */
781 for (nextlinks = LOG_LINKS (link);
783 nextlinks = XEXP (nextlinks, 1))
784 if ((next = try_combine (insn, link,
786 &new_direct_jump_p)) != 0)
791 /* Try to combine a jump insn that uses CC0
792 with a preceding insn that sets CC0, and maybe with its
793 logical predecessor as well.
794 This is how we make decrement-and-branch insns.
795 We need this special code because data flow connections
796 via CC0 do not get entered in LOG_LINKS. */
799 && (prev = prev_nonnote_insn (insn)) != 0
800 && NONJUMP_INSN_P (prev)
801 && sets_cc0_p (PATTERN (prev)))
803 if ((next = try_combine (insn, prev,
804 NULL_RTX, &new_direct_jump_p)) != 0)
807 for (nextlinks = LOG_LINKS (prev); nextlinks;
808 nextlinks = XEXP (nextlinks, 1))
809 if ((next = try_combine (insn, prev,
811 &new_direct_jump_p)) != 0)
815 /* Do the same for an insn that explicitly references CC0. */
816 if (NONJUMP_INSN_P (insn)
817 && (prev = prev_nonnote_insn (insn)) != 0
818 && NONJUMP_INSN_P (prev)
819 && sets_cc0_p (PATTERN (prev))
820 && GET_CODE (PATTERN (insn)) == SET
821 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
823 if ((next = try_combine (insn, prev,
824 NULL_RTX, &new_direct_jump_p)) != 0)
827 for (nextlinks = LOG_LINKS (prev); nextlinks;
828 nextlinks = XEXP (nextlinks, 1))
829 if ((next = try_combine (insn, prev,
831 &new_direct_jump_p)) != 0)
835 /* Finally, see if any of the insns that this insn links to
836 explicitly references CC0. If so, try this insn, that insn,
837 and its predecessor if it sets CC0. */
838 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
839 if (NONJUMP_INSN_P (XEXP (links, 0))
840 && GET_CODE (PATTERN (XEXP (links, 0))) == SET
841 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
842 && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
843 && NONJUMP_INSN_P (prev)
844 && sets_cc0_p (PATTERN (prev))
845 && (next = try_combine (insn, XEXP (links, 0),
846 prev, &new_direct_jump_p)) != 0)
850 /* Try combining an insn with two different insns whose results it
852 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
853 for (nextlinks = XEXP (links, 1); nextlinks;
854 nextlinks = XEXP (nextlinks, 1))
855 if ((next = try_combine (insn, XEXP (links, 0),
857 &new_direct_jump_p)) != 0)
860 /* Try this insn with each REG_EQUAL note it links back to. */
861 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
864 rtx temp = XEXP (links, 0);
865 if ((set = single_set (temp)) != 0
866 && (note = find_reg_equal_equiv_note (temp)) != 0
867 && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
868 /* Avoid using a register that may already been marked
869 dead by an earlier instruction. */
870 && ! unmentioned_reg_p (note, SET_SRC (set))
871 && (GET_MODE (note) == VOIDmode
872 ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
873 : GET_MODE (SET_DEST (set)) == GET_MODE (note)))
875 /* Temporarily replace the set's source with the
876 contents of the REG_EQUAL note. The insn will
877 be deleted or recognized by try_combine. */
878 rtx orig = SET_SRC (set);
879 SET_SRC (set) = note;
880 next = try_combine (insn, temp, NULL_RTX,
884 SET_SRC (set) = orig;
889 record_dead_and_set_regs (insn);
/* Post-pass: mark blocks needing life re-analysis dirty, clean up
   dead edges / noop moves, and re-run life analysis.  */
898 EXECUTE_IF_SET_IN_SBITMAP (refresh_blocks, 0, j, sbi)
899 BASIC_BLOCK (j)->flags |= BB_DIRTY;
900 new_direct_jump_p |= purge_all_dead_edges ();
901 delete_noop_moves ();
903 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
904 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
905 | PROP_KILL_DEAD_CODE);
/* Release pass-local allocations (uid_cuid/reg_stat frees are in the
   missing lines) and drain the undo free list.  */
908 sbitmap_free (refresh_blocks);
909 free (uid_insn_cost);
914 struct undo *undo, *next;
915 for (undo = undobuf.frees; undo; undo = next)
/* Accumulate this function's statistics into the whole-compilation
   totals.  */
923 total_attempts += combine_attempts;
924 total_merges += combine_merges;
925 total_extras += combine_extras;
926 total_successes += combine_successes;
/* Invalidate the reg_stat tables and restore the default hooks.  */
928 nonzero_sign_valid = 0;
929 rtl_hooks = general_rtl_hooks;
931 /* Make recognizer allow volatile MEMs again. */
934 return new_direct_jump_p;
937 /* Wipe the last_xxx fields of reg_stat in preparation for another pass. */
943 for (i = 0; i < combine_max_regno; i++)
944 memset (reg_stat + i, 0, offsetof (struct reg_stat, sign_bit_copies));
947 /* Set up any promoted values for incoming argument registers. */
/* Record promoted values for incoming argument registers: for each hard
   register that can carry a promoted incoming argument, record at the
   function start that it holds a ZERO_EXTEND/SIGN_EXTEND of a narrower
   value (paired with a CLOBBER placeholder, visible on the last line).
   NOTE(review): extract is incomplete -- the return-type line, braces,
   and the declarations of `regno'/`reg'/`unsignedp' are missing; the
   record_value_for_reg call on the last two lines is also truncated.  */
950 setup_incoming_promotions (void)
954 enum machine_mode mode;
956 rtx first = get_insns ();
958 if (targetm.calls.promote_function_args (TREE_TYPE (cfun->decl)))
960 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
961 /* Check whether this register can hold an incoming pointer
962 argument. FUNCTION_ARG_REGNO_P tests outgoing register
963 numbers, so translate if necessary due to register windows. */
964 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
965 && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
968 (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
971 gen_rtx_CLOBBER (mode, const0_rtx)));
976 /* Called via note_stores. If X is a pseudo that is narrower than
977 HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
979 If we are setting only a portion of X and we can't figure out what
980 portion, assume all bits will be used since we don't know what will
983 Similarly, set how many bits of X are known to be copies of the sign bit
984 at all locations in the function. This is the smallest number implied
/* note_stores callback (cf. the header comment above): when a pseudo X no
   wider than HOST_BITS_PER_WIDE_INT is stored to by SET, update
   reg_stat[REGNO (x)].nonzero_bits and .sign_bit_copies.  A CLOBBER or an
   unanalyzable partial store conservatively records "all bits may be
   nonzero, only 1 sign-bit copy".
   NOTE(review): extract is incomplete -- the return-type line, braces,
   the leading REG_P (x) test, and the declaration of `num' are among the
   missing lines; comments annotate only the visible statements.  */
988 set_nonzero_bits_and_sign_copies (rtx x, rtx set,
989 void *data ATTRIBUTE_UNUSED)
994 && REGNO (x) >= FIRST_PSEUDO_REGISTER
995 /* If this register is undefined at the start of the file, we can't
996 say what its contents were. */
998 (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start, REGNO (x))
999 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
/* A CLOBBER (or a REG_INC note passed with set == 0) invalidates
   everything we know about X.  */
1001 if (set == 0 || GET_CODE (set) == CLOBBER)
1003 reg_stat[REGNO (x)].nonzero_bits = GET_MODE_MASK (GET_MODE (x));
1004 reg_stat[REGNO (x)].sign_bit_copies = 1;
1008 /* If this is a complex assignment, see if we can convert it into a
1009 simple assignment. */
1010 set = expand_field_assignment (set);
1012 /* If this is a simple assignment, or we have a paradoxical SUBREG,
1013 set what we know about X. */
1015 if (SET_DEST (set) == x
1016 || (GET_CODE (SET_DEST (set)) == SUBREG
1017 && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
1018 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
1019 && SUBREG_REG (SET_DEST (set)) == x))
1021 rtx src = SET_SRC (set);
1023 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
1024 /* If X is narrower than a word and SRC is a non-negative
1025 constant that would appear negative in the mode of X,
1026 sign-extend it for use in reg_stat[].nonzero_bits because some
1027 machines (maybe most) will actually do the sign-extension
1028 and this is the conservative approach.
1030 ??? For 2.5, try to tighten up the MD files in this regard
1031 instead of this kludge. */
1033 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
1034 && GET_CODE (src) == CONST_INT
1036 && 0 != (INTVAL (src)
1037 & ((HOST_WIDE_INT) 1
1038 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
1039 src = GEN_INT (INTVAL (src)
1040 | ((HOST_WIDE_INT) (-1)
1041 << GET_MODE_BITSIZE (GET_MODE (x))));
1044 /* Don't call nonzero_bits if it cannot change anything. */
1045 if (reg_stat[REGNO (x)].nonzero_bits != ~(unsigned HOST_WIDE_INT) 0)
1046 reg_stat[REGNO (x)].nonzero_bits
1047 |= nonzero_bits (src, nonzero_bits_mode);
/* Keep the smallest sign-bit-copy count seen across all stores,
   since the recorded value must hold at every set of X.  */
1048 num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
1049 if (reg_stat[REGNO (x)].sign_bit_copies == 0
1050 || reg_stat[REGNO (x)].sign_bit_copies > num)
1051 reg_stat[REGNO (x)].sign_bit_copies = num;
/* Fallback (else-branch framing lines are missing here): a partial
   store we cannot analyze invalidates everything known about X.  */
1055 reg_stat[REGNO (x)].nonzero_bits = GET_MODE_MASK (GET_MODE (x));
1056 reg_stat[REGNO (x)].sign_bit_copies = 1;
1061 /* See if INSN can be combined into I3. PRED and SUCC are optionally
1062 insns that were previously combined into I3 or that will be combined
1063 into the merger of INSN and I3.
1065 Return 0 if the combination is not allowed for any reason.
1067 If the combination is allowed, *PDEST will be set to the single
1068 destination of INSN and *PSRC to the single source, and this function
1072 can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
1073 rtx *pdest, rtx *psrc)
1076 rtx set = 0, src, dest;
/* NOTE(review): all_adjacent is nonzero iff INSN (and SUCC, when given)
   immediately precede I3 with no other active insns in between; several
   of the safety tests below are relaxed in that case.  */
1081 int all_adjacent = (succ ? (next_active_insn (insn) == succ
1082 && next_active_insn (succ) == i3)
1083 : next_active_insn (insn) == i3);
1085 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
1086 or a PARALLEL consisting of such a SET and CLOBBERs.
1088 If INSN has CLOBBER parallel parts, ignore them for our processing.
1089 By definition, these happen during the execution of the insn. When it
1090 is merged with another insn, all bets are off. If they are, in fact,
1091 needed and aren't also supplied in I3, they may be added by
1092 recog_for_combine. Otherwise, it won't match.
1094 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
1097 Get the source and destination of INSN. If more than one, can't
1100 if (GET_CODE (PATTERN (insn)) == SET)
1101 set = PATTERN (insn);
1102 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1103 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1105 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1107 rtx elt = XVECEXP (PATTERN (insn), 0, i);
1110 switch (GET_CODE (elt))
1112 /* This is important to combine floating point insns
1113 for the SH4 port. */
1115 /* Combining an isolated USE doesn't make sense.
1116 We depend here on combinable_i3pat to reject them. */
1117 /* The code below this loop only verifies that the inputs of
1118 the SET in INSN do not change. We call reg_set_between_p
1119 to verify that the REG in the USE does not change between
1121 If the USE in INSN was for a pseudo register, the matching
1122 insn pattern will likely match any register; combining this
1123 with any other USE would only be safe if we knew that the
1124 used registers have identical values, or if there was
1125 something to tell them apart, e.g. different modes. For
1126 now, we forgo such complicated tests and simply disallow
1127 combining of USES of pseudo registers with any other USE. */
1128 if (REG_P (XEXP (elt, 0))
1129 && GET_CODE (PATTERN (i3)) == PARALLEL)
1131 rtx i3pat = PATTERN (i3);
1132 int i = XVECLEN (i3pat, 0) - 1;
1133 unsigned int regno = REGNO (XEXP (elt, 0));
/* NOTE(review): walk I3's PARALLEL looking for a USE of the same hard
   register; reject if that register changes between INSN and I3, or if
   a pseudo is USEd by both (see comment above).  */
1137 rtx i3elt = XVECEXP (i3pat, 0, i);
1139 if (GET_CODE (i3elt) == USE
1140 && REG_P (XEXP (i3elt, 0))
1141 && (REGNO (XEXP (i3elt, 0)) == regno
1142 ? reg_set_between_p (XEXP (elt, 0),
1143 PREV_INSN (insn), i3)
1144 : regno >= FIRST_PSEUDO_REGISTER))
1151 /* We can ignore CLOBBERs. */
1156 /* Ignore SETs whose result isn't used but not those that
1157 have side-effects. */
1158 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
1159 && (!(note = find_reg_note (insn, REG_EH_REGION, NULL_RTX))
1160 || INTVAL (XEXP (note, 0)) <= 0)
1161 && ! side_effects_p (elt))
1164 /* If we have already found a SET, this is a second one and
1165 so we cannot combine with this insn. */
1173 /* Anything else means we can't combine. */
1179 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
1180 so don't do anything with it. */
1181 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
1190 set = expand_field_assignment (set);
1191 src = SET_SRC (set), dest = SET_DEST (set);
1193 /* Don't eliminate a store in the stack pointer. */
1194 if (dest == stack_pointer_rtx
1195 /* Don't combine with an insn that sets a register to itself if it has
1196 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
1197 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1198 /* Can't merge an ASM_OPERANDS. */
1199 || GET_CODE (src) == ASM_OPERANDS
1200 /* Can't merge a function call. */
1201 || GET_CODE (src) == CALL
1202 /* Don't eliminate a function call argument. */
1204 && (find_reg_fusage (i3, USE, dest)
1206 && REGNO (dest) < FIRST_PSEUDO_REGISTER
1207 && global_regs[REGNO (dest)])))
1208 /* Don't substitute into an incremented register. */
1209 || FIND_REG_INC_NOTE (i3, dest)
1210 || (succ && FIND_REG_INC_NOTE (succ, dest))
1211 /* Don't substitute into a non-local goto, this confuses CFG. */
1212 || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
1214 /* Don't combine the end of a libcall into anything. */
1215 /* ??? This gives worse code, and appears to be unnecessary, since no
1216 pass after flow uses REG_LIBCALL/REG_RETVAL notes. Local-alloc does
1217 use REG_RETVAL notes for noconflict blocks, but other code here
1218 makes sure that those insns don't disappear. */
1219 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
1221 /* Make sure that DEST is not used after SUCC but before I3. */
1222 || (succ && ! all_adjacent
1223 && reg_used_between_p (dest, succ, i3))
1224 /* Make sure that the value that is to be substituted for the register
1225 does not use any registers whose values alter in between. However,
1226 If the insns are adjacent, a use can't cross a set even though we
1227 think it might (this can happen for a sequence of insns each setting
1228 the same destination; last_set of that register might point to
1229 a NOTE). If INSN has a REG_EQUIV note, the register is always
1230 equivalent to the memory so the substitution is valid even if there
1231 are intervening stores. Also, don't move a volatile asm or
1232 UNSPEC_VOLATILE across any other insns. */
1235 || ! find_reg_note (insn, REG_EQUIV, src))
1236 && use_crosses_set_p (src, INSN_CUID (insn)))
1237 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
1238 || GET_CODE (src) == UNSPEC_VOLATILE))
1239 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
1240 better register allocation by not doing the combine. */
1241 || find_reg_note (i3, REG_NO_CONFLICT, dest)
1242 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
1243 /* Don't combine across a CALL_INSN, because that would possibly
1244 change whether the life span of some REGs crosses calls or not,
1245 and it is a pain to update that information.
1246 Exception: if source is a constant, moving it later can't hurt.
1247 Accept that special case, because it helps -fforce-addr a lot. */
1248 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
1251 /* DEST must either be a REG or CC0. */
1254 /* If register alignment is being enforced for multi-word items in all
1255 cases except for parameters, it is possible to have a register copy
1256 insn referencing a hard register that is not allowed to contain the
1257 mode being copied and which would not be valid as an operand of most
1258 insns. Eliminate this problem by not combining with such an insn.
1260 Also, on some machines we don't want to extend the life of a hard
1264 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1265 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
1266 /* Don't extend the life of a hard register unless it is
1267 user variable (if we have few registers) or it can't
1268 fit into the desired register (meaning something special
1270 Also avoid substituting a return register into I3, because
1271 reload can't handle a conflict with constraints of other
1273 || (REGNO (src) < FIRST_PSEUDO_REGISTER
1274 && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
1277 else if (GET_CODE (dest) != CC0)
1281 if (GET_CODE (PATTERN (i3)) == PARALLEL)
1282 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1283 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
1285 /* Don't substitute for a register intended as a clobberable
1287 rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
1288 if (rtx_equal_p (reg, dest))
1291 /* If the clobber represents an earlyclobber operand, we must not
1292 substitute an expression containing the clobbered register.
1293 As we do not analyze the constraint strings here, we have to
1294 make the conservative assumption. However, if the register is
1295 a fixed hard reg, the clobber cannot represent any operand;
1296 we leave it up to the machine description to either accept or
1297 reject use-and-clobber patterns. */
1299 || REGNO (reg) >= FIRST_PSEUDO_REGISTER
1300 || !fixed_regs[REGNO (reg)])
1301 if (reg_overlap_mentioned_p (reg, src))
1305 /* If INSN contains anything volatile, or is an `asm' (whether volatile
1306 or not), reject, unless nothing volatile comes between it and I3 */
1308 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
1310 /* Make sure succ doesn't contain a volatile reference. */
1311 if (succ != 0 && volatile_refs_p (PATTERN (succ)))
/* NOTE(review): reject if any volatile reference occurs in an insn
   strictly between INSN and I3 (SUCC excluded; it was checked above).  */
1314 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1315 if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
1319 /* If INSN is an asm, and DEST is a hard register, reject, since it has
1320 to be an explicit register variable, and was chosen for a reason. */
1322 if (GET_CODE (src) == ASM_OPERANDS
1323 && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
1326 /* If there are any volatile insns between INSN and I3, reject, because
1327 they might affect machine state. */
1329 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1330 if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
1333 /* If INSN contains an autoincrement or autodecrement, make sure that
1334 register is not used between there and I3, and not already used in
1335 I3 either. Neither must it be used in PRED or SUCC, if they exist.
1336 Also insist that I3 not be a jump; if it were one
1337 and the incremented register were spilled, we would lose. */
1340 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1341 if (REG_NOTE_KIND (link) == REG_INC
1343 || reg_used_between_p (XEXP (link, 0), insn, i3)
1344 || (pred != NULL_RTX
1345 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
1346 || (succ != NULL_RTX
1347 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
1348 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1353 /* Don't combine an insn that follows a CC0-setting insn.
1354 An insn that uses CC0 must not be separated from the one that sets it.
1355 We do, however, allow I2 to follow a CC0-setting insn if that insn
1356 is passed as I1; in that case it will be deleted also.
1357 We also allow combining in this case if all the insns are adjacent
1358 because that would leave the two CC0 insns adjacent as well.
1359 It would be more logical to test whether CC0 occurs inside I1 or I2,
1360 but that would be much slower, and this ought to be equivalent. */
1362 p = prev_nonnote_insn (insn);
1363 if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
1368 /* If we get here, we have passed all the tests and the combination is
1377 /* LOC is the location within I3 that contains its pattern or the component
1378 of a PARALLEL of the pattern. We validate that it is valid for combining.
1380 One problem is if I3 modifies its output, as opposed to replacing it
1381 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1382 so would produce an insn that is not equivalent to the original insns.
1386 (set (reg:DI 101) (reg:DI 100))
1387 (set (subreg:SI (reg:DI 101) 0) <foo>)
1389 This is NOT equivalent to:
1391 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1392 (set (reg:DI 101) (reg:DI 100))])
1394 Not only does this modify 100 (in which case it might still be valid
1395 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1397 We can also run into a problem if I2 sets a register that I1
1398 uses and I1 gets directly substituted into I3 (not via I2). In that
1399 case, we would be getting the wrong value of I2DEST into I3, so we
1400 must reject the combination. This case occurs when I2 and I1 both
1401 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1402 If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
1403 of a SET must prevent combination from occurring.
1405 Before doing the above check, we first try to expand a field assignment
1406 into a set of logical operations.
1408 If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
1409 we place a register that is both set and used within I3. If more than one
1410 such register is detected, we fail.
1412 Return 1 if the combination is valid, zero otherwise. */
1415 combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
1416 int i1_not_in_src, rtx *pi3dest_killed)
1420 if (GET_CODE (x) == SET)
1423 rtx dest = SET_DEST (set);
1424 rtx src = SET_SRC (set);
1425 rtx inner_dest = dest;
/* Strip wrappers so inner_dest is the object actually being stored
   into (a REG or MEM), for the partial-modification test below.  */
1428 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1429 || GET_CODE (inner_dest) == SUBREG
1430 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1431 inner_dest = XEXP (inner_dest, 0);
1433 /* Check for the case where I3 modifies its output, as discussed
1434 above. We don't want to prevent pseudos from being combined
1435 into the address of a MEM, so only prevent the combination if
1436 i1 or i2 set the same MEM. */
1437 if ((inner_dest != dest &&
1438 (!MEM_P (inner_dest)
1439 || rtx_equal_p (i2dest, inner_dest)
1440 || (i1dest && rtx_equal_p (i1dest, inner_dest)))
1441 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1442 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
1444 /* This is the same test done in can_combine_p except we can't test
1445 all_adjacent; we don't have to, since this instruction will stay
1446 in place, thus we are not considering increasing the lifetime of
1449 Also, if this insn sets a function argument, combining it with
1450 something that might need a spill could clobber a previous
1451 function argument; the all_adjacent test in can_combine_p also
1452 checks this; here, we do a more specific test for this case. */
1454 || (REG_P (inner_dest)
1455 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1456 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1457 GET_MODE (inner_dest))))
1458 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1461 /* If DEST is used in I3, it is being killed in this insn, so
1462 record that for later. We have to consider paradoxical
1463 subregs here, since they kill the whole register, but we
1464 ignore partial subregs, STRICT_LOW_PART, etc.
1465 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1466 STACK_POINTER_REGNUM, since these are always considered to be
1467 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
/* NOTE(review): subdest is presumably initialized from DEST in an elided
   line above -- confirm against the full source.  A paradoxical SUBREG
   (outer mode at least as wide as the inner reg) is stripped so the
   whole register is recorded.  */
1469 if (GET_CODE (subdest) == SUBREG
1470 && (GET_MODE_SIZE (GET_MODE (subdest))
1471 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (subdest)))))
1472 subdest = SUBREG_REG (subdest);
1475 && reg_referenced_p (subdest, PATTERN (i3))
1476 && REGNO (subdest) != FRAME_POINTER_REGNUM
1477 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1478 && REGNO (subdest) != HARD_FRAME_POINTER_REGNUM
1480 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1481 && (REGNO (subdest) != ARG_POINTER_REGNUM
1482 || ! fixed_regs [REGNO (subdest)])
1484 && REGNO (subdest) != STACK_POINTER_REGNUM)
1486 if (*pi3dest_killed)
1489 *pi3dest_killed = subdest;
/* A PARALLEL is combinable iff every one of its elements is.  */
1493 else if (GET_CODE (x) == PARALLEL)
1497 for (i = 0; i < XVECLEN (x, 0); i++)
1498 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1499 i1_not_in_src, pi3dest_killed))
1506 /* Return 1 if X is an arithmetic expression that contains a multiplication
1507 and division. We don't count multiplications by powers of two here. */
1510 contains_muldiv (rtx x)
1512 switch (GET_CODE (x))
1514 case MOD: case DIV: case UMOD: case UDIV:
1518 return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
1519 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
1522 return contains_muldiv (XEXP (x, 0))
1523 || contains_muldiv (XEXP (x, 1));
1526 return contains_muldiv (XEXP (x, 0));
1532 /* Determine whether INSN can be used in a combination. Return nonzero if
1533 not. This is used in try_combine to detect early some cases where we
1534 can't perform combinations. */
1537 cant_combine_insn_p (rtx insn)
1542 /* If this isn't really an insn, we can't do anything.
1543 This can occur when flow deletes an insn that it has merged into an
1544 auto-increment address. */
1545 if (! INSN_P (insn))
1548 /* Never combine loads and stores involving hard regs that are likely
1549 to be spilled. The register allocator can usually handle such
1550 reg-reg moves by tying. If we allow the combiner to make
1551 substitutions of likely-spilled regs, reload might die.
1552 As an exception, we allow combinations involving fixed regs; these are
1553 not available to the register allocator so there's no risk involved. */
1555 set = single_set (insn);
1558 src = SET_SRC (set);
1559 dest = SET_DEST (set);
1560 if (GET_CODE (src) == SUBREG)
1561 src = SUBREG_REG (src);
1562 if (GET_CODE (dest) == SUBREG)
1563 dest = SUBREG_REG (dest);
1564 if (REG_P (src) && REG_P (dest)
1565 && ((REGNO (src) < FIRST_PSEUDO_REGISTER
1566 && ! fixed_regs[REGNO (src)]
1567 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src))))
1568 || (REGNO (dest) < FIRST_PSEUDO_REGISTER
1569 && ! fixed_regs[REGNO (dest)]
1570 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
/* Bookkeeping shared between likely_spilled_retval_p and its note_stores
   callback likely_spilled_retval_1.  REGNO/NREGS describe the window of
   hard registers holding the function's return value; MASK has one bit
   per register in that window, still set while the register is
   (possibly) live.  The MASK member and closing brace were missing from
   this declaration even though both callers use them (info->mask,
   info.mask below); restored here.  */
struct likely_spilled_retval_info
{
  unsigned regno, nregs;
  unsigned mask;
};
1582 /* Called via note_stores by likely_spilled_retval_p. Remove from info->mask
1583 hard registers that are known to be written to / clobbered in full. */
1585 likely_spilled_retval_1 (rtx x, rtx set, void *data)
1587 struct likely_spilled_retval_info *info = data;
1588 unsigned regno, nregs;
1591 if (!REG_P (XEXP (set, 0)))
1594 if (regno >= info->regno + info->nregs)
1596 nregs = hard_regno_nregs[regno][GET_MODE (x)];
1597 if (regno + nregs <= info->regno)
1599 new_mask = (2U << (nregs - 1)) - 1;
1600 if (regno < info->regno)
1601 new_mask >>= info->regno - regno;
1603 new_mask <<= regno - info->regno;
1604 info->mask &= new_mask;
1607 /* Return nonzero iff part of the return value is live during INSN, and
1608 it is likely spilled. This can happen when more than one insn is needed
1609 to copy the return value, e.g. when we consider to combine into the
1610 second copy insn for a complex value. */
1613 likely_spilled_retval_p (rtx insn)
1615 rtx use = BB_END (this_basic_block);
1617 unsigned regno, nregs;
1618 /* We assume here that no machine mode needs more than
1619 32 hard registers when the value overlaps with a register
1620 for which FUNCTION_VALUE_REGNO_P is true. */
1622 struct likely_spilled_retval_info info;
1624 if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
1626 reg = XEXP (PATTERN (use), 0);
1627 if (!REG_P (reg) || !FUNCTION_VALUE_REGNO_P (REGNO (reg)))
1629 regno = REGNO (reg);
1630 nregs = hard_regno_nregs[regno][GET_MODE (reg)];
1633 mask = (2U << (nregs - 1)) - 1;
1635 /* Disregard parts of the return value that are set later. */
1639 for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
1640 note_stores (PATTERN (insn), likely_spilled_retval_1, &info);
1643 /* Check if any of the (probably) live return value registers is
1648 if ((mask & 1 << nregs)
1649 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno + nregs)))
1655 /* Adjust INSN after we made a change to its destination.
1657 Changing the destination can invalidate notes that say something about
1658 the results of the insn and a LOG_LINK pointing to the insn. */
1661 adjust_for_new_dest (rtx insn)
1665 /* For notes, be conservative and simply remove them. */
1666 loc = ®_NOTES (insn);
1669 enum reg_note kind = REG_NOTE_KIND (*loc);
1670 if (kind == REG_EQUAL || kind == REG_EQUIV)
1671 *loc = XEXP (*loc, 1);
1673 loc = &XEXP (*loc, 1);
1676 /* The new insn will have a destination that was previously the destination
1677 of an insn just above it. Call distribute_links to make a LOG_LINK from
1678 the next use of that destination. */
1679 distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));
1682 /* Return TRUE if combine can reuse reg X in mode MODE.
1683 ADDED_SETS is nonzero if the original set is still required. */
1685 can_change_dest_mode (rtx x, int added_sets, enum machine_mode mode)
1693 /* Allow hard registers if the new mode is legal, and occupies no more
1694 registers than the old mode. */
1695 if (regno < FIRST_PSEUDO_REGISTER)
1696 return (HARD_REGNO_MODE_OK (regno, mode)
1697 && (hard_regno_nregs[regno][GET_MODE (x)]
1698 >= hard_regno_nregs[regno][mode]));
1700 /* Or a pseudo that is only used once. */
1701 return (REG_N_SETS (regno) == 1 && !added_sets
1702 && !REG_USERVAR_P (x));
1705 /* Try to combine the insns I1 and I2 into I3.
1706 Here I1 and I2 appear earlier than I3.
1707 I1 can be zero; then we combine just I2 into I3.
1709 If we are combining three insns and the resulting insn is not recognized,
1710 try splitting it into two insns. If that happens, I2 and I3 are retained
1711 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1714 Return 0 if the combination does not work. Then nothing is changed.
1715 If we did the combination, return the insn at which combine should
1718 Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
1719 new direct jump instruction. */
1722 try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
1724 /* New patterns for I3 and I2, respectively. */
1725 rtx newpat, newi2pat = 0;
1726 rtvec newpat_vec_with_clobbers = 0;
1727 int substed_i2 = 0, substed_i1 = 0;
1728 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1729 int added_sets_1, added_sets_2;
1730 /* Total number of SETs to put into I3. */
1732 /* Nonzero if I2's body now appears in I3. */
1734 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1735 int insn_code_number, i2_code_number = 0, other_code_number = 0;
1736 /* Contains I3 if the destination of I3 is used in its source, which means
1737 that the old life of I3 is being killed. If that usage is placed into
1738 I2 and not in I3, a REG_DEAD note must be made. */
1739 rtx i3dest_killed = 0;
1740 /* SET_DEST and SET_SRC of I2 and I1. */
1741 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1742 /* PATTERN (I2), or a copy of it in certain cases. */
1744 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
1745 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1746 int i2dest_killed = 0, i1dest_killed = 0;
1747 int i1_feeds_i3 = 0;
1748 /* Notes that must be added to REG_NOTES in I3 and I2. */
1749 rtx new_i3_notes, new_i2_notes;
1750 /* Notes that we substituted I3 into I2 instead of the normal case. */
1751 int i3_subst_into_i2 = 0;
1752 /* Notes that I1, I2 or I3 is a MULT operation. */
1761 /* Exit early if one of the insns involved can't be used for
1763 if (cant_combine_insn_p (i3)
1764 || cant_combine_insn_p (i2)
1765 || (i1 && cant_combine_insn_p (i1))
1766 || likely_spilled_retval_p (i3)
1767 /* We also can't do anything if I3 has a
1768 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1771 /* ??? This gives worse code, and appears to be unnecessary, since no
1772 pass after flow uses REG_LIBCALL/REG_RETVAL notes. */
1773 || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1779 undobuf.other_insn = 0;
1781 /* Reset the hard register usage information. */
1782 CLEAR_HARD_REG_SET (newpat_used_regs);
1784 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1785 code below, set I1 to be the earlier of the two insns. */
1786 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1787 temp = i1, i1 = i2, i2 = temp;
1789 added_links_insn = 0;
1791 /* First check for one important special-case that the code below will
1792 not handle. Namely, the case where I1 is zero, I2 is a PARALLEL
1793 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1794 we may be able to replace that destination with the destination of I3.
1795 This occurs in the common code where we compute both a quotient and
1796 remainder into a structure, in which case we want to do the computation
1797 directly into the structure to avoid register-register copies.
1799 Note that this case handles both multiple sets in I2 and also
1800 cases where I2 has a number of CLOBBER or PARALLELs.
1802 We make very conservative checks below and only try to handle the
1803 most common cases of this. For example, we only handle the case
1804 where I2 and I3 are adjacent to avoid making difficult register
1807 if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
1808 && REG_P (SET_SRC (PATTERN (i3)))
1809 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1810 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1811 && GET_CODE (PATTERN (i2)) == PARALLEL
1812 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1813 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1814 below would need to check what is inside (and reg_overlap_mentioned_p
1815 doesn't support those codes anyway). Don't allow those destinations;
1816 the resulting insn isn't likely to be recognized anyway. */
1817 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1818 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1819 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1820 SET_DEST (PATTERN (i3)))
1821 && next_real_insn (i2) == i3)
1823 rtx p2 = PATTERN (i2);
1825 /* Make sure that the destination of I3,
1826 which we are going to substitute into one output of I2,
1827 is not used within another output of I2. We must avoid making this:
1828 (parallel [(set (mem (reg 69)) ...)
1829 (set (reg 69) ...)])
1830 which is not well-defined as to order of actions.
1831 (Besides, reload can't handle output reloads for this.)
1833 The problem can also happen if the dest of I3 is a memory ref,
1834 if another dest in I2 is an indirect memory ref. */
1835 for (i = 0; i < XVECLEN (p2, 0); i++)
1836 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1837 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1838 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1839 SET_DEST (XVECEXP (p2, 0, i))))
1842 if (i == XVECLEN (p2, 0))
1843 for (i = 0; i < XVECLEN (p2, 0); i++)
1844 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1845 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1846 && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1851 subst_low_cuid = INSN_CUID (i2);
1853 added_sets_2 = added_sets_1 = 0;
1854 i2dest = SET_SRC (PATTERN (i3));
1855 i2dest_killed = dead_or_set_p (i2, i2dest);
1857 /* Replace the dest in I2 with our dest and make the resulting
1858 insn the new pattern for I3. Then skip to where we
1859 validate the pattern. Everything was set up above. */
1860 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1861 SET_DEST (PATTERN (i3)));
1864 i3_subst_into_i2 = 1;
1865 goto validate_replacement;
1869 /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1870 one of those words to another constant, merge them by making a new
1873 && (temp = single_set (i2)) != 0
1874 && (GET_CODE (SET_SRC (temp)) == CONST_INT
1875 || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1876 && REG_P (SET_DEST (temp))
1877 && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1878 && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1879 && GET_CODE (PATTERN (i3)) == SET
1880 && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1881 && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1882 && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1883 && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1884 && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1886 HOST_WIDE_INT lo, hi;
1888 if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1889 lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1892 lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1893 hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1896 if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
1898 /* We don't handle the case of the target word being wider
1899 than a host wide int. */
1900 gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD);
1902 lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
1903 lo |= (INTVAL (SET_SRC (PATTERN (i3)))
1904 & (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1906 else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1907 hi = INTVAL (SET_SRC (PATTERN (i3)));
1908 else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
1910 int sign = -(int) ((unsigned HOST_WIDE_INT) lo
1911 >> (HOST_BITS_PER_WIDE_INT - 1));
1913 lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1914 (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1915 lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1916 (INTVAL (SET_SRC (PATTERN (i3)))));
1918 hi = lo < 0 ? -1 : 0;
1921 /* We don't handle the case of the higher word not fitting
1922 entirely in either hi or lo. */
1927 subst_low_cuid = INSN_CUID (i2);
1928 added_sets_2 = added_sets_1 = 0;
1929 i2dest = SET_DEST (temp);
1930 i2dest_killed = dead_or_set_p (i2, i2dest);
1932 SUBST (SET_SRC (temp),
1933 immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1935 newpat = PATTERN (i2);
1936 goto validate_replacement;
1940 /* If we have no I1 and I2 looks like:
1941 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1943 make up a dummy I1 that is
1946 (set (reg:CC X) (compare:CC Y (const_int 0)))
1948 (We can ignore any trailing CLOBBERs.)
1950 This undoes a previous combination and allows us to match a branch-and-
1953 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1954 && XVECLEN (PATTERN (i2), 0) >= 2
1955 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1956 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1958 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1959 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1960 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1961 && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)))
1962 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1963 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1965 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1966 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1971 /* We make I1 with the same INSN_UID as I2. This gives it
1972 the same INSN_CUID for value tracking. Our fake I1 will
1973 never appear in the insn stream so giving it the same INSN_UID
1974 as I2 will not cause a problem. */
1976 i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1977 BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2),
1978 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1981 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1982 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1983 SET_DEST (PATTERN (i1)));
1988 /* Verify that I2 and I1 are valid for combining. */
1989 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1990 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1996 /* Record whether I2DEST is used in I2SRC and similarly for the other
1997 cases. Knowing this will help in register status updating below. */
1998 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1999 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
2000 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
2001 i2dest_killed = dead_or_set_p (i2, i2dest);
2002 i1dest_killed = i1 && dead_or_set_p (i1, i1dest);
2004 /* See if I1 directly feeds into I3. It does if I1DEST is not used
2006 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
2008 /* Ensure that I3's pattern can be the destination of combines. */
2009 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
2010 i1 && i2dest_in_i1src && i1_feeds_i3,
2017 /* See if any of the insns is a MULT operation. Unless one is, we will
2018 reject a combination that is, since it must be slower. Be conservative
2020 if (GET_CODE (i2src) == MULT
2021 || (i1 != 0 && GET_CODE (i1src) == MULT)
2022 || (GET_CODE (PATTERN (i3)) == SET
2023 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
2026 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
2027 We used to do this EXCEPT in one case: I3 has a post-inc in an
2028 output operand. However, that exception can give rise to insns like
2030 which is a famous insn on the PDP-11 where the value of r3 used as the
2031 source was model-dependent. Avoid this sort of thing. */
2034 if (!(GET_CODE (PATTERN (i3)) == SET
2035 && REG_P (SET_SRC (PATTERN (i3)))
2036 && MEM_P (SET_DEST (PATTERN (i3)))
2037 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
2038 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
2039 /* It's not the exception. */
2042 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
2043 if (REG_NOTE_KIND (link) == REG_INC
2044 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
2046 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
2053 /* See if the SETs in I1 or I2 need to be kept around in the merged
2054 instruction: whenever the value set there is still needed past I3.
2055 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
2057 For the SET in I1, we have two cases: If I1 and I2 independently
2058 feed into I3, the set in I1 needs to be kept around if I1DEST dies
2059 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
2060 in I1 needs to be kept around unless I1DEST dies or is set in either
2061 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
2062 I1DEST. If so, we know I1 feeds into I2. */
2064 added_sets_2 = ! dead_or_set_p (i3, i2dest);
2067 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
2068 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
2070 /* If the set in I2 needs to be kept around, we must make a copy of
2071 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
2072 PATTERN (I2), we are only substituting for the original I1DEST, not into
2073 an already-substituted copy. This also prevents making self-referential
2074 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
2077 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
2078 ? gen_rtx_SET (VOIDmode, i2dest, i2src)
2082 i2pat = copy_rtx (i2pat);
2086 /* Substitute in the latest insn for the regs set by the earlier ones. */
2088 maxreg = max_reg_num ();
2092 /* It is possible that the source of I2 or I1 may be performing an
2093 unneeded operation, such as a ZERO_EXTEND of something that is known
2094 to have the high part zero. Handle that case by letting subst look at
2095 the innermost one of them.
2097 Another way to do this would be to have a function that tries to
2098 simplify a single insn instead of merging two or more insns. We don't
2099 do this because of the potential of infinite loops and because
2100 of the potential extra memory required. However, doing it the way
2101 we are is a bit of a kludge and doesn't catch all cases.
2103 But only do this if -fexpensive-optimizations since it slows things down
2104 and doesn't usually win. */
2106 if (flag_expensive_optimizations)
2108 /* Pass pc_rtx so no substitutions are done, just simplifications. */
2111 subst_low_cuid = INSN_CUID (i1);
2112 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
2116 subst_low_cuid = INSN_CUID (i2);
2117 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
2122 /* Many machines that don't use CC0 have insns that can both perform an
2123 arithmetic operation and set the condition code. These operations will
2124 be represented as a PARALLEL with the first element of the vector
2125 being a COMPARE of an arithmetic operation with the constant zero.
2126 The second element of the vector will set some pseudo to the result
2127 of the same arithmetic operation. If we simplify the COMPARE, we won't
2128 match such a pattern and so will generate an extra insn. Here we test
2129 for this case, where both the comparison and the operation result are
2130 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
2131 I2SRC. Later we will make the PARALLEL that contains I2. */
2133 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
2134 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
2135 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
2136 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
2138 #ifdef SELECT_CC_MODE
2140 enum machine_mode compare_mode;
2143 newpat = PATTERN (i3);
2144 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
2148 #ifdef SELECT_CC_MODE
2149 /* See if a COMPARE with the operand we substituted in should be done
2150 with the mode that is currently being used. If not, do the same
2151 processing we do in `subst' for a SET; namely, if the destination
2152 is used only once, try to replace it with a register of the proper
2153 mode and also replace the COMPARE. */
2154 if (undobuf.other_insn == 0
2155 && (cc_use = find_single_use (SET_DEST (newpat), i3,
2156 &undobuf.other_insn))
2157 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
2159 != GET_MODE (SET_DEST (newpat))))
2161 if (can_change_dest_mode(SET_DEST (newpat), added_sets_2,
2164 unsigned int regno = REGNO (SET_DEST (newpat));
2165 rtx new_dest = gen_rtx_REG (compare_mode, regno);
2167 if (regno >= FIRST_PSEUDO_REGISTER)
2168 SUBST (regno_reg_rtx[regno], new_dest);
2170 SUBST (SET_DEST (newpat), new_dest);
2171 SUBST (XEXP (*cc_use, 0), new_dest);
2172 SUBST (SET_SRC (newpat),
2173 gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
2176 undobuf.other_insn = 0;
2183 n_occurrences = 0; /* `subst' counts here */
2185 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
2186 need to make a unique copy of I2SRC each time we substitute it
2187 to avoid self-referential rtl. */
2189 subst_low_cuid = INSN_CUID (i2);
2190 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
2191 ! i1_feeds_i3 && i1dest_in_i1src);
2194 /* Record whether i2's body now appears within i3's body. */
2195 i2_is_used = n_occurrences;
2198 /* If we already got a failure, don't try to do more. Otherwise,
2199 try to substitute in I1 if we have it. */
2201 if (i1 && GET_CODE (newpat) != CLOBBER)
2203 /* Before we can do this substitution, we must redo the test done
2204 above (see detailed comments there) that ensures that I1DEST
2205 isn't mentioned in any SETs in NEWPAT that are field assignments. */
2207 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
2215 subst_low_cuid = INSN_CUID (i1);
2216 newpat = subst (newpat, i1dest, i1src, 0, 0);
2220 /* Fail if an autoincrement side-effect has been duplicated. Be careful
2221 to count all the ways that I2SRC and I1SRC can be used. */
2222 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
2223 && i2_is_used + added_sets_2 > 1)
2224 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
2225 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
2227 /* Fail if we tried to make a new register. */
2228 || max_reg_num () != maxreg
2229 /* Fail if we couldn't do something and have a CLOBBER. */
2230 || GET_CODE (newpat) == CLOBBER
2231 /* Fail if this new pattern is a MULT and we didn't have one before
2232 at the outer level. */
2233 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
2240 /* If the actions of the earlier insns must be kept
2241 in addition to substituting them into the latest one,
2242 we must make a new PARALLEL for the latest insn
2243 to hold additional the SETs. */
2245 if (added_sets_1 || added_sets_2)
2249 if (GET_CODE (newpat) == PARALLEL)
2251 rtvec old = XVEC (newpat, 0);
2252 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
2253 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2254 memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
2255 sizeof (old->elem[0]) * old->num_elem);
2260 total_sets = 1 + added_sets_1 + added_sets_2;
2261 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2262 XVECEXP (newpat, 0, 0) = old;
2266 XVECEXP (newpat, 0, --total_sets)
2267 = (GET_CODE (PATTERN (i1)) == PARALLEL
2268 ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
2272 /* If there is no I1, use I2's body as is. We used to also not do
2273 the subst call below if I2 was substituted into I3,
2274 but that could lose a simplification. */
2276 XVECEXP (newpat, 0, --total_sets) = i2pat;
2278 /* See comment where i2pat is assigned. */
2279 XVECEXP (newpat, 0, --total_sets)
2280 = subst (i2pat, i1dest, i1src, 0, 0);
2284 /* We come here when we are replacing a destination in I2 with the
2285 destination of I3. */
2286 validate_replacement:
2288 /* Note which hard regs this insn has as inputs. */
2289 mark_used_regs_combine (newpat);
2291 /* If recog_for_combine fails, it strips existing clobbers. If we'll
2292 consider splitting this pattern, we might need these clobbers. */
2293 if (i1 && GET_CODE (newpat) == PARALLEL
2294 && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
2296 int len = XVECLEN (newpat, 0);
2298 newpat_vec_with_clobbers = rtvec_alloc (len);
2299 for (i = 0; i < len; i++)
2300 RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
2303 /* Is the result of combination a valid instruction? */
2304 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2306 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2307 the second SET's destination is a register that is unused and isn't
2308 marked as an instruction that might trap in an EH region. In that case,
2309 we just need the first SET. This can occur when simplifying a divmod
2310 insn. We *must* test for this case here because the code below that
2311 splits two independent SETs doesn't handle this case correctly when it
2312 updates the register status.
2314 It's pointless doing this if we originally had two sets, one from
2315 i3, and one from i2. Combining then splitting the parallel results
2316 in the original i2 again plus an invalid insn (which we delete).
2317 The net effect is only to move instructions around, which makes
2318 debug info less accurate.
2320 Also check the case where the first SET's destination is unused.
2321 That would not cause incorrect code, but does cause an unneeded
2324 if (insn_code_number < 0
2325 && !(added_sets_2 && i1 == 0)
2326 && GET_CODE (newpat) == PARALLEL
2327 && XVECLEN (newpat, 0) == 2
2328 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2329 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2330 && asm_noperands (newpat) < 0)
2332 rtx set0 = XVECEXP (newpat, 0, 0);
2333 rtx set1 = XVECEXP (newpat, 0, 1);
2336 if (((REG_P (SET_DEST (set1))
2337 && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
2338 || (GET_CODE (SET_DEST (set1)) == SUBREG
2339 && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
2340 && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
2341 || INTVAL (XEXP (note, 0)) <= 0)
2342 && ! side_effects_p (SET_SRC (set1)))
2345 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2348 else if (((REG_P (SET_DEST (set0))
2349 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
2350 || (GET_CODE (SET_DEST (set0)) == SUBREG
2351 && find_reg_note (i3, REG_UNUSED,
2352 SUBREG_REG (SET_DEST (set0)))))
2353 && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
2354 || INTVAL (XEXP (note, 0)) <= 0)
2355 && ! side_effects_p (SET_SRC (set0)))
2358 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2360 if (insn_code_number >= 0)
2362 /* If we will be able to accept this, we have made a
2363 change to the destination of I3. This requires us to
2364 do a few adjustments. */
2366 PATTERN (i3) = newpat;
2367 adjust_for_new_dest (i3);
2372 /* If we were combining three insns and the result is a simple SET
2373 with no ASM_OPERANDS that wasn't recognized, try to split it into two
2374 insns. There are two ways to do this. It can be split using a
2375 machine-specific method (like when you have an addition of a large
2376 constant) or by combine in the function find_split_point. */
2378 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2379 && asm_noperands (newpat) < 0)
2381 rtx m_split, *split;
2382 rtx ni2dest = i2dest;
2384 /* See if the MD file can split NEWPAT. If it can't, see if letting it
2385 use I2DEST as a scratch register will help. In the latter case,
2386 convert I2DEST to the mode of the source of NEWPAT if we can. */
2388 m_split = split_insns (newpat, i3);
2390 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2391 inputs of NEWPAT. */
2393 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2394 possible to try that as a scratch reg. This would require adding
2395 more code to make it work though. */
2397 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
2399 enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));
2400 /* If I2DEST is a hard register or the only use of a pseudo,
2401 we can change its mode. */
2402 if (new_mode != GET_MODE (i2dest)
2403 && new_mode != VOIDmode
2404 && can_change_dest_mode (i2dest, added_sets_2, new_mode))
2405 ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
2408 m_split = split_insns (gen_rtx_PARALLEL
2410 gen_rtvec (2, newpat,
2411 gen_rtx_CLOBBER (VOIDmode,
2414 /* If the split with the mode-changed register didn't work, try
2415 the original register. */
2416 if (! m_split && ni2dest != i2dest)
2419 m_split = split_insns (gen_rtx_PARALLEL
2421 gen_rtvec (2, newpat,
2422 gen_rtx_CLOBBER (VOIDmode,
2428 /* If recog_for_combine has discarded clobbers, try to use them
2429 again for the split. */
2430 if (m_split == 0 && newpat_vec_with_clobbers)
2432 = split_insns (gen_rtx_PARALLEL (VOIDmode,
2433 newpat_vec_with_clobbers), i3);
2435 if (m_split && NEXT_INSN (m_split) == NULL_RTX)
2437 m_split = PATTERN (m_split);
2438 insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2439 if (insn_code_number >= 0)
2442 else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
2443 && (next_real_insn (i2) == i3
2444 || ! use_crosses_set_p (PATTERN (m_split), INSN_CUID (i2))))
2447 rtx newi3pat = PATTERN (NEXT_INSN (m_split));
2448 newi2pat = PATTERN (m_split);
2450 i3set = single_set (NEXT_INSN (m_split));
2451 i2set = single_set (m_split);
2453 /* In case we changed the mode of I2DEST, replace it in the
2454 pseudo-register table here. We can't do it above in case this
2455 code doesn't get executed and we do a split the other way. */
2457 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2458 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2460 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2462 /* If I2 or I3 has multiple SETs, we won't know how to track
2463 register status, so don't use these insns. If I2's destination
2464 is used between I2 and I3, we also can't use these insns. */
2466 if (i2_code_number >= 0 && i2set && i3set
2467 && (next_real_insn (i2) == i3
2468 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
2469 insn_code_number = recog_for_combine (&newi3pat, i3,
2471 if (insn_code_number >= 0)
2474 /* It is possible that both insns now set the destination of I3.
2475 If so, we must show an extra use of it. */
2477 if (insn_code_number >= 0)
2479 rtx new_i3_dest = SET_DEST (i3set);
2480 rtx new_i2_dest = SET_DEST (i2set);
2482 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2483 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2484 || GET_CODE (new_i3_dest) == SUBREG)
2485 new_i3_dest = XEXP (new_i3_dest, 0);
2487 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2488 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2489 || GET_CODE (new_i2_dest) == SUBREG)
2490 new_i2_dest = XEXP (new_i2_dest, 0);
2492 if (REG_P (new_i3_dest)
2493 && REG_P (new_i2_dest)
2494 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
2495 REG_N_SETS (REGNO (new_i2_dest))++;
2499 /* If we can split it and use I2DEST, go ahead and see if that
2500 helps things be recognized. Verify that none of the registers
2501 are set between I2 and I3. */
2502 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
2506 /* We need I2DEST in the proper mode. If it is a hard register
2507 or the only use of a pseudo, we can change its mode.
2508 Make sure we don't change a hard register to have a mode that
2509 isn't valid for it, or change the number of registers. */
2510 && (GET_MODE (*split) == GET_MODE (i2dest)
2511 || GET_MODE (*split) == VOIDmode
2512 || can_change_dest_mode (i2dest, added_sets_2,
2514 && (next_real_insn (i2) == i3
2515 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2516 /* We can't overwrite I2DEST if its value is still used by
2518 && ! reg_referenced_p (i2dest, newpat))
2520 rtx newdest = i2dest;
2521 enum rtx_code split_code = GET_CODE (*split);
2522 enum machine_mode split_mode = GET_MODE (*split);
2524 /* Get NEWDEST as a register in the proper mode. We have already
2525 validated that we can do this. */
2526 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2528 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2530 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2531 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2534 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2535 an ASHIFT. This can occur if it was inside a PLUS and hence
2536 appeared to be a memory address. This is a kludge. */
2537 if (split_code == MULT
2538 && GET_CODE (XEXP (*split, 1)) == CONST_INT
2539 && INTVAL (XEXP (*split, 1)) > 0
2540 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2542 SUBST (*split, gen_rtx_ASHIFT (split_mode,
2543 XEXP (*split, 0), GEN_INT (i)));
2544 /* Update split_code because we may not have a multiply
2546 split_code = GET_CODE (*split);
2549 #ifdef INSN_SCHEDULING
2550 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2551 be written as a ZERO_EXTEND. */
2552 if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
2554 #ifdef LOAD_EXTEND_OP
2555 /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
2556 what it really is. */
2557 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
2559 SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
2560 SUBREG_REG (*split)));
2563 SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
2564 SUBREG_REG (*split)));
2568 newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
2569 SUBST (*split, newdest);
2570 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2572 /* recog_for_combine might have added CLOBBERs to newi2pat.
2573 Make sure NEWPAT does not depend on the clobbered regs. */
2574 if (GET_CODE (newi2pat) == PARALLEL)
2575 for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
2576 if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
2578 rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
2579 if (reg_overlap_mentioned_p (reg, newpat))
2586 /* If the split point was a MULT and we didn't have one before,
2587 don't use one now. */
2588 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2589 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2593 /* Check for a case where we loaded from memory in a narrow mode and
2594 then sign extended it, but we need both registers. In that case,
2595 we have a PARALLEL with both loads from the same memory location.
2596 We can split this into a load from memory followed by a register-register
2597 copy. This saves at least one insn, more if register allocation can
2600 We cannot do this if the destination of the first assignment is a
2601 condition code register or cc0. We eliminate this case by making sure
2602 the SET_DEST and SET_SRC have the same mode.
2604 We cannot do this if the destination of the second assignment is
2605 a register that we have already assumed is zero-extended. Similarly
2606 for a SUBREG of such a register. */
2608 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2609 && GET_CODE (newpat) == PARALLEL
2610 && XVECLEN (newpat, 0) == 2
2611 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2612 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2613 && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
2614 == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
2615 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2616 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2617 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2618 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2620 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2621 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2622 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2624 && reg_stat[REGNO (temp)].nonzero_bits != 0
2625 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2626 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2627 && (reg_stat[REGNO (temp)].nonzero_bits
2628 != GET_MODE_MASK (word_mode))))
2629 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2630 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2632 && reg_stat[REGNO (temp)].nonzero_bits != 0
2633 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2634 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2635 && (reg_stat[REGNO (temp)].nonzero_bits
2636 != GET_MODE_MASK (word_mode)))))
2637 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2638 SET_SRC (XVECEXP (newpat, 0, 1)))
2639 && ! find_reg_note (i3, REG_UNUSED,
2640 SET_DEST (XVECEXP (newpat, 0, 0))))
2644 newi2pat = XVECEXP (newpat, 0, 0);
2645 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2646 newpat = XVECEXP (newpat, 0, 1);
2647 SUBST (SET_SRC (newpat),
2648 gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
2649 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2651 if (i2_code_number >= 0)
2652 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2654 if (insn_code_number >= 0)
2658 /* Similarly, check for a case where we have a PARALLEL of two independent
2659 SETs but we started with three insns. In this case, we can do the sets
2660 as two separate insns. This case occurs when some SET allows two
2661 other insns to combine, but the destination of that SET is still live. */
2663 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2664 && GET_CODE (newpat) == PARALLEL
2665 && XVECLEN (newpat, 0) == 2
2666 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2667 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2668 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2669 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2670 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2671 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2672 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2674 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2675 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2676 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2677 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2678 XVECEXP (newpat, 0, 0))
2679 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2680 XVECEXP (newpat, 0, 1))
2681 && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2682 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
2684 /* Normally, it doesn't matter which of the two is done first,
2685 but it does if one references cc0. In that case, it has to
2688 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2690 newi2pat = XVECEXP (newpat, 0, 0);
2691 newpat = XVECEXP (newpat, 0, 1);
2696 newi2pat = XVECEXP (newpat, 0, 1);
2697 newpat = XVECEXP (newpat, 0, 0);
2700 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2702 if (i2_code_number >= 0)
2703 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2706 /* If it still isn't recognized, fail and change things back the way they
2708 if ((insn_code_number < 0
2709 /* Is the result a reasonable ASM_OPERANDS? */
2710 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2716 /* If we had to change another insn, make sure it is valid also. */
2717 if (undobuf.other_insn)
2719 rtx other_pat = PATTERN (undobuf.other_insn);
2720 rtx new_other_notes;
2723 CLEAR_HARD_REG_SET (newpat_used_regs);
2725 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2728 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2734 PATTERN (undobuf.other_insn) = other_pat;
2736 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2737 are still valid. Then add any non-duplicate notes added by
2738 recog_for_combine. */
2739 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2741 next = XEXP (note, 1);
2743 if (REG_NOTE_KIND (note) == REG_UNUSED
2744 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2746 if (REG_P (XEXP (note, 0)))
2747 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2749 remove_note (undobuf.other_insn, note);
2753 for (note = new_other_notes; note; note = XEXP (note, 1))
2754 if (REG_P (XEXP (note, 0)))
2755 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2757 distribute_notes (new_other_notes, undobuf.other_insn,
2758 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2761 /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
2762 they are adjacent to each other or not. */
2764 rtx p = prev_nonnote_insn (i3);
2765 if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
2766 && sets_cc0_p (newi2pat))
2774 /* Only allow this combination if insn_rtx_costs reports that the
2775 replacement instructions are cheaper than the originals. */
2776 if (!combine_validate_cost (i1, i2, i3, newpat, newi2pat))
2782 /* We now know that we can do this combination. Merge the insns and
2783 update the status of registers and LOG_LINKS. */
2791 /* I3 now uses what used to be its destination and which is now
2792 I2's destination. This requires us to do a few adjustments. */
2793 PATTERN (i3) = newpat;
2794 adjust_for_new_dest (i3);
2796 /* We need a LOG_LINK from I3 to I2. But we used to have one,
2799 However, some later insn might be using I2's dest and have
2800 a LOG_LINK pointing at I3. We must remove this link.
2801 The simplest way to remove the link is to point it at I1,
2802 which we know will be a NOTE. */
2804 /* newi2pat is usually a SET here; however, recog_for_combine might
2805 have added some clobbers. */
2806 if (GET_CODE (newi2pat) == PARALLEL)
2807 ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0));
2809 ni2dest = SET_DEST (newi2pat);
2811 for (insn = NEXT_INSN (i3);
2812 insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2813 || insn != BB_HEAD (this_basic_block->next_bb));
2814 insn = NEXT_INSN (insn))
2816 if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
2818 for (link = LOG_LINKS (insn); link;
2819 link = XEXP (link, 1))
2820 if (XEXP (link, 0) == i3)
2821 XEXP (link, 0) = i1;
2829 rtx i3notes, i2notes, i1notes = 0;
2830 rtx i3links, i2links, i1links = 0;
2833 /* Compute which registers we expect to eliminate. newi2pat may be setting
2834 either i3dest or i2dest, so we must check it. Also, i1dest may be the
2835 same as i3dest, in which case newi2pat may be setting i1dest. */
2836 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2837 || i2dest_in_i2src || i2dest_in_i1src
2840 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2841 || (newi2pat && reg_set_p (i1dest, newi2pat))
2845 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2847 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2848 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2850 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2852 /* Ensure that we do not have something that should not be shared but
2853 occurs multiple times in the new insns. Check this by first
2854 resetting all the `used' flags and then copying anything is shared. */
2856 reset_used_flags (i3notes);
2857 reset_used_flags (i2notes);
2858 reset_used_flags (i1notes);
2859 reset_used_flags (newpat);
2860 reset_used_flags (newi2pat);
2861 if (undobuf.other_insn)
2862 reset_used_flags (PATTERN (undobuf.other_insn));
2864 i3notes = copy_rtx_if_shared (i3notes);
2865 i2notes = copy_rtx_if_shared (i2notes);
2866 i1notes = copy_rtx_if_shared (i1notes);
2867 newpat = copy_rtx_if_shared (newpat);
2868 newi2pat = copy_rtx_if_shared (newi2pat);
2869 if (undobuf.other_insn)
2870 reset_used_flags (PATTERN (undobuf.other_insn));
2872 INSN_CODE (i3) = insn_code_number;
2873 PATTERN (i3) = newpat;
2875 if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
2877 rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
2879 reset_used_flags (call_usage);
2880 call_usage = copy_rtx (call_usage);
2883 replace_rtx (call_usage, i2dest, i2src);
2886 replace_rtx (call_usage, i1dest, i1src);
2888 CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
2891 if (undobuf.other_insn)
2892 INSN_CODE (undobuf.other_insn) = other_code_number;
2894 /* We had one special case above where I2 had more than one set and
2895 we replaced a destination of one of those sets with the destination
2896 of I3. In that case, we have to update LOG_LINKS of insns later
2897 in this basic block. Note that this (expensive) case is rare.
2899 Also, in this case, we must pretend that all REG_NOTEs for I2
2900 actually came from I3, so that REG_UNUSED notes from I2 will be
2901 properly handled. */
2903 if (i3_subst_into_i2)
2905 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2906 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
2907 && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
2908 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2909 && ! find_reg_note (i2, REG_UNUSED,
2910 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2911 for (temp = NEXT_INSN (i2);
2912 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2913 || BB_HEAD (this_basic_block) != temp);
2914 temp = NEXT_INSN (temp))
2915 if (temp != i3 && INSN_P (temp))
2916 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2917 if (XEXP (link, 0) == i2)
2918 XEXP (link, 0) = i3;
2923 while (XEXP (link, 1))
2924 link = XEXP (link, 1);
2925 XEXP (link, 1) = i2notes;
2939 INSN_CODE (i2) = i2_code_number;
2940 PATTERN (i2) = newi2pat;
2943 SET_INSN_DELETED (i2);
2949 SET_INSN_DELETED (i1);
2952 /* Get death notes for everything that is now used in either I3 or
2953 I2 and used to die in a previous insn. If we built two new
2954 patterns, move from I1 to I2 then I2 to I3 so that we get the
2955 proper movement on registers that I2 modifies. */
2959 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2960 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2963 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2966 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2968 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2971 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2974 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2977 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2980 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2981 know these are REG_UNUSED and want them to go to the desired insn,
2982 so we always pass it as i3. We have not counted the notes in
2983 reg_n_deaths yet, so we need to do so now. */
2985 if (newi2pat && new_i2_notes)
2987 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2988 if (REG_P (XEXP (temp, 0)))
2989 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2991 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2996 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2997 if (REG_P (XEXP (temp, 0)))
2998 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
3000 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
3003 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
3004 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
3005 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
3006 in that case, it might delete I2. Similarly for I2 and I1.
3007 Show an additional death due to the REG_DEAD note we make here. If
3008 we discard it in distribute_notes, we will decrement it again. */
3012 if (REG_P (i3dest_killed))
3013 REG_N_DEATHS (REGNO (i3dest_killed))++;
3015 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
3016 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
3018 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
3020 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
3022 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3026 if (i2dest_in_i2src)
3029 REG_N_DEATHS (REGNO (i2dest))++;
3031 if (newi2pat && reg_set_p (i2dest, newi2pat))
3032 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
3033 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
3035 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
3036 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3037 NULL_RTX, NULL_RTX);
3040 if (i1dest_in_i1src)
3043 REG_N_DEATHS (REGNO (i1dest))++;
3045 if (newi2pat && reg_set_p (i1dest, newi2pat))
3046 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
3047 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
3049 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
3050 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3051 NULL_RTX, NULL_RTX);
3054 distribute_links (i3links);
3055 distribute_links (i2links);
3056 distribute_links (i1links);
3061 rtx i2_insn = 0, i2_val = 0, set;
3063 /* The insn that used to set this register doesn't exist, and
3064 this life of the register may not exist either. See if one of
3065 I3's links points to an insn that sets I2DEST. If it does,
3066 that is now the last known value for I2DEST. If we don't update
3067 this and I2 set the register to a value that depended on its old
3068 contents, we will get confused. If this insn is used, thing
3069 will be set correctly in combine_instructions. */
3071 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
3072 if ((set = single_set (XEXP (link, 0))) != 0
3073 && rtx_equal_p (i2dest, SET_DEST (set)))
3074 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
3076 record_value_for_reg (i2dest, i2_insn, i2_val);
3078 /* If the reg formerly set in I2 died only once and that was in I3,
3079 zero its use count so it won't make `reload' do any work. */
3081 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
3082 && ! i2dest_in_i2src)
3084 regno = REGNO (i2dest);
3085 REG_N_SETS (regno)--;
3089 if (i1 && REG_P (i1dest))
3092 rtx i1_insn = 0, i1_val = 0, set;
3094 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
3095 if ((set = single_set (XEXP (link, 0))) != 0
3096 && rtx_equal_p (i1dest, SET_DEST (set)))
3097 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
3099 record_value_for_reg (i1dest, i1_insn, i1_val);
3101 regno = REGNO (i1dest);
3102 if (! added_sets_1 && ! i1dest_in_i1src)
3103 REG_N_SETS (regno)--;
3106 /* Update reg_stat[].nonzero_bits et al for any changes that may have
3107 been made to this insn. The order of
3108 set_nonzero_bits_and_sign_copies() is important. Because newi2pat
3109 can affect nonzero_bits of newpat */
3111 note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
3112 note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
3114 /* Set new_direct_jump_p if a new return or simple jump instruction
3117 If I3 is now an unconditional jump, ensure that it has a
3118 BARRIER following it since it may have initially been a
3119 conditional jump. It may also be the last nonnote insn. */
3121 if (returnjump_p (i3) || any_uncondjump_p (i3))
3123 *new_direct_jump_p = 1;
3124 mark_jump_label (PATTERN (i3), i3, 0);
3126 if ((temp = next_nonnote_insn (i3)) == NULL_RTX
3127 || !BARRIER_P (temp))
3128 emit_barrier_after (i3);
3131 if (undobuf.other_insn != NULL_RTX
3132 && (returnjump_p (undobuf.other_insn)
3133 || any_uncondjump_p (undobuf.other_insn)))
3135 *new_direct_jump_p = 1;
3137 if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
3138 || !BARRIER_P (temp))
3139 emit_barrier_after (undobuf.other_insn);
3142 /* An NOOP jump does not need barrier, but it does need cleaning up
3144 if (GET_CODE (newpat) == SET
3145 && SET_SRC (newpat) == pc_rtx
3146 && SET_DEST (newpat) == pc_rtx)
3147 *new_direct_jump_p = 1;
3150 combine_successes++;
3153 if (added_links_insn
3154 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
3155 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
3156 return added_links_insn;
3158 return newi2pat ? i2 : i3;
3161 /* Undo all the modifications recorded in undobuf. */
3166 struct undo *undo, *next;
3168 for (undo = undobuf.undos; undo; undo = next)
3172 *undo->where.i = undo->old_contents.i;
3174 *undo->where.r = undo->old_contents.r;
3176 undo->next = undobuf.frees;
3177 undobuf.frees = undo;
3183 /* We've committed to accepting the changes we made. Move all
3184 of the undos to the free list. */
3189 struct undo *undo, *next;
3191 for (undo = undobuf.undos; undo; undo = next)
3194 undo->next = undobuf.frees;
3195 undobuf.frees = undo;
3201 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
3202 where we have an arithmetic expression and return that point. LOC will
3205 try_combine will call this function to see if an insn can be split into
3209 find_split_point (rtx *loc, rtx insn)
3212 enum rtx_code code = GET_CODE (x);
3214 unsigned HOST_WIDE_INT len = 0;
3215 HOST_WIDE_INT pos = 0;
3217 rtx inner = NULL_RTX;
3219 /* First special-case some codes. */
3223 #ifdef INSN_SCHEDULING
3224 /* If we are making a paradoxical SUBREG invalid, it becomes a split
3226 if (MEM_P (SUBREG_REG (x)))
3229 return find_split_point (&SUBREG_REG (x), insn);
3233 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
3234 using LO_SUM and HIGH. */
3235 if (GET_CODE (XEXP (x, 0)) == CONST
3236 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
3239 gen_rtx_LO_SUM (Pmode,
3240 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
3242 return &XEXP (XEXP (x, 0), 0);
3246 /* If we have a PLUS whose second operand is a constant and the
3247 address is not valid, perhaps will can split it up using
3248 the machine-specific way to split large constants. We use
3249 the first pseudo-reg (one of the virtual regs) as a placeholder;
3250 it will not remain in the result. */
3251 if (GET_CODE (XEXP (x, 0)) == PLUS
3252 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3253 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
3255 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
3256 rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
3259 /* This should have produced two insns, each of which sets our
3260 placeholder. If the source of the second is a valid address,
3261 we can make put both sources together and make a split point
3265 && NEXT_INSN (seq) != NULL_RTX
3266 && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
3267 && NONJUMP_INSN_P (seq)
3268 && GET_CODE (PATTERN (seq)) == SET
3269 && SET_DEST (PATTERN (seq)) == reg
3270 && ! reg_mentioned_p (reg,
3271 SET_SRC (PATTERN (seq)))
3272 && NONJUMP_INSN_P (NEXT_INSN (seq))
3273 && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
3274 && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
3275 && memory_address_p (GET_MODE (x),
3276 SET_SRC (PATTERN (NEXT_INSN (seq)))))
3278 rtx src1 = SET_SRC (PATTERN (seq));
3279 rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
3281 /* Replace the placeholder in SRC2 with SRC1. If we can
3282 find where in SRC2 it was placed, that can become our
3283 split point and we can replace this address with SRC2.
3284 Just try two obvious places. */
3286 src2 = replace_rtx (src2, reg, src1);
3288 if (XEXP (src2, 0) == src1)
3289 split = &XEXP (src2, 0);
3290 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
3291 && XEXP (XEXP (src2, 0), 0) == src1)
3292 split = &XEXP (XEXP (src2, 0), 0);
3296 SUBST (XEXP (x, 0), src2);
3301 /* If that didn't work, perhaps the first operand is complex and
3302 needs to be computed separately, so make a split point there.
3303 This will occur on machines that just support REG + CONST
3304 and have a constant moved through some previous computation. */
3306 else if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
3307 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
3308 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
3309 return &XEXP (XEXP (x, 0), 0);
3315 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
3316 ZERO_EXTRACT, the most likely reason why this doesn't match is that
3317 we need to put the operand into a register. So split at that
3320 if (SET_DEST (x) == cc0_rtx
3321 && GET_CODE (SET_SRC (x)) != COMPARE
3322 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
3323 && !OBJECT_P (SET_SRC (x))
3324 && ! (GET_CODE (SET_SRC (x)) == SUBREG
3325 && OBJECT_P (SUBREG_REG (SET_SRC (x)))))
3326 return &SET_SRC (x);
3329 /* See if we can split SET_SRC as it stands. */
3330 split = find_split_point (&SET_SRC (x), insn);
3331 if (split && split != &SET_SRC (x))
3334 /* See if we can split SET_DEST as it stands. */
3335 split = find_split_point (&SET_DEST (x), insn);
3336 if (split && split != &SET_DEST (x))
3339 /* See if this is a bitfield assignment with everything constant. If
3340 so, this is an IOR of an AND, so split it into that. */
3341 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
3342 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
3343 <= HOST_BITS_PER_WIDE_INT)
3344 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
3345 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
3346 && GET_CODE (SET_SRC (x)) == CONST_INT
3347 && ((INTVAL (XEXP (SET_DEST (x), 1))
3348 + INTVAL (XEXP (SET_DEST (x), 2)))
3349 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
3350 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
3352 HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
3353 unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
3354 unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
3355 rtx dest = XEXP (SET_DEST (x), 0);
3356 enum machine_mode mode = GET_MODE (dest);
3357 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
3360 if (BITS_BIG_ENDIAN)
3361 pos = GET_MODE_BITSIZE (mode) - len - pos;
3363 or_mask = gen_int_mode (src << pos, mode);
3366 simplify_gen_binary (IOR, mode, dest, or_mask));
3369 rtx negmask = gen_int_mode (~(mask << pos), mode);
3371 simplify_gen_binary (IOR, mode,
3372 simplify_gen_binary (AND, mode,
3377 SUBST (SET_DEST (x), dest);
3379 split = find_split_point (&SET_SRC (x), insn);
3380 if (split && split != &SET_SRC (x))
3384 /* Otherwise, see if this is an operation that we can split into two.
3385 If so, try to split that. */
3386 code = GET_CODE (SET_SRC (x));
3391 /* If we are AND'ing with a large constant that is only a single
3392 bit and the result is only being used in a context where we
3393 need to know if it is zero or nonzero, replace it with a bit
3394 extraction. This will avoid the large constant, which might
3395 have taken more than one insn to make. If the constant were
3396 not a valid argument to the AND but took only one insn to make,
3397 this is no worse, but if it took more than one insn, it will
3400 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3401 && REG_P (XEXP (SET_SRC (x), 0))
3402 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
3403 && REG_P (SET_DEST (x))
3404 && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
3405 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
3406 && XEXP (*split, 0) == SET_DEST (x)
3407 && XEXP (*split, 1) == const0_rtx)
3409 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
3410 XEXP (SET_SRC (x), 0),
3411 pos, NULL_RTX, 1, 1, 0, 0);
3412 if (extraction != 0)
3414 SUBST (SET_SRC (x), extraction);
3415 return find_split_point (loc, insn);
3421 /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
3422 is known to be on, this can be converted into a NEG of a shift. */
3423 if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
3424 && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
3425 && 1 <= (pos = exact_log2
3426 (nonzero_bits (XEXP (SET_SRC (x), 0),
3427 GET_MODE (XEXP (SET_SRC (x), 0))))))
3429 enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
3433 gen_rtx_LSHIFTRT (mode,
3434 XEXP (SET_SRC (x), 0),
3437 split = find_split_point (&SET_SRC (x), insn);
3438 if (split && split != &SET_SRC (x))
3444 inner = XEXP (SET_SRC (x), 0);
3446 /* We can't optimize if either mode is a partial integer
3447 mode as we don't know how many bits are significant
3449 if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
3450 || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
3454 len = GET_MODE_BITSIZE (GET_MODE (inner));
3460 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3461 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3463 inner = XEXP (SET_SRC (x), 0);
3464 len = INTVAL (XEXP (SET_SRC (x), 1));
3465 pos = INTVAL (XEXP (SET_SRC (x), 2));
3467 if (BITS_BIG_ENDIAN)
3468 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
3469 unsignedp = (code == ZERO_EXTRACT);
3477 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3479 enum machine_mode mode = GET_MODE (SET_SRC (x));
3481 /* For unsigned, we have a choice of a shift followed by an
3482 AND or two shifts. Use two shifts for field sizes where the
3483 constant might be too large. We assume here that we can
3484 always at least get 8-bit constants in an AND insn, which is
3485 true for every current RISC. */
3487 if (unsignedp && len <= 8)
3492 (mode, gen_lowpart (mode, inner),
3494 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
3496 split = find_split_point (&SET_SRC (x), insn);
3497 if (split && split != &SET_SRC (x))
3504 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
3505 gen_rtx_ASHIFT (mode,
3506 gen_lowpart (mode, inner),
3507 GEN_INT (GET_MODE_BITSIZE (mode)
3509 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
3511 split = find_split_point (&SET_SRC (x), insn);
3512 if (split && split != &SET_SRC (x))
3517 /* See if this is a simple operation with a constant as the second
3518 operand. It might be that this constant is out of range and hence
3519 could be used as a split point. */
3520 if (BINARY_P (SET_SRC (x))
3521 && CONSTANT_P (XEXP (SET_SRC (x), 1))
3522 && (OBJECT_P (XEXP (SET_SRC (x), 0))
3523 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3524 && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
3525 return &XEXP (SET_SRC (x), 1);
3527 /* Finally, see if this is a simple operation with its first operand
3528 not in a register. The operation might require this operand in a
3529 register, so return it as a split point. We can always do this
3530 because if the first operand were another operation, we would have
3531 already found it as a split point. */
3532 if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
3533 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3534 return &XEXP (SET_SRC (x), 0);
3540 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3541 it is better to write this as (not (ior A B)) so we can split it.
3542 Similarly for IOR. */
3543 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3546 gen_rtx_NOT (GET_MODE (x),
3547 gen_rtx_fmt_ee (code == IOR ? AND : IOR,
3549 XEXP (XEXP (x, 0), 0),
3550 XEXP (XEXP (x, 1), 0))));
3551 return find_split_point (loc, insn);
3554 /* Many RISC machines have a large set of logical insns. If the
3555 second operand is a NOT, put it first so we will try to split the
3556 other operand first. */
3557 if (GET_CODE (XEXP (x, 1)) == NOT)
3559 rtx tem = XEXP (x, 0);
3560 SUBST (XEXP (x, 0), XEXP (x, 1));
3561 SUBST (XEXP (x, 1), tem);
3569 /* Otherwise, select our actions depending on our rtx class. */
3570 switch (GET_RTX_CLASS (code))
3572 case RTX_BITFIELD_OPS: /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
3574 split = find_split_point (&XEXP (x, 2), insn);
3577 /* ... fall through ... */
3579 case RTX_COMM_ARITH:
3581 case RTX_COMM_COMPARE:
3582 split = find_split_point (&XEXP (x, 1), insn);
3585 /* ... fall through ... */
3587 /* Some machines have (and (shift ...) ...) insns. If X is not
3588 an AND, but XEXP (X, 0) is, use it as our split point. */
3589 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3590 return &XEXP (x, 0);
3592 split = find_split_point (&XEXP (x, 0), insn);
3598 /* Otherwise, we don't have a split point. */
3603 /* Throughout X, replace FROM with TO, and return the result.
3604 The result is TO if X is FROM;
3605 otherwise the result is X, but its contents may have been modified.
3606 If they were modified, a record was made in undobuf so that
3607 undo_all will (among other things) return X to its original state.
3609 If the number of changes necessary is too much to record to undo,
3610 the excess changes are not made, so the result is invalid.
3611 The changes already made can still be undone.
3612 undobuf.num_undo is incremented for such changes, so by testing that
3613 the caller can tell whether the result is valid.
3615 `n_occurrences' is incremented each time FROM is replaced.
3617 IN_DEST is nonzero if we are processing the SET_DEST of a SET.
3619 UNIQUE_COPY is nonzero if each substitution must be unique. We do this
3620 by copying if `n_occurrences' is nonzero. */
3623 subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
3625 enum rtx_code code = GET_CODE (x);
3626 enum machine_mode op0_mode = VOIDmode;
3631 /* Two expressions are equal if they are identical copies of a shared
3632 RTX or if they are both registers with the same register number
3635 #define COMBINE_RTX_EQUAL_P(X,Y) \
3637 || (REG_P (X) && REG_P (Y) \
3638 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3640 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3643 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3646 /* If X and FROM are the same register but different modes, they will
3647 not have been seen as equal above. However, flow.c will make a
3648 LOG_LINKS entry for that case. If we do nothing, we will try to
3649 rerecognize our original insn and, when it succeeds, we will
3650 delete the feeding insn, which is incorrect.
3652 So force this insn not to match in this (rare) case. */
3653 if (! in_dest && code == REG && REG_P (from)
3654 && REGNO (x) == REGNO (from))
3655 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
3657 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3658 of which may contain things that can be combined. */
3659 if (code != MEM && code != LO_SUM && OBJECT_P (x))
3662 /* It is possible to have a subexpression appear twice in the insn.
3663 Suppose that FROM is a register that appears within TO.
3664 Then, after that subexpression has been scanned once by `subst',
3665 the second time it is scanned, TO may be found. If we were
3666 to scan TO here, we would find FROM within it and create a
3667 self-referent rtl structure which is completely wrong. */
3668 if (COMBINE_RTX_EQUAL_P (x, to))
3671 /* Parallel asm_operands need special attention because all of the
3672 inputs are shared across the arms. Furthermore, unsharing the
3673 rtl results in recognition failures. Failure to handle this case
3674 specially can result in circular rtl.
3676 Solve this by doing a normal pass across the first entry of the
3677 parallel, and only processing the SET_DESTs of the subsequent
3680 if (code == PARALLEL
3681 && GET_CODE (XVECEXP (x, 0, 0)) == SET
3682 && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3684 new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3686 /* If this substitution failed, this whole thing fails. */
3687 if (GET_CODE (new) == CLOBBER
3688 && XEXP (new, 0) == const0_rtx)
3691 SUBST (XVECEXP (x, 0, 0), new);
3693 for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
3695 rtx dest = SET_DEST (XVECEXP (x, 0, i));
3698 && GET_CODE (dest) != CC0
3699 && GET_CODE (dest) != PC)
3701 new = subst (dest, from, to, 0, unique_copy);
3703 /* If this substitution failed, this whole thing fails. */
3704 if (GET_CODE (new) == CLOBBER
3705 && XEXP (new, 0) == const0_rtx)
3708 SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
3714 len = GET_RTX_LENGTH (code);
3715 fmt = GET_RTX_FORMAT (code);
3717 /* We don't need to process a SET_DEST that is a register, CC0,
3718 or PC, so set up to skip this common case. All other cases
3719 where we want to suppress replacing something inside a
3720 SET_SRC are handled via the IN_DEST operand. */
3722 && (REG_P (SET_DEST (x))
3723 || GET_CODE (SET_DEST (x)) == CC0
3724 || GET_CODE (SET_DEST (x)) == PC))
3727 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3730 op0_mode = GET_MODE (XEXP (x, 0));
3732 for (i = 0; i < len; i++)
3737 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3739 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3741 new = (unique_copy && n_occurrences
3742 ? copy_rtx (to) : to);
3747 new = subst (XVECEXP (x, i, j), from, to, 0,
3750 /* If this substitution failed, this whole thing
3752 if (GET_CODE (new) == CLOBBER
3753 && XEXP (new, 0) == const0_rtx)
3757 SUBST (XVECEXP (x, i, j), new);
3760 else if (fmt[i] == 'e')
3762 /* If this is a register being set, ignore it. */
3766 && (((code == SUBREG || code == ZERO_EXTRACT)
3768 || code == STRICT_LOW_PART))
3771 else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3773 /* In general, don't install a subreg involving two
3774 modes not tieable. It can worsen register
3775 allocation, and can even make invalid reload
3776 insns, since the reg inside may need to be copied
3777 from in the outside mode, and that may be invalid
3778 if it is an fp reg copied in integer mode.
3780 We allow two exceptions to this: It is valid if
3781 it is inside another SUBREG and the mode of that
3782 SUBREG and the mode of the inside of TO is
3783 tieable and it is valid if X is a SET that copies
3786 if (GET_CODE (to) == SUBREG
3787 && ! MODES_TIEABLE_P (GET_MODE (to),
3788 GET_MODE (SUBREG_REG (to)))
3789 && ! (code == SUBREG
3790 && MODES_TIEABLE_P (GET_MODE (x),
3791 GET_MODE (SUBREG_REG (to))))
3793 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3796 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3798 #ifdef CANNOT_CHANGE_MODE_CLASS
3801 && REGNO (to) < FIRST_PSEUDO_REGISTER
3802 && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
3805 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3808 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3812 /* If we are in a SET_DEST, suppress most cases unless we
3813 have gone inside a MEM, in which case we want to
3814 simplify the address. We assume here that things that
3815 are actually part of the destination have their inner
3816 parts in the first expression. This is true for SUBREG,
3817 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3818 things aside from REG and MEM that should appear in a
3820 new = subst (XEXP (x, i), from, to,
3822 && (code == SUBREG || code == STRICT_LOW_PART
3823 || code == ZERO_EXTRACT))
3825 && i == 0), unique_copy);
3827 /* If we found that we will have to reject this combination,
3828 indicate that by returning the CLOBBER ourselves, rather than
3829 an expression containing it. This will speed things up as
3830 well as prevent accidents where two CLOBBERs are considered
3831 to be equal, thus producing an incorrect simplification. */
3833 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3836 if (GET_CODE (x) == SUBREG
3837 && (GET_CODE (new) == CONST_INT
3838 || GET_CODE (new) == CONST_DOUBLE))
3840 enum machine_mode mode = GET_MODE (x);
3842 x = simplify_subreg (GET_MODE (x), new,
3843 GET_MODE (SUBREG_REG (x)),
3846 x = gen_rtx_CLOBBER (mode, const0_rtx);
3848 else if (GET_CODE (new) == CONST_INT
3849 && GET_CODE (x) == ZERO_EXTEND)
3851 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
3852 new, GET_MODE (XEXP (x, 0)));
3856 SUBST (XEXP (x, i), new);
3861 /* Try to simplify X. If the simplification changed the code, it is likely
3862 that further simplification will help, so loop, but limit the number
3863 of repetitions that will be performed. */
3865 for (i = 0; i < 4; i++)
3867 /* If X is sufficiently simple, don't bother trying to do anything
3869 if (code != CONST_INT && code != REG && code != CLOBBER)
3870 x = combine_simplify_rtx (x, op0_mode, in_dest);
3872 if (GET_CODE (x) == code)
3875 code = GET_CODE (x);
3877 /* We no longer know the original mode of operand 0 since we
3878 have changed the form of X) */
3879 op0_mode = VOIDmode;
3885 /* Simplify X, a piece of RTL. We just operate on the expression at the
3886 outer level; call `subst' to simplify recursively. Return the new
3889 OP0_MODE is the original mode of XEXP (x, 0). IN_DEST is nonzero
3890 if we are inside a SET_DEST. */
3893 combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
3895 enum rtx_code code = GET_CODE (x);
3896 enum machine_mode mode = GET_MODE (x);
3901 /* If this is a commutative operation, put a constant last and a complex
3902 expression first. We don't need to do this for comparisons here. */
3903 if (COMMUTATIVE_ARITH_P (x)
3904 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
3907 SUBST (XEXP (x, 0), XEXP (x, 1));
3908 SUBST (XEXP (x, 1), temp);
3911 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3912 applying it to the arms of the IF_THEN_ELSE. This often simplifies
3913 things. Check for cases where both arms are testing the same
3916 Don't do anything if all operands are very simple. */
3919 && ((!OBJECT_P (XEXP (x, 0))
3920 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3921 && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
3922 || (!OBJECT_P (XEXP (x, 1))
3923 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3924 && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
3926 && (!OBJECT_P (XEXP (x, 0))
3927 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3928 && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
3930 rtx cond, true_rtx, false_rtx;
3932 cond = if_then_else_cond (x, &true_rtx, &false_rtx);
3934 /* If everything is a comparison, what we have is highly unlikely
3935 to be simpler, so don't use it. */
3936 && ! (COMPARISON_P (x)
3937 && (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx))))
3939 rtx cop1 = const0_rtx;
3940 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3942 if (cond_code == NE && COMPARISON_P (cond))
3945 /* Simplify the alternative arms; this may collapse the true and
3946 false arms to store-flag values. Be careful to use copy_rtx
3947 here since true_rtx or false_rtx might share RTL with x as a
3948 result of the if_then_else_cond call above. */
3949 true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0);
3950 false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0);
3952 /* If true_rtx and false_rtx are not general_operands, an if_then_else
3953 is unlikely to be simpler. */
3954 if (general_operand (true_rtx, VOIDmode)
3955 && general_operand (false_rtx, VOIDmode))
3957 enum rtx_code reversed;
3959 /* Restarting if we generate a store-flag expression will cause
3960 us to loop. Just drop through in this case. */
3962 /* If the result values are STORE_FLAG_VALUE and zero, we can
3963 just make the comparison operation. */
3964 if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
3965 x = simplify_gen_relational (cond_code, mode, VOIDmode,
3967 else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
3968 && ((reversed = reversed_comparison_code_parts
3969 (cond_code, cond, cop1, NULL))
3971 x = simplify_gen_relational (reversed, mode, VOIDmode,
3974 /* Likewise, we can make the negate of a comparison operation
3975 if the result values are - STORE_FLAG_VALUE and zero. */
3976 else if (GET_CODE (true_rtx) == CONST_INT
3977 && INTVAL (true_rtx) == - STORE_FLAG_VALUE
3978 && false_rtx == const0_rtx)
3979 x = simplify_gen_unary (NEG, mode,
3980 simplify_gen_relational (cond_code,
3984 else if (GET_CODE (false_rtx) == CONST_INT
3985 && INTVAL (false_rtx) == - STORE_FLAG_VALUE
3986 && true_rtx == const0_rtx
3987 && ((reversed = reversed_comparison_code_parts
3988 (cond_code, cond, cop1, NULL))
3990 x = simplify_gen_unary (NEG, mode,
3991 simplify_gen_relational (reversed,
3996 return gen_rtx_IF_THEN_ELSE (mode,
3997 simplify_gen_relational (cond_code,
4002 true_rtx, false_rtx);
4004 code = GET_CODE (x);
4005 op0_mode = VOIDmode;
4010 /* Try to fold this expression in case we have constants that weren't
4013 switch (GET_RTX_CLASS (code))
4016 if (op0_mode == VOIDmode)
4017 op0_mode = GET_MODE (XEXP (x, 0));
4018 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
4021 case RTX_COMM_COMPARE:
4023 enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
4024 if (cmp_mode == VOIDmode)
4026 cmp_mode = GET_MODE (XEXP (x, 1));
4027 if (cmp_mode == VOIDmode)
4028 cmp_mode = op0_mode;
4030 temp = simplify_relational_operation (code, mode, cmp_mode,
4031 XEXP (x, 0), XEXP (x, 1));
4034 case RTX_COMM_ARITH:
4036 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
4038 case RTX_BITFIELD_OPS:
4040 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
4041 XEXP (x, 1), XEXP (x, 2));
4050 code = GET_CODE (temp);
4051 op0_mode = VOIDmode;
4052 mode = GET_MODE (temp);
4055 /* First see if we can apply the inverse distributive law. */
4056 if (code == PLUS || code == MINUS
4057 || code == AND || code == IOR || code == XOR)
4059 x = apply_distributive_law (x);
4060 code = GET_CODE (x);
4061 op0_mode = VOIDmode;
4064 /* If CODE is an associative operation not otherwise handled, see if we
4065 can associate some operands. This can win if they are constants or
4066 if they are logically related (i.e. (a & b) & a). */
4067 if ((code == PLUS || code == MINUS || code == MULT || code == DIV
4068 || code == AND || code == IOR || code == XOR
4069 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
4070 && ((INTEGRAL_MODE_P (mode) && code != DIV)
4071 || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
4073 if (GET_CODE (XEXP (x, 0)) == code)
4075 rtx other = XEXP (XEXP (x, 0), 0);
4076 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
4077 rtx inner_op1 = XEXP (x, 1);
4080 /* Make sure we pass the constant operand if any as the second
4081 one if this is a commutative operation. */
4082 if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
4084 rtx tem = inner_op0;
4085 inner_op0 = inner_op1;
4088 inner = simplify_binary_operation (code == MINUS ? PLUS
4089 : code == DIV ? MULT
4091 mode, inner_op0, inner_op1);
4093 /* For commutative operations, try the other pair if that one
4095 if (inner == 0 && COMMUTATIVE_ARITH_P (x))
4097 other = XEXP (XEXP (x, 0), 1);
4098 inner = simplify_binary_operation (code, mode,
4099 XEXP (XEXP (x, 0), 0),
4104 return simplify_gen_binary (code, mode, other, inner);
4108 /* A little bit of algebraic simplification here. */
4112 /* Ensure that our address has any ASHIFTs converted to MULT in case
4113 address-recognizing predicates are called later. */
4114 temp = make_compound_operation (XEXP (x, 0), MEM);
4115 SUBST (XEXP (x, 0), temp);
4119 if (op0_mode == VOIDmode)
4120 op0_mode = GET_MODE (SUBREG_REG (x));
4122 /* See if this can be moved to simplify_subreg. */
4123 if (CONSTANT_P (SUBREG_REG (x))
4124 && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
4125 /* Don't call gen_lowpart if the inner mode
4126 is VOIDmode and we cannot simplify it, as SUBREG without
4127 inner mode is invalid. */
4128 && (GET_MODE (SUBREG_REG (x)) != VOIDmode
4129 || gen_lowpart_common (mode, SUBREG_REG (x))))
4130 return gen_lowpart (mode, SUBREG_REG (x));
4132 if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
4136 temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
4142 /* Don't change the mode of the MEM if that would change the meaning
4144 if (MEM_P (SUBREG_REG (x))
4145 && (MEM_VOLATILE_P (SUBREG_REG (x))
4146 || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
4147 return gen_rtx_CLOBBER (mode, const0_rtx);
4149 /* Note that we cannot do any narrowing for non-constants since
4150 we might have been counting on using the fact that some bits were
4151 zero. We now do this in the SET. */
4156 if (GET_CODE (XEXP (x, 0)) == SUBREG
4157 && subreg_lowpart_p (XEXP (x, 0))
4158 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
4159 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
4160 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
4161 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
4163 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
4165 x = gen_rtx_ROTATE (inner_mode,
4166 simplify_gen_unary (NOT, inner_mode, const1_rtx,
4168 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
4169 return gen_lowpart (mode, x);
4172 /* Apply De Morgan's laws to reduce number of patterns for machines
4173 with negating logical insns (and-not, nand, etc.). If result has
4174 only one NOT, put it first, since that is how the patterns are
4177 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
4179 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
4180 enum machine_mode op_mode;
4182 op_mode = GET_MODE (in1);
4183 in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
4185 op_mode = GET_MODE (in2);
4186 if (op_mode == VOIDmode)
4188 in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
4190 if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
4193 in2 = in1; in1 = tem;
4196 return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
4202 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
4203 if (GET_CODE (XEXP (x, 0)) == XOR
4204 && XEXP (XEXP (x, 0), 1) == const1_rtx
4205 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
4206 return simplify_gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
4209 temp = expand_compound_operation (XEXP (x, 0));
4211 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
4212 replaced by (lshiftrt X C). This will convert
4213 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
4215 if (GET_CODE (temp) == ASHIFTRT
4216 && GET_CODE (XEXP (temp, 1)) == CONST_INT
4217 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
4218 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
4219 INTVAL (XEXP (temp, 1)));
4221 /* If X has only a single bit that might be nonzero, say, bit I, convert
4222 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
4223 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
4224 (sign_extract X 1 Y). But only do this if TEMP isn't a register
4225 or a SUBREG of one since we'd be making the expression more
4226 complex if it was just a register. */
4229 && ! (GET_CODE (temp) == SUBREG
4230 && REG_P (SUBREG_REG (temp)))
4231 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
4233 rtx temp1 = simplify_shift_const
4234 (NULL_RTX, ASHIFTRT, mode,
4235 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
4236 GET_MODE_BITSIZE (mode) - 1 - i),
4237 GET_MODE_BITSIZE (mode) - 1 - i);
4239 /* If all we did was surround TEMP with the two shifts, we
4240 haven't improved anything, so don't use it. Otherwise,
4241 we are better off with TEMP1. */
4242 if (GET_CODE (temp1) != ASHIFTRT
4243 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
4244 || XEXP (XEXP (temp1, 0), 0) != temp)
4250 /* We can't handle truncation to a partial integer mode here
4251 because we don't know the real bitsize of the partial
4253 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
4256 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4257 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4258 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4260 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
4261 GET_MODE_MASK (mode), NULL_RTX, 0));
4263 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
4264 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4265 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4266 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4267 return XEXP (XEXP (x, 0), 0);
4269 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
4270 (OP:SI foo:SI) if OP is NEG or ABS. */
4271 if ((GET_CODE (XEXP (x, 0)) == ABS
4272 || GET_CODE (XEXP (x, 0)) == NEG)
4273 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
4274 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
4275 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4276 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4277 XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4279 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
4281 if (GET_CODE (XEXP (x, 0)) == SUBREG
4282 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
4283 && subreg_lowpart_p (XEXP (x, 0)))
4284 return SUBREG_REG (XEXP (x, 0));
4286 /* If we know that the value is already truncated, we can
4287 replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
4288 is nonzero for the corresponding modes. But don't do this
4289 for an (LSHIFTRT (MULT ...)) since this will cause problems
4290 with the umulXi3_highpart patterns. */
4291 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4292 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4293 && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4294 >= (unsigned int) (GET_MODE_BITSIZE (mode) + 1)
4295 && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4296 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
4297 return gen_lowpart (mode, XEXP (x, 0));
4299 /* A truncate of a comparison can be replaced with a subreg if
4300 STORE_FLAG_VALUE permits. This is like the previous test,
4301 but it works even if the comparison is done in a mode larger
4302 than HOST_BITS_PER_WIDE_INT. */
4303 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4304 && COMPARISON_P (XEXP (x, 0))
4305 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
4306 return gen_lowpart (mode, XEXP (x, 0));
4308 /* Similarly, a truncate of a register whose value is a
4309 comparison can be replaced with a subreg if STORE_FLAG_VALUE
4311 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4312 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
4313 && (temp = get_last_value (XEXP (x, 0)))
4314 && COMPARISON_P (temp))
4315 return gen_lowpart (mode, XEXP (x, 0));
4319 case FLOAT_TRUNCATE:
4320 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
4321 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4322 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4323 return XEXP (XEXP (x, 0), 0);
4325 /* (float_truncate:SF (float_truncate:DF foo:XF))
4326 = (float_truncate:SF foo:XF).
4327 This may eliminate double rounding, so it is unsafe.
4329 (float_truncate:SF (float_extend:XF foo:DF))
4330 = (float_truncate:SF foo:DF).
4332 (float_truncate:DF (float_extend:XF foo:SF))
4333 = (float_extend:SF foo:DF). */
4334 if ((GET_CODE (XEXP (x, 0)) == FLOAT_TRUNCATE
4335 && flag_unsafe_math_optimizations)
4336 || GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND)
4337 return simplify_gen_unary (GET_MODE_SIZE (GET_MODE (XEXP (XEXP (x, 0),
4339 > GET_MODE_SIZE (mode)
4340 ? FLOAT_TRUNCATE : FLOAT_EXTEND,
4342 XEXP (XEXP (x, 0), 0), mode);
4344 /* (float_truncate (float x)) is (float x) */
4345 if (GET_CODE (XEXP (x, 0)) == FLOAT
4346 && (flag_unsafe_math_optimizations
4347 || ((unsigned)significand_size (GET_MODE (XEXP (x, 0)))
4348 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
4349 - num_sign_bit_copies (XEXP (XEXP (x, 0), 0),
4350 GET_MODE (XEXP (XEXP (x, 0), 0)))))))
4351 return simplify_gen_unary (FLOAT, mode,
4352 XEXP (XEXP (x, 0), 0),
4353 GET_MODE (XEXP (XEXP (x, 0), 0)));
4355 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
4356 (OP:SF foo:SF) if OP is NEG or ABS. */
4357 if ((GET_CODE (XEXP (x, 0)) == ABS
4358 || GET_CODE (XEXP (x, 0)) == NEG)
4359 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
4360 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4361 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4362 XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4364 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
4365 is (float_truncate:SF x). */
4366 if (GET_CODE (XEXP (x, 0)) == SUBREG
4367 && subreg_lowpart_p (XEXP (x, 0))
4368 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
4369 return SUBREG_REG (XEXP (x, 0));
4372 /* (float_extend (float_extend x)) is (float_extend x)
4374 (float_extend (float x)) is (float x) assuming that double
4375 rounding can't happen.
4377 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4378 || (GET_CODE (XEXP (x, 0)) == FLOAT
4379 && ((unsigned)significand_size (GET_MODE (XEXP (x, 0)))
4380 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
4381 - num_sign_bit_copies (XEXP (XEXP (x, 0), 0),
4382 GET_MODE (XEXP (XEXP (x, 0), 0)))))))
4383 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4384 XEXP (XEXP (x, 0), 0),
4385 GET_MODE (XEXP (XEXP (x, 0), 0)));
4390 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
4391 using cc0, in which case we want to leave it as a COMPARE
4392 so we can distinguish it from a register-register-copy. */
4393 if (XEXP (x, 1) == const0_rtx)
4396 /* x - 0 is the same as x unless x's mode has signed zeros and
4397 allows rounding towards -infinity. Under those conditions,
4399 if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0)))
4400 && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0))))
4401 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4407 /* (const (const X)) can become (const X). Do it this way rather than
4408 returning the inner CONST since CONST can be shared with a
4410 if (GET_CODE (XEXP (x, 0)) == CONST)
4411 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4416 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
4417 can add in an offset. find_split_point will split this address up
4418 again if it doesn't match. */
4419 if (GET_CODE (XEXP (x, 0)) == HIGH
4420 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4426 /* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)).
4428 if (GET_CODE (XEXP (x, 0)) == MULT
4429 && GET_CODE (XEXP (XEXP (x, 0), 0)) == NEG)
4433 in1 = XEXP (XEXP (XEXP (x, 0), 0), 0);
4434 in2 = XEXP (XEXP (x, 0), 1);
4435 return simplify_gen_binary (MINUS, mode, XEXP (x, 1),
4436 simplify_gen_binary (MULT, mode,
4440 /* If we have (plus (plus (A const) B)), associate it so that CONST is
4441 outermost. That's because that's the way indexed addresses are
4442 supposed to appear. This code used to check many more cases, but
4443 they are now checked elsewhere. */
4444 if (GET_CODE (XEXP (x, 0)) == PLUS
4445 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
4446 return simplify_gen_binary (PLUS, mode,
4447 simplify_gen_binary (PLUS, mode,
4448 XEXP (XEXP (x, 0), 0),
4450 XEXP (XEXP (x, 0), 1));
4452 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4453 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
4454 bit-field and can be replaced by either a sign_extend or a
4455 sign_extract. The `and' may be a zero_extend and the two
4456 <c>, -<c> constants may be reversed. */
4457 if (GET_CODE (XEXP (x, 0)) == XOR
4458 && GET_CODE (XEXP (x, 1)) == CONST_INT
4459 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4460 && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
4461 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4462 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4463 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4464 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4465 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4466 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4467 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
4468 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4469 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
4470 == (unsigned int) i + 1))))
4471 return simplify_shift_const
4472 (NULL_RTX, ASHIFTRT, mode,
4473 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4474 XEXP (XEXP (XEXP (x, 0), 0), 0),
4475 GET_MODE_BITSIZE (mode) - (i + 1)),
4476 GET_MODE_BITSIZE (mode) - (i + 1));
4478 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
4479 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
4480 is 1. This produces better code than the alternative immediately
4482 if (COMPARISON_P (XEXP (x, 0))
4483 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
4484 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
4485 && (reversed = reversed_comparison (XEXP (x, 0), mode)))
4487 simplify_gen_unary (NEG, mode, reversed, mode);
4489 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
4490 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4491 the bitsize of the mode - 1. This allows simplification of
4492 "a = (b & 8) == 0;" */
4493 if (XEXP (x, 1) == constm1_rtx
4494 && !REG_P (XEXP (x, 0))
4495 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4496 && REG_P (SUBREG_REG (XEXP (x, 0))))
4497 && nonzero_bits (XEXP (x, 0), mode) == 1)
4498 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4499 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4500 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
4501 GET_MODE_BITSIZE (mode) - 1),
4502 GET_MODE_BITSIZE (mode) - 1);
4504 /* If we are adding two things that have no bits in common, convert
4505 the addition into an IOR. This will often be further simplified,
4506 for example in cases like ((a & 1) + (a & 2)), which can
4509 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4510 && (nonzero_bits (XEXP (x, 0), mode)
4511 & nonzero_bits (XEXP (x, 1), mode)) == 0)
4513 /* Try to simplify the expression further. */
4514 rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4515 temp = combine_simplify_rtx (tor, mode, in_dest);
4517 /* If we could, great. If not, do not go ahead with the IOR
4518 replacement, since PLUS appears in many special purpose
4519 address arithmetic instructions. */
4520 if (GET_CODE (temp) != CLOBBER && temp != tor)
4526 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4527 by reversing the comparison code if valid. */
4528 if (STORE_FLAG_VALUE == 1
4529 && XEXP (x, 0) == const1_rtx
4530 && COMPARISON_P (XEXP (x, 1))
4531 && (reversed = reversed_comparison (XEXP (x, 1), mode)))
4534 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4535 (and <foo> (const_int pow2-1)) */
4536 if (GET_CODE (XEXP (x, 1)) == AND
4537 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4538 && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4539 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4540 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4541 -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
4543 /* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A).
4545 if (GET_CODE (XEXP (x, 1)) == MULT
4546 && GET_CODE (XEXP (XEXP (x, 1), 0)) == NEG)
4550 in1 = XEXP (XEXP (XEXP (x, 1), 0), 0);
4551 in2 = XEXP (XEXP (x, 1), 1);
4552 return simplify_gen_binary (PLUS, mode,
4553 simplify_gen_binary (MULT, mode,
4558 /* Canonicalize (minus (neg A) (mult B C)) to
4559 (minus (mult (neg B) C) A). */
4560 if (GET_CODE (XEXP (x, 1)) == MULT
4561 && GET_CODE (XEXP (x, 0)) == NEG)
4565 in1 = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 1), 0), mode);
4566 in2 = XEXP (XEXP (x, 1), 1);
4567 return simplify_gen_binary (MINUS, mode,
4568 simplify_gen_binary (MULT, mode,
4570 XEXP (XEXP (x, 0), 0));
4573 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4575 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
4576 return simplify_gen_binary (MINUS, mode,
4577 simplify_gen_binary (MINUS, mode,
4579 XEXP (XEXP (x, 1), 0)),
4580 XEXP (XEXP (x, 1), 1));
4584 /* If we have (mult (plus A B) C), apply the distributive law and then
4585 the inverse distributive law to see if things simplify. This
4586 occurs mostly in addresses, often when unrolling loops. */
4588 if (GET_CODE (XEXP (x, 0)) == PLUS)
4590 rtx result = distribute_and_simplify_rtx (x, 0);
4595 /* Try simplify a*(b/c) as (a*b)/c. */
4596 if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
4597 && GET_CODE (XEXP (x, 0)) == DIV)
4599 rtx tem = simplify_binary_operation (MULT, mode,
4600 XEXP (XEXP (x, 0), 0),
4603 return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
4608 /* If this is a divide by a power of two, treat it as a shift if
4609 its first operand is a shift. */
4610 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4611 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4612 && (GET_CODE (XEXP (x, 0)) == ASHIFT
4613 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4614 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4615 || GET_CODE (XEXP (x, 0)) == ROTATE
4616 || GET_CODE (XEXP (x, 0)) == ROTATERT))
4617 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
4621 case GT: case GTU: case GE: case GEU:
4622 case LT: case LTU: case LE: case LEU:
4623 case UNEQ: case LTGT:
4624 case UNGT: case UNGE:
4625 case UNLT: case UNLE:
4626 case UNORDERED: case ORDERED:
4627 /* If the first operand is a condition code, we can't do anything
4629 if (GET_CODE (XEXP (x, 0)) == COMPARE
4630 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4631 && ! CC0_P (XEXP (x, 0))))
4633 rtx op0 = XEXP (x, 0);
4634 rtx op1 = XEXP (x, 1);
4635 enum rtx_code new_code;
4637 if (GET_CODE (op0) == COMPARE)
4638 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4640 /* Simplify our comparison, if possible. */
4641 new_code = simplify_comparison (code, &op0, &op1);
4643 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4644 if only the low-order bit is possibly nonzero in X (such as when
4645 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4646 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4647 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4650 Remove any ZERO_EXTRACT we made when thinking this was a
4651 comparison. It may now be simpler to use, e.g., an AND. If a
4652 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4653 the call to make_compound_operation in the SET case. */
4655 if (STORE_FLAG_VALUE == 1
4656 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4657 && op1 == const0_rtx
4658 && mode == GET_MODE (op0)
4659 && nonzero_bits (op0, mode) == 1)
4660 return gen_lowpart (mode,
4661 expand_compound_operation (op0));
4663 else if (STORE_FLAG_VALUE == 1
4664 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4665 && op1 == const0_rtx
4666 && mode == GET_MODE (op0)
4667 && (num_sign_bit_copies (op0, mode)
4668 == GET_MODE_BITSIZE (mode)))
4670 op0 = expand_compound_operation (op0);
4671 return simplify_gen_unary (NEG, mode,
4672 gen_lowpart (mode, op0),
4676 else if (STORE_FLAG_VALUE == 1
4677 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4678 && op1 == const0_rtx
4679 && mode == GET_MODE (op0)
4680 && nonzero_bits (op0, mode) == 1)
4682 op0 = expand_compound_operation (op0);
4683 return simplify_gen_binary (XOR, mode,
4684 gen_lowpart (mode, op0),
4688 else if (STORE_FLAG_VALUE == 1
4689 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4690 && op1 == const0_rtx
4691 && mode == GET_MODE (op0)
4692 && (num_sign_bit_copies (op0, mode)
4693 == GET_MODE_BITSIZE (mode)))
4695 op0 = expand_compound_operation (op0);
4696 return plus_constant (gen_lowpart (mode, op0), 1);
4699 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4701 if (STORE_FLAG_VALUE == -1
4702 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4703 && op1 == const0_rtx
4704 && (num_sign_bit_copies (op0, mode)
4705 == GET_MODE_BITSIZE (mode)))
4706 return gen_lowpart (mode,
4707 expand_compound_operation (op0));
4709 else if (STORE_FLAG_VALUE == -1
4710 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4711 && op1 == const0_rtx
4712 && mode == GET_MODE (op0)
4713 && nonzero_bits (op0, mode) == 1)
4715 op0 = expand_compound_operation (op0);
4716 return simplify_gen_unary (NEG, mode,
4717 gen_lowpart (mode, op0),
4721 else if (STORE_FLAG_VALUE == -1
4722 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4723 && op1 == const0_rtx
4724 && mode == GET_MODE (op0)
4725 && (num_sign_bit_copies (op0, mode)
4726 == GET_MODE_BITSIZE (mode)))
4728 op0 = expand_compound_operation (op0);
4729 return simplify_gen_unary (NOT, mode,
4730 gen_lowpart (mode, op0),
4734 /* If X is 0/1, (eq X 0) is X-1. */
4735 else if (STORE_FLAG_VALUE == -1
4736 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4737 && op1 == const0_rtx
4738 && mode == GET_MODE (op0)
4739 && nonzero_bits (op0, mode) == 1)
4741 op0 = expand_compound_operation (op0);
4742 return plus_constant (gen_lowpart (mode, op0), -1);
4745 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4746 one bit that might be nonzero, we can convert (ne x 0) to
4747 (ashift x c) where C puts the bit in the sign bit. Remove any
4748 AND with STORE_FLAG_VALUE when we are done, since we are only
4749 going to test the sign bit. */
4750 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4751 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4752 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4753 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4754 && op1 == const0_rtx
4755 && mode == GET_MODE (op0)
4756 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4758 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4759 expand_compound_operation (op0),
4760 GET_MODE_BITSIZE (mode) - 1 - i);
4761 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4767 /* If the code changed, return a whole new comparison. */
4768 if (new_code != code)
4769 return gen_rtx_fmt_ee (new_code, mode, op0, op1);
4771 /* Otherwise, keep this operation, but maybe change its operands.
4772 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4773 SUBST (XEXP (x, 0), op0);
4774 SUBST (XEXP (x, 1), op1);
4779 return simplify_if_then_else (x);
4785 /* If we are processing SET_DEST, we are done. */
4789 return expand_compound_operation (x);
4792 return simplify_set (x);
4797 return simplify_logical (x);
4800 /* (abs (neg <foo>)) -> (abs <foo>) */
4801 if (GET_CODE (XEXP (x, 0)) == NEG)
4802 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4804 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4806 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4809 /* If operand is something known to be positive, ignore the ABS. */
4810 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4811 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4812 <= HOST_BITS_PER_WIDE_INT)
4813 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4814 & ((HOST_WIDE_INT) 1
4815 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4819 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4820 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4821 return gen_rtx_NEG (mode, XEXP (x, 0));
4826 /* (ffs (*_extend <X>)) = (ffs <X>) */
4827 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4828 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4829 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4834 /* (pop* (zero_extend <X>)) = (pop* <X>) */
4835 if (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4836 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4840 /* (float (sign_extend <X>)) = (float <X>). */
4841 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4842 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4850 /* If this is a shift by a constant amount, simplify it. */
4851 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4852 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4853 INTVAL (XEXP (x, 1)));
4855 else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
4857 force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
4859 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4866 rtx op0 = XEXP (x, 0);
4867 rtx op1 = XEXP (x, 1);
4870 gcc_assert (GET_CODE (op1) == PARALLEL);
4871 len = XVECLEN (op1, 0);
4873 && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4874 && GET_CODE (op0) == VEC_CONCAT)
4876 int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4878 /* Try to find the element in the VEC_CONCAT. */
4881 if (GET_MODE (op0) == GET_MODE (x))
4883 if (GET_CODE (op0) == VEC_CONCAT)
4885 HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
4886 if (offset < op0_size)
4887 op0 = XEXP (op0, 0);
4891 op0 = XEXP (op0, 1);
4909 /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
4912 simplify_if_then_else (rtx x)
4914 enum machine_mode mode = GET_MODE (x);
4915 rtx cond = XEXP (x, 0);
4916 rtx true_rtx = XEXP (x, 1);
4917 rtx false_rtx = XEXP (x, 2);
4918 enum rtx_code true_code = GET_CODE (cond);
4919 int comparison_p = COMPARISON_P (cond);
4922 enum rtx_code false_code;
4925 /* Simplify storing of the truth value. */
4926 if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
4927 return simplify_gen_relational (true_code, mode, VOIDmode,
4928 XEXP (cond, 0), XEXP (cond, 1));
4930 /* Also when the truth value has to be reversed. */
4932 && true_rtx == const0_rtx && false_rtx == const_true_rtx
4933 && (reversed = reversed_comparison (cond, mode)))
4936 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4937 in it is being compared against certain values. Get the true and false
4938 comparisons and see if that says anything about the value of each arm. */
4941 && ((false_code = reversed_comparison_code (cond, NULL))
4943 && REG_P (XEXP (cond, 0)))
4946 rtx from = XEXP (cond, 0);
4947 rtx true_val = XEXP (cond, 1);
4948 rtx false_val = true_val;
4951 /* If FALSE_CODE is EQ, swap the codes and arms. */
4953 if (false_code == EQ)
4955 swapped = 1, true_code = EQ, false_code = NE;
4956 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4959 /* If we are comparing against zero and the expression being tested has
4960 only a single bit that might be nonzero, that is its value when it is
4961 not equal to zero. Similarly if it is known to be -1 or 0. */
4963 if (true_code == EQ && true_val == const0_rtx
4964 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4965 false_code = EQ, false_val = GEN_INT (nzb);
4966 else if (true_code == EQ && true_val == const0_rtx
4967 && (num_sign_bit_copies (from, GET_MODE (from))
4968 == GET_MODE_BITSIZE (GET_MODE (from))))
4969 false_code = EQ, false_val = constm1_rtx;
4971 /* Now simplify an arm if we know the value of the register in the
4972 branch and it is used in the arm. Be careful due to the potential
4973 of locally-shared RTL. */
4975 if (reg_mentioned_p (from, true_rtx))
4976 true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
4978 pc_rtx, pc_rtx, 0, 0);
4979 if (reg_mentioned_p (from, false_rtx))
4980 false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
4982 pc_rtx, pc_rtx, 0, 0);
4984 SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
4985 SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
4987 true_rtx = XEXP (x, 1);
4988 false_rtx = XEXP (x, 2);
4989 true_code = GET_CODE (cond);
4992 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4993 reversed, do so to avoid needing two sets of patterns for
4994 subtract-and-branch insns. Similarly if we have a constant in the true
4995 arm, the false arm is the same as the first operand of the comparison, or
4996 the false arm is more complicated than the true arm. */
4999 && reversed_comparison_code (cond, NULL) != UNKNOWN
5000 && (true_rtx == pc_rtx
5001 || (CONSTANT_P (true_rtx)
5002 && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
5003 || true_rtx == const0_rtx
5004 || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
5005 || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
5006 && !OBJECT_P (false_rtx))
5007 || reg_mentioned_p (true_rtx, false_rtx)
5008 || rtx_equal_p (false_rtx, XEXP (cond, 0))))
5010 true_code = reversed_comparison_code (cond, NULL);
5011 SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
5012 SUBST (XEXP (x, 1), false_rtx);
5013 SUBST (XEXP (x, 2), true_rtx);
5015 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
5018 /* It is possible that the conditional has been simplified out. */
5019 true_code = GET_CODE (cond);
5020 comparison_p = COMPARISON_P (cond);
5023 /* If the two arms are identical, we don't need the comparison. */
5025 if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
5028 /* Convert a == b ? b : a to "a". */
5029 if (true_code == EQ && ! side_effects_p (cond)
5030 && !HONOR_NANS (mode)
5031 && rtx_equal_p (XEXP (cond, 0), false_rtx)
5032 && rtx_equal_p (XEXP (cond, 1), true_rtx))
5034 else if (true_code == NE && ! side_effects_p (cond)
5035 && !HONOR_NANS (mode)
5036 && rtx_equal_p (XEXP (cond, 0), true_rtx)
5037 && rtx_equal_p (XEXP (cond, 1), false_rtx))
5040 /* Look for cases where we have (abs x) or (neg (abs X)). */
5042 if (GET_MODE_CLASS (mode) == MODE_INT
5043 && GET_CODE (false_rtx) == NEG
5044 && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
5046 && rtx_equal_p (true_rtx, XEXP (cond, 0))
5047 && ! side_effects_p (true_rtx))
5052 return simplify_gen_unary (ABS, mode, true_rtx, mode);
5056 simplify_gen_unary (NEG, mode,
5057 simplify_gen_unary (ABS, mode, true_rtx, mode),
5063 /* Look for MIN or MAX. */
5065 if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
5067 && rtx_equal_p (XEXP (cond, 0), true_rtx)
5068 && rtx_equal_p (XEXP (cond, 1), false_rtx)
5069 && ! side_effects_p (cond))
5074 return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
5077 return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
5080 return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
5083 return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
5088 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
5089 second operand is zero, this can be done as (OP Z (mult COND C2)) where
5090 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
5091 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
5092 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
5093 neither 1 or -1, but it isn't worth checking for. */
5095 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
5097 && GET_MODE_CLASS (mode) == MODE_INT
5098 && ! side_effects_p (x))
5100 rtx t = make_compound_operation (true_rtx, SET);
5101 rtx f = make_compound_operation (false_rtx, SET);
5102 rtx cond_op0 = XEXP (cond, 0);
5103 rtx cond_op1 = XEXP (cond, 1);
5104 enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
5105 enum machine_mode m = mode;
5106 rtx z = 0, c1 = NULL_RTX;
5108 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
5109 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
5110 || GET_CODE (t) == ASHIFT
5111 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
5112 && rtx_equal_p (XEXP (t, 0), f))
5113 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
5115 /* If an identity-zero op is commutative, check whether there
5116 would be a match if we swapped the operands. */
5117 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
5118 || GET_CODE (t) == XOR)
5119 && rtx_equal_p (XEXP (t, 1), f))
5120 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
5121 else if (GET_CODE (t) == SIGN_EXTEND
5122 && (GET_CODE (XEXP (t, 0)) == PLUS
5123 || GET_CODE (XEXP (t, 0)) == MINUS
5124 || GET_CODE (XEXP (t, 0)) == IOR
5125 || GET_CODE (XEXP (t, 0)) == XOR
5126 || GET_CODE (XEXP (t, 0)) == ASHIFT
5127 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
5128 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
5129 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
5130 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5131 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5132 && (num_sign_bit_copies (f, GET_MODE (f))
5134 (GET_MODE_BITSIZE (mode)
5135 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
5137 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5138 extend_op = SIGN_EXTEND;
5139 m = GET_MODE (XEXP (t, 0));
5141 else if (GET_CODE (t) == SIGN_EXTEND
5142 && (GET_CODE (XEXP (t, 0)) == PLUS
5143 || GET_CODE (XEXP (t, 0)) == IOR
5144 || GET_CODE (XEXP (t, 0)) == XOR)
5145 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5146 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5147 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5148 && (num_sign_bit_copies (f, GET_MODE (f))
5150 (GET_MODE_BITSIZE (mode)
5151 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
5153 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5154 extend_op = SIGN_EXTEND;
5155 m = GET_MODE (XEXP (t, 0));
5157 else if (GET_CODE (t) == ZERO_EXTEND
5158 && (GET_CODE (XEXP (t, 0)) == PLUS
5159 || GET_CODE (XEXP (t, 0)) == MINUS
5160 || GET_CODE (XEXP (t, 0)) == IOR
5161 || GET_CODE (XEXP (t, 0)) == XOR
5162 || GET_CODE (XEXP (t, 0)) == ASHIFT
5163 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
5164 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
5165 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
5166 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5167 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5168 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5169 && ((nonzero_bits (f, GET_MODE (f))
5170 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
5173 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5174 extend_op = ZERO_EXTEND;
5175 m = GET_MODE (XEXP (t, 0));
5177 else if (GET_CODE (t) == ZERO_EXTEND
5178 && (GET_CODE (XEXP (t, 0)) == PLUS
5179 || GET_CODE (XEXP (t, 0)) == IOR
5180 || GET_CODE (XEXP (t, 0)) == XOR)
5181 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5182 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5183 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5184 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5185 && ((nonzero_bits (f, GET_MODE (f))
5186 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
5189 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5190 extend_op = ZERO_EXTEND;
5191 m = GET_MODE (XEXP (t, 0));
5196 temp = subst (simplify_gen_relational (true_code, m, VOIDmode,
5197 cond_op0, cond_op1),
5198 pc_rtx, pc_rtx, 0, 0);
5199 temp = simplify_gen_binary (MULT, m, temp,
5200 simplify_gen_binary (MULT, m, c1,
5202 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
5203 temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);
5205 if (extend_op != UNKNOWN)
5206 temp = simplify_gen_unary (extend_op, mode, temp, m);
5212 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
5213 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
5214 negation of a single bit, we can convert this operation to a shift. We
5215 can actually do this more generally, but it doesn't seem worth it. */
5217 if (true_code == NE && XEXP (cond, 1) == const0_rtx
5218 && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
5219 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
5220 && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
5221 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
5222 == GET_MODE_BITSIZE (mode))
5223 && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
5225 simplify_shift_const (NULL_RTX, ASHIFT, mode,
5226 gen_lowpart (mode, XEXP (cond, 0)), i);
5228 /* (IF_THEN_ELSE (NE REG 0) (0) (8)) is REG for nonzero_bits (REG) == 8. */
5229 if (true_code == NE && XEXP (cond, 1) == const0_rtx
5230 && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
5231 && GET_MODE (XEXP (cond, 0)) == mode
5232 && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
5233 == nonzero_bits (XEXP (cond, 0), mode)
5234 && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
5235 return XEXP (cond, 0);
5240 /* Simplify X, a SET expression. Return the new expression. */
/* NOTE(review): this listing appears to have interior lines elided (the
   embedded original line numbers are non-contiguous), so some statements,
   braces, and declarations (e.g. the return type, cc_use/other_insn/tmp/op0/
   op1 declarations) are missing from view.  Code below is kept byte-identical
   to the listing; comments only are added.  Verify against the full file.  */
5243 simplify_set (rtx x)
5245 rtx src = SET_SRC (x);
5246 rtx dest = SET_DEST (x);
5247 enum machine_mode mode
5248 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
5252 /* (set (pc) (return)) gets written as (return). */
5253 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
5256 /* Now that we know for sure which bits of SRC we are using, see if we can
5257 simplify the expression for the object knowing that we only need the
5260 if (GET_MODE_CLASS (mode) == MODE_INT
5261 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5263 src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
5264 SUBST (SET_SRC (x), src);
5267 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
5268 the comparison result and try to simplify it unless we already have used
5269 undobuf.other_insn. */
5270 if ((GET_MODE_CLASS (mode) == MODE_CC
5271 || GET_CODE (src) == COMPARE
5273 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
5274 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
5275 && COMPARISON_P (*cc_use)
5276 && rtx_equal_p (XEXP (*cc_use, 0), dest))
5278 enum rtx_code old_code = GET_CODE (*cc_use);
5279 enum rtx_code new_code;
5281 int other_changed = 0;
5282 enum machine_mode compare_mode = GET_MODE (dest);
/* Split SRC into the two operands being compared: either an explicit
   COMPARE, or SRC itself compared against zero.  */
5284 if (GET_CODE (src) == COMPARE)
5285 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
5287 op0 = src, op1 = CONST0_RTX (GET_MODE (src));
5289 tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
5292 new_code = old_code;
5293 else if (!CONSTANT_P (tmp))
5295 new_code = GET_CODE (tmp);
5296 op0 = XEXP (tmp, 0);
5297 op1 = XEXP (tmp, 1);
/* The comparison folded to a constant: substitute it into the user insn
   and turn this SET into a no-op move (pc := pc).  */
5301 rtx pat = PATTERN (other_insn);
5302 undobuf.other_insn = other_insn;
5303 SUBST (*cc_use, tmp);
5305 /* Attempt to simplify CC user. */
5306 if (GET_CODE (pat) == SET)
5308 rtx new = simplify_rtx (SET_SRC (pat));
5309 if (new != NULL_RTX)
5310 SUBST (SET_SRC (pat), new);
5313 /* Convert X into a no-op move. */
5314 SUBST (SET_DEST (x), pc_rtx);
5315 SUBST (SET_SRC (x), pc_rtx);
5319 /* Simplify our comparison, if possible. */
5320 new_code = simplify_comparison (new_code, &op0, &op1);
5322 #ifdef SELECT_CC_MODE
5323 /* If this machine has CC modes other than CCmode, check to see if we
5324 need to use a different CC mode here. */
5325 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
5326 compare_mode = GET_MODE (op0);
5328 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
5331 /* If the mode changed, we have to change SET_DEST, the mode in the
5332 compare, and the mode in the place SET_DEST is used. If SET_DEST is
5333 a hard register, just build new versions with the proper mode. If it
5334 is a pseudo, we lose unless it is only time we set the pseudo, in
5335 which case we can safely change its mode. */
5336 if (compare_mode != GET_MODE (dest))
5338 if (can_change_dest_mode (dest, 0, compare_mode))
5340 unsigned int regno = REGNO (dest);
5341 rtx new_dest = gen_rtx_REG (compare_mode, regno);
5343 if (regno >= FIRST_PSEUDO_REGISTER)
5344 SUBST (regno_reg_rtx[regno], new_dest);
5346 SUBST (SET_DEST (x), new_dest);
5347 SUBST (XEXP (*cc_use, 0), new_dest);
5354 #endif /* SELECT_CC_MODE */
5356 /* If the code changed, we have to build a new comparison in
5357 undobuf.other_insn. */
5358 if (new_code != old_code)
5360 int other_changed_previously = other_changed;
5361 unsigned HOST_WIDE_INT mask;
5363 SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
5367 /* If the only change we made was to change an EQ into an NE or
5368 vice versa, OP0 has only one bit that might be nonzero, and OP1
5369 is zero, check if changing the user of the condition code will
5370 produce a valid insn. If it won't, we can keep the original code
5371 in that insn by surrounding our operation with an XOR. */
5373 if (((old_code == NE && new_code == EQ)
5374 || (old_code == EQ && new_code == NE))
5375 && ! other_changed_previously && op1 == const0_rtx
5376 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
5377 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
5379 rtx pat = PATTERN (other_insn), note = 0;
/* NOTE(review): "¬e" on the next line looks like mojibake for "&note"
   (an "&not;" HTML entity decoded during extraction) — confirm against
   the upstream combine.c before compiling.  */
5381 if ((recog_for_combine (&pat, other_insn, ¬e) < 0
5382 && ! check_asm_operands (pat)))
5384 PUT_CODE (*cc_use, old_code);
5387 op0 = simplify_gen_binary (XOR, GET_MODE (op0),
5388 op0, GEN_INT (mask));
5394 undobuf.other_insn = other_insn;
5397 /* If we are now comparing against zero, change our source if
5398 needed. If we do not use cc0, we always have a COMPARE. */
5399 if (op1 == const0_rtx && dest == cc0_rtx)
5401 SUBST (SET_SRC (x), op0);
5407 /* Otherwise, if we didn't previously have a COMPARE in the
5408 correct mode, we need one. */
5409 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5411 SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
5414 else if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
5416 SUBST(SET_SRC (x), op0);
5421 /* Otherwise, update the COMPARE if needed. */
5422 SUBST (XEXP (src, 0), op0);
5423 SUBST (XEXP (src, 1), op1);
5428 /* Get SET_SRC in a form where we have placed back any
5429 compound expressions. Then do the checks below. */
5430 src = make_compound_operation (src, SET);
5431 SUBST (SET_SRC (x), src);
5434 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5435 and X being a REG or (subreg (reg)), we may be able to convert this to
5436 (set (subreg:m2 x) (op)).
5438 We can always do this if M1 is narrower than M2 because that means that
5439 we only care about the low bits of the result.
5441 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
5442 perform a narrower operation than requested since the high-order bits will
5443 be undefined. On machine where it is defined, this transformation is safe
5444 as long as M1 and M2 have the same number of words. */
5446 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5447 && !OBJECT_P (SUBREG_REG (src))
5448 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5450 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5451 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
5452 #ifndef WORD_REGISTER_OPERATIONS
5453 && (GET_MODE_SIZE (GET_MODE (src))
5454 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5456 #ifdef CANNOT_CHANGE_MODE_CLASS
5457 && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
5458 && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
5459 GET_MODE (SUBREG_REG (src)),
5463 || (GET_CODE (dest) == SUBREG
5464 && REG_P (SUBREG_REG (dest)))))
5466 SUBST (SET_DEST (x),
5467 gen_lowpart (GET_MODE (SUBREG_REG (src)),
5469 SUBST (SET_SRC (x), SUBREG_REG (src));
5471 src = SET_SRC (x), dest = SET_DEST (x);
5475 /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
5478 && GET_CODE (src) == SUBREG
5479 && subreg_lowpart_p (src)
5480 && (GET_MODE_BITSIZE (GET_MODE (src))
5481 < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
5483 rtx inner = SUBREG_REG (src);
5484 enum machine_mode inner_mode = GET_MODE (inner);
5486 /* Here we make sure that we don't have a sign bit on. */
5487 if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
5488 && (nonzero_bits (inner, inner_mode)
5489 < ((unsigned HOST_WIDE_INT) 1
5490 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
5492 SUBST (SET_SRC (x), inner);
5498 #ifdef LOAD_EXTEND_OP
5499 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5500 would require a paradoxical subreg. Replace the subreg with a
5501 zero_extend to avoid the reload that would otherwise be required. */
5503 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5504 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != UNKNOWN
5505 && SUBREG_BYTE (src) == 0
5506 && (GET_MODE_SIZE (GET_MODE (src))
5507 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5508 && MEM_P (SUBREG_REG (src)))
5511 gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
5512 GET_MODE (src), SUBREG_REG (src)));
5518 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5519 are comparing an item known to be 0 or -1 against 0, use a logical
5520 operation instead. Check for one of the arms being an IOR of the other
5521 arm with some value. We compute three terms to be IOR'ed together. In
5522 practice, at most two will be nonzero. Then we do the IOR's. */
5524 if (GET_CODE (dest) != PC
5525 && GET_CODE (src) == IF_THEN_ELSE
5526 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
5527 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5528 && XEXP (XEXP (src, 0), 1) == const0_rtx
5529 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
5530 #ifdef HAVE_conditional_move
5531 && ! can_conditionally_move_p (GET_MODE (src))
5533 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5534 GET_MODE (XEXP (XEXP (src, 0), 0)))
5535 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5536 && ! side_effects_p (src))
/* Normalize so TRUE_RTX is the arm chosen when the condition holds
   (treating the comparison as NE).  */
5538 rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
5539 ? XEXP (src, 1) : XEXP (src, 2));
5540 rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
5541 ? XEXP (src, 2) : XEXP (src, 1));
5542 rtx term1 = const0_rtx, term2, term3;
5544 if (GET_CODE (true_rtx) == IOR
5545 && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
5546 term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
5547 else if (GET_CODE (true_rtx) == IOR
5548 && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
5549 term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
5550 else if (GET_CODE (false_rtx) == IOR
5551 && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
5552 term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
5553 else if (GET_CODE (false_rtx) == IOR
5554 && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
5555 term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
5557 term2 = simplify_gen_binary (AND, GET_MODE (src),
5558 XEXP (XEXP (src, 0), 0), true_rtx);
5559 term3 = simplify_gen_binary (AND, GET_MODE (src),
5560 simplify_gen_unary (NOT, GET_MODE (src),
5561 XEXP (XEXP (src, 0), 0),
5566 simplify_gen_binary (IOR, GET_MODE (src),
5567 simplify_gen_binary (IOR, GET_MODE (src),
5574 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5575 whole thing fail. */
5576 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5578 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5581 /* Convert this into a field assignment operation, if possible. */
5582 return make_field_assignment (x);
5585 /* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
/* NOTE(review): interior lines are elided in this listing (non-contiguous
   embedded line numbers); missing pieces include the return type, case
   labels, `break`s, and several `return` statements.  Code is byte-identical
   to the listing; only comments are added.  */
5589 simplify_logical (rtx x)
5591 enum machine_mode mode = GET_MODE (x);
5592 rtx op0 = XEXP (x, 0);
5593 rtx op1 = XEXP (x, 1);
/* Dispatch on the logical operation; the case labels (AND/IOR/XOR) are
   among the elided lines.  */
5596 switch (GET_CODE (x))
5599 /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
5600 insn (and may simplify more). */
5601 if (GET_CODE (op0) == XOR
5602 && rtx_equal_p (XEXP (op0, 0), op1)
5603 && ! side_effects_p (op1))
5604 x = simplify_gen_binary (AND, mode,
5605 simplify_gen_unary (NOT, mode,
5606 XEXP (op0, 1), mode),
5609 if (GET_CODE (op0) == XOR
5610 && rtx_equal_p (XEXP (op0, 1), op1)
5611 && ! side_effects_p (op1))
5612 x = simplify_gen_binary (AND, mode,
5613 simplify_gen_unary (NOT, mode,
5614 XEXP (op0, 0), mode),
5617 /* Similarly for (~(A ^ B)) & A. */
5618 if (GET_CODE (op0) == NOT
5619 && GET_CODE (XEXP (op0, 0)) == XOR
5620 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
5621 && ! side_effects_p (op1))
5622 x = simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
5624 if (GET_CODE (op0) == NOT
5625 && GET_CODE (XEXP (op0, 0)) == XOR
5626 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5627 && ! side_effects_p (op1))
5628 x = simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5630 /* We can call simplify_and_const_int only if we don't lose
5631 any (sign) bits when converting INTVAL (op1) to
5632 "unsigned HOST_WIDE_INT". */
5633 if (GET_CODE (op1) == CONST_INT
5634 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5635 || INTVAL (op1) > 0))
5637 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
5639 /* If we have (ior (and (X C1) C2)) and the next restart would be
5640 the last, simplify this by making C1 as small as possible
5641 and then exit. Only do this if C1 actually changes: for now
5642 this only saves memory but, should this transformation be
5643 moved to simplify-rtx.c, we'd risk unbounded recursion there. */
5644 if (GET_CODE (x) == IOR && GET_CODE (op0) == AND
5645 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5646 && GET_CODE (op1) == CONST_INT
5647 && (INTVAL (XEXP (op0, 1)) & INTVAL (op1)) != 0)
5648 return simplify_gen_binary (IOR, mode,
5650 (AND, mode, XEXP (op0, 0),
5651 GEN_INT (INTVAL (XEXP (op0, 1))
5652 & ~INTVAL (op1))), op1);
5654 if (GET_CODE (x) != AND)
5661 /* Convert (A | B) & A to A. */
5662 if (GET_CODE (op0) == IOR
5663 && (rtx_equal_p (XEXP (op0, 0), op1)
5664 || rtx_equal_p (XEXP (op0, 1), op1))
5665 && ! side_effects_p (XEXP (op0, 0))
5666 && ! side_effects_p (XEXP (op0, 1)))
5669 /* If we have any of (and (ior A B) C) or (and (xor A B) C),
5670 apply the distributive law and then the inverse distributive
5671 law to see if things simplify. */
5672 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
5674 rtx result = distribute_and_simplify_rtx (x, 0);
5678 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5680 rtx result = distribute_and_simplify_rtx (x, 1);
5687 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
5688 if (GET_CODE (op1) == CONST_INT
5689 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5690 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
5693 /* Convert (A & B) | A to A. */
5694 if (GET_CODE (op0) == AND
5695 && (rtx_equal_p (XEXP (op0, 0), op1)
5696 || rtx_equal_p (XEXP (op0, 1), op1))
5697 && ! side_effects_p (XEXP (op0, 0))
5698 && ! side_effects_p (XEXP (op0, 1)))
5701 /* If we have (ior (and A B) C), apply the distributive law and then
5702 the inverse distributive law to see if things simplify. */
5704 if (GET_CODE (op0) == AND)
5706 rtx result = distribute_and_simplify_rtx (x, 0);
5711 if (GET_CODE (op1) == AND)
5713 rtx result = distribute_and_simplify_rtx (x, 1);
5718 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5719 mode size to (rotate A CX). */
5721 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5722 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5723 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5724 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5725 && GET_CODE (XEXP (op1, 1)) == CONST_INT
5726 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
5727 == GET_MODE_BITSIZE (mode)))
5728 return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5729 (GET_CODE (op0) == ASHIFT
5730 ? XEXP (op0, 1) : XEXP (op1, 1)));
5732 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5733 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
5734 does not affect any of the bits in OP1, it can really be done
5735 as a PLUS and we can associate. We do this by seeing if OP1
5736 can be safely shifted left C bits. */
5737 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5738 && GET_CODE (XEXP (op0, 0)) == PLUS
5739 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5740 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5741 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
5743 int count = INTVAL (XEXP (op0, 1));
5744 HOST_WIDE_INT mask = INTVAL (op1) << count;
/* mask >> count == INTVAL (op1) verifies the left shift lost no bits.  */
5746 if (mask >> count == INTVAL (op1)
5747 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5749 SUBST (XEXP (XEXP (op0, 0), 1),
5750 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5757 /* If we are XORing two things that have no bits in common,
5758 convert them into an IOR. This helps to detect rotation encoded
5759 using those methods and possibly other simplifications. */
5761 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5762 && (nonzero_bits (op0, mode)
5763 & nonzero_bits (op1, mode)) == 0)
5764 return (simplify_gen_binary (IOR, mode, op0, op1));
5766 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5767 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5770 int num_negated = 0;
5772 if (GET_CODE (op0) == NOT)
5773 num_negated++, op0 = XEXP (op0, 0);
5774 if (GET_CODE (op1) == NOT)
5775 num_negated++, op1 = XEXP (op1, 0);
5777 if (num_negated == 2)
5779 SUBST (XEXP (x, 0), op0);
5780 SUBST (XEXP (x, 1), op1);
5782 else if (num_negated == 1)
5784 simplify_gen_unary (NOT, mode,
5785 simplify_gen_binary (XOR, mode, op0, op1),
5789 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
5790 correspond to a machine insn or result in further simplifications
5791 if B is a constant. */
5793 if (GET_CODE (op0) == AND
5794 && rtx_equal_p (XEXP (op0, 1), op1)
5795 && ! side_effects_p (op1))
5796 return simplify_gen_binary (AND, mode,
5797 simplify_gen_unary (NOT, mode,
5798 XEXP (op0, 0), mode),
5801 else if (GET_CODE (op0) == AND
5802 && rtx_equal_p (XEXP (op0, 0), op1)
5803 && ! side_effects_p (op1))
5804 return simplify_gen_binary (AND, mode,
5805 simplify_gen_unary (NOT, mode,
5806 XEXP (op0, 1), mode),
5809 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
5810 comparison if STORE_FLAG_VALUE is 1. */
5811 if (STORE_FLAG_VALUE == 1
5812 && op1 == const1_rtx
5813 && COMPARISON_P (op0)
5814 && (reversed = reversed_comparison (op0, mode)))
5817 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5818 is (lt foo (const_int 0)), so we can perform the above
5819 simplification if STORE_FLAG_VALUE is 1. */
5821 if (STORE_FLAG_VALUE == 1
5822 && op1 == const1_rtx
5823 && GET_CODE (op0) == LSHIFTRT
5824 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5825 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5826 return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
5828 /* (xor (comparison foo bar) (const_int sign-bit))
5829 when STORE_FLAG_VALUE is the sign bit. */
5830 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5831 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5832 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5833 && op1 == const_true_rtx
5834 && COMPARISON_P (op0)
5835 && (reversed = reversed_comparison (op0, mode)))
5847 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5848 operations" because they can be replaced with two more basic operations.
5849 ZERO_EXTEND is also considered "compound" because it can be replaced with
5850 an AND operation, which is simpler, though only one operation.
5852 The function expand_compound_operation is called with an rtx expression
5853 and will convert it to the appropriate shifts and AND operations,
5854 simplifying at each stage.
5856 The function make_compound_operation is called to convert an expression
5857 consisting of shifts and ANDs into the equivalent compound expression.
5858 It is the inverse of this function, loosely speaking. */
/* NOTE(review): lines are elided in this listing (non-contiguous embedded
   line numbers): the return type, the `unsignedp`/`tem` declarations, case
   labels, and several returns are missing from view.  Code kept
   byte-identical; comments only added.  */
5861 expand_compound_operation (rtx x)
5863 unsigned HOST_WIDE_INT pos = 0, len;
5865 unsigned int modewidth;
/* Dispatch on extension/extraction kind; the case labels are elided.  */
5868 switch (GET_CODE (x))
5873 /* We can't necessarily use a const_int for a multiword mode;
5874 it depends on implicitly extending the value.
5875 Since we don't know the right way to extend it,
5876 we can't tell whether the implicit way is right.
5878 Even for a mode that is no wider than a const_int,
5879 we can't win, because we need to sign extend one of its bits through
5880 the rest of it, and we don't know which bit. */
5881 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5884 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5885 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5886 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5887 reloaded. If not for that, MEM's would very rarely be safe.
5889 Reject MODEs bigger than a word, because we might not be able
5890 to reference a two-register group starting with an arbitrary register
5891 (and currently gen_lowpart might crash for a SUBREG). */
5893 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5896 /* Reject MODEs that aren't scalar integers because turning vector
5897 or complex modes into shifts causes problems. */
5899 if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5902 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5903 /* If the inner object has VOIDmode (the only way this can happen
5904 is if it is an ASM_OPERANDS), we can't do anything since we don't
5905 know how much masking to do. */
5914 /* ... fall through ... */
5917 /* If the operand is a CLOBBER, just return it. */
5918 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5921 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5922 || GET_CODE (XEXP (x, 2)) != CONST_INT
5923 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5926 /* Reject MODEs that aren't scalar integers because turning vector
5927 or complex modes into shifts causes problems. */
5929 if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
/* For ZERO_/SIGN_EXTRACT: operand 1 is the length, operand 2 the
   position (counted from the LSB unless BITS_BIG_ENDIAN).  */
5932 len = INTVAL (XEXP (x, 1));
5933 pos = INTVAL (XEXP (x, 2));
5935 /* If this goes outside the object being extracted, replace the object
5936 with a (use (mem ...)) construct that only combine understands
5937 and is used only for this purpose. */
5938 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5939 SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
5941 if (BITS_BIG_ENDIAN)
5942 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5949 /* Convert sign extension to zero extension, if we know that the high
5950 bit is not set, as this is easier to optimize. It will be converted
5951 back to cheaper alternative in make_extraction. */
5952 if (GET_CODE (x) == SIGN_EXTEND
5953 && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5954 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5955 & ~(((unsigned HOST_WIDE_INT)
5956 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5960 rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5961 rtx temp2 = expand_compound_operation (temp);
5963 /* Make sure this is a profitable operation. */
5964 if (rtx_cost (x, SET) > rtx_cost (temp2, SET))
5966 else if (rtx_cost (x, SET) > rtx_cost (temp, SET))
5972 /* We can optimize some special cases of ZERO_EXTEND. */
5973 if (GET_CODE (x) == ZERO_EXTEND
5975 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5976 know that the last value didn't have any inappropriate bits
5978 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5979 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5980 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5981 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5982 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5983 return XEXP (XEXP (x, 0), 0);
5985 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5986 if (GET_CODE (XEXP (x, 0)) == SUBREG
5987 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5988 && subreg_lowpart_p (XEXP (x, 0))
5989 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5990 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5991 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5992 return SUBREG_REG (XEXP (x, 0));
5994 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5995 is a comparison and STORE_FLAG_VALUE permits. This is like
5996 the first case, but it works even when GET_MODE (x) is larger
5997 than HOST_WIDE_INT. */
5998 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5999 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
6000 && COMPARISON_P (XEXP (XEXP (x, 0), 0))
6001 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6002 <= HOST_BITS_PER_WIDE_INT)
6003 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
6004 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6005 return XEXP (XEXP (x, 0), 0);
6007 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
6008 if (GET_CODE (XEXP (x, 0)) == SUBREG
6009 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
6010 && subreg_lowpart_p (XEXP (x, 0))
6011 && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
6012 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6013 <= HOST_BITS_PER_WIDE_INT)
6014 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
6015 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6016 return SUBREG_REG (XEXP (x, 0));
6020 /* If we reach here, we want to return a pair of shifts. The inner
6021 shift is a left shift of BITSIZE - POS - LEN bits. The outer
6022 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
6023 logical depending on the value of UNSIGNEDP.
6025 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
6026 converted into an AND of a shift.
6028 We must check for the case where the left shift would have a negative
6029 count. This can happen in a case like (x >> 31) & 255 on machines
6030 that can't shift by a constant. On those machines, we would first
6031 combine the shift with the AND to produce a variable-position
6032 extraction. Then the constant of 31 would be substituted in to produce
6033 a such a position. */
6035 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
6036 if (modewidth + len >= pos)
6037 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
6039 simplify_shift_const (NULL_RTX, ASHIFT,
6042 modewidth - pos - len),
6045 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
6046 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
6047 simplify_shift_const (NULL_RTX, LSHIFTRT,
6050 ((HOST_WIDE_INT) 1 << len) - 1);
6052 /* Any other cases we can't handle. */
6055 /* If we couldn't do this for some reason, return the original
6057 if (GET_CODE (tem) == CLOBBER)
6063 /* X is a SET which contains an assignment of one object into
6064 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
6065 or certain SUBREGS). If possible, convert it into a series of
6068 We half-heartedly support variable positions, but do not at all
6069 support variable lengths. */
/* NOTE(review): this listing has elided lines (non-contiguous embedded line
   numbers): the return type, `rtx inner;`/`len` declarations, the loop
   construct around the body, `else` branches, and the final return are
   missing from view.  Code kept byte-identical; comments only added.  */
6072 expand_field_assignment (rtx x)
6075 rtx pos; /* Always counts from low bit. */
6077 rtx mask, cleared, masked;
6078 enum machine_mode compute_mode;
6080 /* Loop until we find something we can't simplify. */
/* Case 1: (strict_low_part (subreg ...)) destination — the field is the
   whole inner mode at the subreg's low-part offset.  */
6083 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6084 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
6086 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
6087 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
6088 pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
/* Case 2: (zero_extract ...) destination with a constant length.  */
6090 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
6091 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
6093 inner = XEXP (SET_DEST (x), 0);
6094 len = INTVAL (XEXP (SET_DEST (x), 1));
6095 pos = XEXP (SET_DEST (x), 2);
6097 /* If the position is constant and spans the width of INNER,
6098 surround INNER with a USE to indicate this. */
6099 if (GET_CODE (pos) == CONST_INT
6100 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
6101 inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
6103 if (BITS_BIG_ENDIAN)
6105 if (GET_CODE (pos) == CONST_INT)
6106 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
6108 else if (GET_CODE (pos) == MINUS
6109 && GET_CODE (XEXP (pos, 1)) == CONST_INT
6110 && (INTVAL (XEXP (pos, 1))
6111 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
6112 /* If position is ADJUST - X, new position is X. */
6113 pos = XEXP (pos, 0);
6115 pos = simplify_gen_binary (MINUS, GET_MODE (pos),
6116 GEN_INT (GET_MODE_BITSIZE (
6123 /* A SUBREG between two modes that occupy the same numbers of words
6124 can be done by moving the SUBREG to the source. */
6125 else if (GET_CODE (SET_DEST (x)) == SUBREG
6126 /* We need SUBREGs to compute nonzero_bits properly. */
6127 && nonzero_sign_valid
6128 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
6129 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6130 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
6131 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
6133 x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
6135 (GET_MODE (SUBREG_REG (SET_DEST (x))),
/* Strip lowpart SUBREGs so we do the arithmetic in the widest
   underlying register mode.  */
6142 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
6143 inner = SUBREG_REG (inner);
6145 compute_mode = GET_MODE (inner);
6147 /* Don't attempt bitwise arithmetic on non scalar integer modes. */
6148 if (! SCALAR_INT_MODE_P (compute_mode))
6150 enum machine_mode imode;
6152 /* Don't do anything for vector or complex integral types. */
6153 if (! FLOAT_MODE_P (compute_mode))
6156 /* Try to find an integral mode to pun with. */
6157 imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
6158 if (imode == BLKmode)
6161 compute_mode = imode;
6162 inner = gen_lowpart (imode, inner);
6165 /* Compute a mask of LEN bits, if we can do this on the host machine. */
6166 if (len >= HOST_BITS_PER_WIDE_INT)
6169 /* Now compute the equivalent expression. Make a copy of INNER
6170 for the SET_DEST in case it is a MEM into which we will substitute;
6171 we don't want shared RTL in that case. */
6172 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
/* cleared = INNER & ~(mask << pos): the field zeroed out.
   masked  = (SRC & mask) << pos:    the new field value in place.
   The result stored is cleared | masked.  */
6173 cleared = simplify_gen_binary (AND, compute_mode,
6174 simplify_gen_unary (NOT, compute_mode,
6175 simplify_gen_binary (ASHIFT,
6180 masked = simplify_gen_binary (ASHIFT, compute_mode,
6181 simplify_gen_binary (
6183 gen_lowpart (compute_mode, SET_SRC (x)),
6187 x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
6188 simplify_gen_binary (IOR, compute_mode,
6195 /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
6196 it is an RTX that represents a variable starting position; otherwise,
6197 POS is the (constant) starting bit position (counted from the LSB).
6199 INNER may be a USE. This will occur when we started with a bitfield
6200 that went outside the boundary of the object in memory, which is
6201 allowed on most machines. To isolate this case, we produce a USE
6202 whose mode is wide enough and surround the MEM with it. The only
6203 code that understands the USE is this routine. If it is not removed,
6204 it will cause the resulting insn not to match.
6206 UNSIGNEDP is nonzero for an unsigned reference and zero for a
6209 IN_DEST is nonzero if this is a reference in the destination of a
6210 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If nonzero,
6211 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
6214 IN_COMPARE is nonzero if we are in a COMPARE. This means that a
6215 ZERO_EXTRACT should be built even for bits starting at bit 0.
6217 MODE is the desired mode of the result (if IN_DEST == 0).
6219 The result is an RTX for the extraction or NULL_RTX if the target
6223 make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
6224 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
6225 int in_dest, int in_compare)
6227 /* This mode describes the size of the storage area
6228 to fetch the overall value from. Within that, we
6229 ignore the POS lowest bits, etc. */
6230 enum machine_mode is_mode = GET_MODE (inner);
6231 enum machine_mode inner_mode;
6232 enum machine_mode wanted_inner_mode = byte_mode;
6233 enum machine_mode wanted_inner_reg_mode = word_mode;
6234 enum machine_mode pos_mode = word_mode;
6235 enum machine_mode extraction_mode = word_mode;
6236 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
/* NOTE(review): TMODE is BLKmode when no integer mode of exactly LEN
   bits exists; the "tmode != BLKmode" test below depends on that.  */
6239 rtx orig_pos_rtx = pos_rtx;
6240 HOST_WIDE_INT orig_pos;
6242 /* Get some information about INNER and get the innermost object. */
6243 if (GET_CODE (inner) == USE)
6244 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
6245 /* We don't need to adjust the position because we set up the USE
6246 to pretend that it was a full-word object. */
6247 spans_byte = 1, inner = XEXP (inner, 0);
6248 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
6250 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
6251 consider just the QI as the memory to extract from.
6252 The subreg adds or removes high bits; its mode is
6253 irrelevant to the meaning of this extraction,
6254 since POS and LEN count from the lsb. */
6255 if (MEM_P (SUBREG_REG (inner)))
6256 is_mode = GET_MODE (SUBREG_REG (inner));
6257 inner = SUBREG_REG (inner);
6259 else if (GET_CODE (inner) == ASHIFT
6260 && GET_CODE (XEXP (inner, 1)) == CONST_INT
6261 && pos_rtx == 0 && pos == 0
6262 && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
6264 /* We're extracting the least significant bits of an rtx
6265 (ashift X (const_int C)), where LEN > C. Extract the
6266 least significant (LEN - C) bits of X, giving an rtx
6267 whose mode is MODE, then shift it left C times. */
6268 new = make_extraction (mode, XEXP (inner, 0),
6269 0, 0, len - INTVAL (XEXP (inner, 1)),
6270 unsignedp, in_dest, in_compare);
6272 return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1));
6275 inner_mode = GET_MODE (inner);
6277 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
6278 pos = INTVAL (pos_rtx), pos_rtx = 0;
6280 /* See if this can be done without an extraction. We never can if the
6281 width of the field is not the same as that of some integer mode. For
6282 registers, we can only avoid the extraction if the position is at the
6283 low-order bit and this is either not in the destination or we have the
6284 appropriate STRICT_LOW_PART operation available.
6286 For MEM, we can avoid an extract if the field starts on an appropriate
6287 boundary and we can change the mode of the memory reference. However,
6288 we cannot directly access the MEM if we have a USE and the underlying
6289 MEM is not TMODE. This combination means that MEM was being used in a
6290 context where bits outside its mode were being referenced; that is only
6291 valid in bit-field insns. */
6293 if (tmode != BLKmode
6294 && ! (spans_byte && inner_mode != tmode)
6295 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
6299 && have_insn_for (STRICT_LOW_PART, tmode))))
6300 || (MEM_P (inner) && pos_rtx == 0
6302 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
6303 : BITS_PER_UNIT)) == 0
6304 /* We can't do this if we are widening INNER_MODE (it
6305 may not be aligned, for one thing). */
6306 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
6307 && (inner_mode == tmode
6308 || (! mode_dependent_address_p (XEXP (inner, 0))
6309 && ! MEM_VOLATILE_P (inner))))))
6311 /* If INNER is a MEM, make a new MEM that encompasses just the desired
6312 field. If the original and current mode are the same, we need not
6313 adjust the offset. Otherwise, we do if bytes big endian.
6315 If INNER is not a MEM, get a piece consisting of just the field
6316 of interest (in this case POS % BITS_PER_WORD must be 0). */
6320 HOST_WIDE_INT offset;
6322 /* POS counts from lsb, but make OFFSET count in memory order. */
6323 if (BYTES_BIG_ENDIAN)
6324 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
6326 offset = pos / BITS_PER_UNIT;
6328 new = adjust_address_nv (inner, tmode, offset);
6330 else if (REG_P (inner))
6332 if (tmode != inner_mode)
6334 /* We can't call gen_lowpart in a DEST since we
6335 always want a SUBREG (see below) and it would sometimes
6336 return a new hard register. */
6339 HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
6341 if (WORDS_BIG_ENDIAN
6342 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
6343 final_word = ((GET_MODE_SIZE (inner_mode)
6344 - GET_MODE_SIZE (tmode))
6345 / UNITS_PER_WORD) - final_word;
6347 final_word *= UNITS_PER_WORD;
6348 if (BYTES_BIG_ENDIAN &&
6349 GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
6350 final_word += (GET_MODE_SIZE (inner_mode)
6351 - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
6353 /* Avoid creating invalid subregs, for example when
6354 simplifying (x>>32)&255. */
6355 if (!validate_subreg (tmode, inner_mode, inner, final_word))
6358 new = gen_rtx_SUBREG (tmode, inner, final_word);
6361 new = gen_lowpart (tmode, inner);
6367 new = force_to_mode (inner, tmode,
6368 len >= HOST_BITS_PER_WIDE_INT
6369 ? ~(unsigned HOST_WIDE_INT) 0
6370 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
6373 /* If this extraction is going into the destination of a SET,
6374 make a STRICT_LOW_PART unless we made a MEM. */
6377 return (MEM_P (new) ? new
6378 : (GET_CODE (new) != SUBREG
6379 ? gen_rtx_CLOBBER (tmode, const0_rtx)
6380 : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));
/* If the extraction collapsed to a constant, return it in MODE.  */
6385 if (GET_CODE (new) == CONST_INT)
6386 return gen_int_mode (INTVAL (new), mode);
6388 /* If we know that no extraneous bits are set, and that the high
6389 bit is not set, convert the extraction to the cheaper of
6390 sign and zero extension, that are equivalent in these cases. */
6391 if (flag_expensive_optimizations
6392 && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
6393 && ((nonzero_bits (new, tmode)
6394 & ~(((unsigned HOST_WIDE_INT)
6395 GET_MODE_MASK (tmode))
6399 rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
6400 rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
6402 /* Prefer ZERO_EXTENSION, since it gives more information to
6404 if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
6409 /* Otherwise, sign- or zero-extend unless we already are in the
6412 return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6416 /* Unless this is a COMPARE or we have a funny memory reference,
6417 don't do anything with zero-extending field extracts starting at
6418 the low-order bit since they are simple AND operations. */
6419 if (pos_rtx == 0 && pos == 0 && ! in_dest
6420 && ! in_compare && ! spans_byte && unsignedp)
6423 /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
6424 we would be spanning bytes or if the position is not a constant and the
6425 length is not 1. In all other cases, we would only be going outside
6426 our object in cases when an original shift would have been
6428 if (! spans_byte && MEM_P (inner)
6429 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6430 || (pos_rtx != 0 && len != 1)))
6433 /* Get the mode to use should INNER not be a MEM, the mode for the position,
6434 and the mode for the result. */
6435 if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
6437 wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
6438 pos_mode = mode_for_extraction (EP_insv, 2);
6439 extraction_mode = mode_for_extraction (EP_insv, 3);
6442 if (! in_dest && unsignedp
6443 && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
6445 wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
6446 pos_mode = mode_for_extraction (EP_extzv, 3);
6447 extraction_mode = mode_for_extraction (EP_extzv, 0);
6450 if (! in_dest && ! unsignedp
6451 && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
6453 wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
6454 pos_mode = mode_for_extraction (EP_extv, 3);
6455 extraction_mode = mode_for_extraction (EP_extv, 0);
6458 /* Never narrow an object, since that might not be safe. */
6460 if (mode != VOIDmode
6461 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6462 extraction_mode = mode;
6464 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6465 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6466 pos_mode = GET_MODE (pos_rtx);
6468 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
6469 if we have to change the mode of memory and cannot, the desired mode is
6472 wanted_inner_mode = wanted_inner_reg_mode;
6473 else if (inner_mode != wanted_inner_mode
6474 && (mode_dependent_address_p (XEXP (inner, 0))
6475 || MEM_VOLATILE_P (inner)))
6476 wanted_inner_mode = extraction_mode;
6480 if (BITS_BIG_ENDIAN)
6482 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6483 BITS_BIG_ENDIAN style. If position is constant, compute new
6484 position. Otherwise, build subtraction.
6485 Note that POS is relative to the mode of the original argument.
6486 If it's a MEM we need to recompute POS relative to that.
6487 However, if we're extracting from (or inserting into) a register,
6488 we want to recompute POS relative to wanted_inner_mode. */
6489 int width = (MEM_P (inner)
6490 ? GET_MODE_BITSIZE (is_mode)
6491 : GET_MODE_BITSIZE (wanted_inner_mode));
6494 pos = width - len - pos;
6497 = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
6498 /* POS may be less than 0 now, but we check for that below.
6499 Note that it can only be less than 0 if !MEM_P (inner). */
6502 /* If INNER has a wider mode, make it smaller. If this is a constant
6503 extract, try to adjust the byte to point to the byte containing
6505 if (wanted_inner_mode != VOIDmode
6506 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
6508 && (inner_mode == wanted_inner_mode
6509 || (! mode_dependent_address_p (XEXP (inner, 0))
6510 && ! MEM_VOLATILE_P (inner))))))
6514 /* The computations below will be correct if the machine is big
6515 endian in both bits and bytes or little endian in bits and bytes.
6516 If it is mixed, we must adjust. */
6518 /* If bytes are big endian and we had a paradoxical SUBREG, we must
6519 adjust OFFSET to compensate. */
6520 if (BYTES_BIG_ENDIAN
6522 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6523 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6525 /* If this is a constant position, we can move to the desired byte.
6526 Be careful not to go beyond the original object and maintain the
6527 natural alignment of the memory. */
6530 enum machine_mode bfmode = smallest_mode_for_size (len, MODE_INT);
6531 offset += (pos / GET_MODE_BITSIZE (bfmode)) * GET_MODE_SIZE (bfmode);
6532 pos %= GET_MODE_BITSIZE (bfmode);
6535 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6537 && is_mode != wanted_inner_mode)
6538 offset = (GET_MODE_SIZE (is_mode)
6539 - GET_MODE_SIZE (wanted_inner_mode) - offset);
6541 if (offset != 0 || inner_mode != wanted_inner_mode)
6542 inner = adjust_address_nv (inner, wanted_inner_mode, offset);
6545 /* If INNER is not memory, we can always get it into the proper mode. If we
6546 are changing its mode, POS must be a constant and smaller than the size
6548 else if (!MEM_P (inner))
6550 if (GET_MODE (inner) != wanted_inner_mode
6552 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6558 inner = force_to_mode (inner, wanted_inner_mode,
6560 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
6561 ? ~(unsigned HOST_WIDE_INT) 0
6562 : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6567 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
6568 have to zero extend. Otherwise, we can just use a SUBREG. */
6570 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
6572 rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
6574 /* If we know that no extraneous bits are set, and that the high
6575 bit is not set, convert extraction to cheaper one - either
6576 SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
6578 if (flag_expensive_optimizations
6579 && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
6580 && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
6581 & ~(((unsigned HOST_WIDE_INT)
6582 GET_MODE_MASK (GET_MODE (pos_rtx)))
6586 rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
6588 /* Prefer ZERO_EXTENSION, since it gives more information to
6590 if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
6595 else if (pos_rtx != 0
6596 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6597 pos_rtx = gen_lowpart (pos_mode, pos_rtx);
6599 /* Make POS_RTX unless we already have it and it is correct. If we don't
6600 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
6602 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
6603 pos_rtx = orig_pos_rtx;
6605 else if (pos_rtx == 0)
6606 pos_rtx = GEN_INT (pos);
6608 /* Make the required operation. See if we can use existing rtx. */
6609 new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
6610 extraction_mode, inner, GEN_INT (len), pos_rtx);
6612 new = gen_lowpart (mode, new);
6617 /* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6618 with any other operations in X. Return X without that shift if so. */
6621 extract_left_shift (rtx x, int count)
6623 enum rtx_code code = GET_CODE (x);
6624 enum machine_mode mode = GET_MODE (x);
6630 /* This is the shift itself. If it is wide enough, we will return
6631 either the value being shifted if the shift count is equal to
6632 COUNT or a shift for the difference. */
6633 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6634 && INTVAL (XEXP (x, 1)) >= count)
6635 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6636 INTVAL (XEXP (x, 1)) - count);
/* Unary operation: if the shift can be pulled out of the operand,
   rebuild the unary operation around the unshifted value.  */
6640 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6641 return simplify_gen_unary (code, mode, tem, mode);
6645 case PLUS: case IOR: case XOR: case AND:
6646 /* If we can safely shift this constant and we find the inner shift,
6647 make a new operation. */
6648 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6649 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
6650 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6651 return simplify_gen_binary (code, mode, tem,
6652 GEN_INT (INTVAL (XEXP (x, 1)) >> count))
6663 /* Look at the expression rooted at X. Look for expressions
6664 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6665 Form these expressions.
6667 Return the new rtx, usually just X.
6669 Also, for machines like the VAX that don't have logical shift insns,
6670 try to convert logical to arithmetic shift operations in cases where
6671 they are equivalent. This undoes the canonicalizations to logical
6672 shifts done elsewhere.
6674 We try, as much as possible, to re-use rtl expressions to save memory.
6676 IN_CODE says what kind of expression we are processing. Normally, it is
6677 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
6678 being kludges), it is MEM. When processing the arguments of a comparison
6679 or a COMPARE against zero, it is COMPARE. */
6682 make_compound_operation (rtx x, enum rtx_code in_code)
6684 enum rtx_code code = GET_CODE (x);
6685 enum machine_mode mode = GET_MODE (x);
6686 int mode_width = GET_MODE_BITSIZE (mode);
6688 enum rtx_code next_code;
6694 /* Select the code to be used in recursive calls. Once we are inside an
6695 address, we stay there. If we have a comparison, set to COMPARE,
6696 but once inside, go back to our default of SET. */
6698 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
6699 : ((code == COMPARE || COMPARISON_P (x))
6700 && XEXP (x, 1) == const0_rtx) ? COMPARE
6701 : in_code == COMPARE ? SET : in_code);
6703 /* Process depending on the code of this operation. If NEW is set
6704 nonzero, it will be returned. */
6709 /* Convert shifts by constants into multiplications if inside
6711 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
6712 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6713 && INTVAL (XEXP (x, 1)) >= 0)
6715 new = make_compound_operation (XEXP (x, 0), next_code);
6716 new = gen_rtx_MULT (mode, new,
6717 GEN_INT ((HOST_WIDE_INT) 1
6718 << INTVAL (XEXP (x, 1))));
6723 /* If the second operand is not a constant, we can't do anything
6725 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6728 /* If the constant is a power of two minus one and the first operand
6729 is a logical right shift, make an extraction. */
6730 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6731 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6733 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6734 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6735 0, in_code == COMPARE);
6738 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
6739 else if (GET_CODE (XEXP (x, 0)) == SUBREG
6740 && subreg_lowpart_p (XEXP (x, 0))
6741 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6742 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6744 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6746 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
6747 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6748 0, in_code == COMPARE);
6750 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
6751 else if ((GET_CODE (XEXP (x, 0)) == XOR
6752 || GET_CODE (XEXP (x, 0)) == IOR)
6753 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6754 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6755 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6757 /* Apply the distributive law, and then try to make extractions. */
6758 new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
6759 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6761 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6763 new = make_compound_operation (new, in_code);
6766 /* If we have (and (rotate X C) M) and C is larger than the number
6767 of bits in M, this is an extraction. */
6769 else if (GET_CODE (XEXP (x, 0)) == ROTATE
6770 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6771 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6772 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
6774 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6775 new = make_extraction (mode, new,
6776 (GET_MODE_BITSIZE (mode)
6777 - INTVAL (XEXP (XEXP (x, 0), 1))),
6778 NULL_RTX, i, 1, 0, in_code == COMPARE);
6781 /* On machines without logical shifts, if the operand of the AND is
6782 a logical shift and our mask turns off all the propagated sign
6783 bits, we can replace the logical shift with an arithmetic shift. */
6784 else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6785 && !have_insn_for (LSHIFTRT, mode)
6786 && have_insn_for (ASHIFTRT, mode)
6787 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6788 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6789 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6790 && mode_width <= HOST_BITS_PER_WIDE_INT
6792 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6794 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6795 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6797 gen_rtx_ASHIFTRT (mode,
6798 make_compound_operation
6799 (XEXP (XEXP (x, 0), 0), next_code),
6800 XEXP (XEXP (x, 0), 1)));
6803 /* If the constant is one less than a power of two, this might be
6804 representable by an extraction even if no shift is present.
6805 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6806 we are in a COMPARE. */
6807 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6808 new = make_extraction (mode,
6809 make_compound_operation (XEXP (x, 0),
6811 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6813 /* If we are in a comparison and this is an AND with a power of two,
6814 convert this into the appropriate bit extract. */
6815 else if (in_code == COMPARE
6816 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6817 new = make_extraction (mode,
6818 make_compound_operation (XEXP (x, 0),
6820 i, NULL_RTX, 1, 1, 0, 1);
6825 /* If the sign bit is known to be zero, replace this with an
6826 arithmetic shift. */
6827 if (have_insn_for (ASHIFTRT, mode)
6828 && ! have_insn_for (LSHIFTRT, mode)
6829 && mode_width <= HOST_BITS_PER_WIDE_INT
6830 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
6832 new = gen_rtx_ASHIFTRT (mode,
6833 make_compound_operation (XEXP (x, 0),
6839 /* ... fall through ... */
6845 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6846 this is a SIGN_EXTRACT. */
6847 if (GET_CODE (rhs) == CONST_INT
6848 && GET_CODE (lhs) == ASHIFT
6849 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6850 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6852 new = make_compound_operation (XEXP (lhs, 0), next_code);
6853 new = make_extraction (mode, new,
6854 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6855 NULL_RTX, mode_width - INTVAL (rhs),
6856 code == LSHIFTRT, 0, in_code == COMPARE);
6860 /* See if we have operations between an ASHIFTRT and an ASHIFT.
6861 If so, try to merge the shifts into a SIGN_EXTEND. We could
6862 also do this for some cases of SIGN_EXTRACT, but it doesn't
6863 seem worth the effort; the case checked for occurs on Alpha. */
6866 && ! (GET_CODE (lhs) == SUBREG
6867 && (OBJECT_P (SUBREG_REG (lhs))))
6868 && GET_CODE (rhs) == CONST_INT
6869 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6870 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6871 new = make_extraction (mode, make_compound_operation (new, next_code),
6872 0, NULL_RTX, mode_width - INTVAL (rhs),
6873 code == LSHIFTRT, 0, in_code == COMPARE);
6878 /* Call ourselves recursively on the inner expression. If we are
6879 narrowing the object and it has a different RTL code from
6880 what it originally did, do this SUBREG as a force_to_mode. */
6882 tem = make_compound_operation (SUBREG_REG (x), in_code);
6886 simplified = simplify_subreg (GET_MODE (x), tem, GET_MODE (tem),
6892 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6893 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6894 && subreg_lowpart_p (x))
6896 rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
6899 /* If we have something other than a SUBREG, we might have
6900 done an expansion, so rerun ourselves. */
6901 if (GET_CODE (newer) != SUBREG)
6902 newer = make_compound_operation (newer, in_code);
/* NOTE(review): looks like this conversion runs only when NEW was
   built by one of the cases above -- TODO confirm guard in full
   source.  */
6918 x = gen_lowpart (mode, new);
6919 code = GET_CODE (x);
6922 /* Now recursively process each operand of this operation. */
6923 fmt = GET_RTX_FORMAT (code);
6924 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6927 new = make_compound_operation (XEXP (x, i), next_code);
6928 SUBST (XEXP (x, i), new);
6934 /* Given M see if it is a value that would select a field of bits
6935 within an item, but not the entire word. Return -1 if not.
6936 Otherwise, return the starting position of the field, where 0 is the
6939 *PLEN is set to the length of the field. */
6942 get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
6944 /* Get the bit number of the first 1 bit from the right, -1 if none. */
6945 int pos = exact_log2 (m & -m);
/* exact_log2 (m & -m) is -1 when M is zero, matching the "return -1
   if not" contract in the function comment above.  */
6949 /* Now shift off the low-order zero bits and see if we have a
6950 power of two minus 1. */
6951 len = exact_log2 ((m >> pos) + 1);
6960 /* See if X can be simplified knowing that we will only refer to it in
6961 MODE and will only refer to those bits that are nonzero in MASK.
6962 If other bits are being computed or if masking operations are done
6963 that select a superset of the bits in MASK, they can sometimes be
6966 Return a possibly simplified expression, but always convert X to
6967 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
6969 Also, if REG is nonzero and X is a register equal in value to REG,
6972 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6973 are all off in X. This is used when X will be complemented, by either
6974 NOT, NEG, or XOR. */
6977 force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
6978 rtx reg, int just_select)
6980 enum rtx_code code = GET_CODE (x);
6981 int next_select = just_select || code == XOR || code == NOT || code == NEG;
6982 enum machine_mode op_mode;
6983 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6986 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
6987 code below will do the wrong thing since the mode of such an
6988 expression is VOIDmode.
6990 Also do nothing if X is a CLOBBER; this can happen if X was
6991 the return value from a call to gen_lowpart. */
6992 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
6995 /* We want to perform the operation is its present mode unless we know
6996 that the operation is valid in MODE, in which case we do the operation
6998 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6999 && have_insn_for (code, mode))
7000 ? mode : GET_MODE (x));
7002 /* It is not valid to do a right-shift in a narrower mode
7003 than the one it came in with. */
7004 if ((code == LSHIFTRT || code == ASHIFTRT)
7005 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
7006 op_mode = GET_MODE (x);
7008 /* Truncate MASK to fit OP_MODE. */
7010 mask &= GET_MODE_MASK (op_mode);
7012 /* When we have an arithmetic operation, or a shift whose count we
7013 do not know, we need to assume that all bits up to the highest-order
7014 bit in MASK will be needed. This is how we form such a mask. */
7015 if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
7016 fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
7018 fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
7021 /* Determine what bits of X are guaranteed to be (non)zero. */
7022 nonzero = nonzero_bits (x, mode);
7024 /* If none of the bits in X are needed, return a zero. */
7025 if (! just_select && (nonzero & mask) == 0)
7028 /* If X is a CONST_INT, return a new one. Do this here since the
7029 test below will fail. */
7030 if (GET_CODE (x) == CONST_INT)
7032 if (SCALAR_INT_MODE_P (mode))
7033 return gen_int_mode (INTVAL (x) & mask, mode);
7036 x = GEN_INT (INTVAL (x) & mask);
7037 return gen_lowpart_common (mode, x);
7041 /* If X is narrower than MODE and we want all the bits in X's mode, just
7042 get X in the proper mode. */
7043 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
7044 && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
7045 return gen_lowpart (mode, x);
7050 /* If X is a (clobber (const_int)), return it since we know we are
7051 generating something that won't match. */
7055 /* X is a (use (mem ..)) that was made from a bit-field extraction that
7056 spanned the boundary of the MEM. If we are now masking so it is
7057 within that boundary, we don't need the USE any more. */
7058 if (! BITS_BIG_ENDIAN
7059 && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
7060 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7067 x = expand_compound_operation (x);
7068 if (GET_CODE (x) != code)
7069 return force_to_mode (x, mode, mask, reg, next_select);
7073 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
7074 || rtx_equal_p (reg, get_last_value (x))))
7079 if (subreg_lowpart_p (x)
7080 /* We can ignore the effect of this SUBREG if it narrows the mode or
7081 if the constant masks to zero all the bits the mode doesn't
7083 && ((GET_MODE_SIZE (GET_MODE (x))
7084 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7086 & GET_MODE_MASK (GET_MODE (x))
7087 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
7088 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
7092 /* If this is an AND with a constant, convert it into an AND
7093 whose constant is the AND of that constant with MASK. If it
7094 remains an AND of MASK, delete it since it is redundant. */
7096 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7098 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
7099 mask & INTVAL (XEXP (x, 1)));
7101 /* If X is still an AND, see if it is an AND with a mask that
7102 is just some low-order bits. If so, and it is MASK, we don't
7105 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
7106 && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
7110 /* If it remains an AND, try making another AND with the bits
7111 in the mode mask that aren't in MASK turned on. If the
7112 constant in the AND is wide enough, this might make a
7113 cheaper constant. */
7115 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
7116 && GET_MODE_MASK (GET_MODE (x)) != mask
7117 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
7119 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
7120 | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
7121 int width = GET_MODE_BITSIZE (GET_MODE (x));
7124 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
7125 number, sign extend it. */
7126 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
7127 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7128 cval |= (HOST_WIDE_INT) -1 << width;
7130 y = simplify_gen_binary (AND, GET_MODE (x),
7131 XEXP (x, 0), GEN_INT (cval));
7132 if (rtx_cost (y, SET) < rtx_cost (x, SET))
7142 /* In (and (plus FOO C1) M), if M is a mask that just turns off
7143 low-order bits (as in an alignment operation) and FOO is already
7144 aligned to that boundary, mask C1 to that boundary as well.
7145 This may eliminate that PLUS and, later, the AND. */
7148 unsigned int width = GET_MODE_BITSIZE (mode);
7149 unsigned HOST_WIDE_INT smask = mask;
7151 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
7152 number, sign extend it. */
7154 if (width < HOST_BITS_PER_WIDE_INT
7155 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7156 smask |= (HOST_WIDE_INT) -1 << width;
7158 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7159 && exact_log2 (- smask) >= 0
7160 && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
7161 && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
7162 return force_to_mode (plus_constant (XEXP (x, 0),
7163 (INTVAL (XEXP (x, 1)) & smask)),
7164 mode, smask, reg, next_select);
7167 /* ... fall through ... */
7170 /* For PLUS, MINUS and MULT, we need any bits less significant than the
7171 most significant bit in MASK since carries from those bits will
7172 affect the bits we are interested in. */
7177 /* If X is (minus C Y) where C's least set bit is larger than any bit
7178 in the mask, then we may replace with (neg Y). */
7179 if (GET_CODE (XEXP (x, 0)) == CONST_INT
7180 && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
7181 & -INTVAL (XEXP (x, 0))))
7184 x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
7186 return force_to_mode (x, mode, mask, reg, next_select);
7189 /* Similarly, if C contains every bit in the fuller_mask, then we may
7190 replace with (not Y). */
7191 if (GET_CODE (XEXP (x, 0)) == CONST_INT
7192 && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
7193 == INTVAL (XEXP (x, 0))))
7195 x = simplify_gen_unary (NOT, GET_MODE (x),
7196 XEXP (x, 1), GET_MODE (x));
7197 return force_to_mode (x, mode, mask, reg, next_select);
7205 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
7206 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
7207 operation which may be a bitfield extraction. Ensure that the
7208 constant we form is not wider than the mode of X. */
7210 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7211 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7212 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7213 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
7214 && GET_CODE (XEXP (x, 1)) == CONST_INT
7215 && ((INTVAL (XEXP (XEXP (x, 0), 1))
7216 + floor_log2 (INTVAL (XEXP (x, 1))))
7217 < GET_MODE_BITSIZE (GET_MODE (x)))
7218 && (INTVAL (XEXP (x, 1))
7219 & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
7221 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
7222 << INTVAL (XEXP (XEXP (x, 0), 1)));
7223 temp = simplify_gen_binary (GET_CODE (x), GET_MODE (x),
7224 XEXP (XEXP (x, 0), 0), temp);
7225 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
7226 XEXP (XEXP (x, 0), 1));
7227 return force_to_mode (x, mode, mask, reg, next_select);
7231 /* For most binary operations, just propagate into the operation and
7232 change the mode if we have an operation of that mode. */
7234 op0 = gen_lowpart (op_mode,
7235 force_to_mode (XEXP (x, 0), mode, mask,
7237 op1 = gen_lowpart (op_mode,
7238 force_to_mode (XEXP (x, 1), mode, mask,
7241 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7242 x = simplify_gen_binary (code, op_mode, op0, op1);
7246 /* For left shifts, do the same, but just for the first operand.
7247 However, we cannot do anything with shifts where we cannot
7248 guarantee that the counts are smaller than the size of the mode
7249 because such a count will have a different meaning in a
7252 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
7253 && INTVAL (XEXP (x, 1)) >= 0
7254 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
7255 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
7256 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
7257 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
7260 /* If the shift count is a constant and we can do arithmetic in
7261 the mode of the shift, refine which bits we need. Otherwise, use the
7262 conservative form of the mask. */
7263 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7264 && INTVAL (XEXP (x, 1)) >= 0
7265 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
7266 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7267 mask >>= INTVAL (XEXP (x, 1));
7271 op0 = gen_lowpart (op_mode,
7272 force_to_mode (XEXP (x, 0), op_mode,
7273 mask, reg, next_select));
7275 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7276 x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
7280 /* Here we can only do something if the shift count is a constant,
7281 this shift constant is valid for the host, and we can do arithmetic
7284 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7285 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7286 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7288 rtx inner = XEXP (x, 0);
7289 unsigned HOST_WIDE_INT inner_mask;
7291 /* Select the mask of the bits we need for the shift operand. */
7292 inner_mask = mask << INTVAL (XEXP (x, 1));
7294 /* We can only change the mode of the shift if we can do arithmetic
7295 in the mode of the shift and INNER_MASK is no wider than the
7296 width of X's mode. */
7297 if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0)
7298 op_mode = GET_MODE (x);
7300 inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
7302 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
7303 x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
7306 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7307 shift and AND produces only copies of the sign bit (C2 is one less
7308 than a power of two), we can do this with just a shift. */
7310 if (GET_CODE (x) == LSHIFTRT
7311 && GET_CODE (XEXP (x, 1)) == CONST_INT
7312 /* The shift puts one of the sign bit copies in the least significant
7314 && ((INTVAL (XEXP (x, 1))
7315 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7316 >= GET_MODE_BITSIZE (GET_MODE (x)))
7317 && exact_log2 (mask + 1) >= 0
7318 /* Number of bits left after the shift must be more than the mask
7320 && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7321 <= GET_MODE_BITSIZE (GET_MODE (x)))
7322 /* Must be more sign bit copies than the mask needs. */
7323 && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
7324 >= exact_log2 (mask + 1)))
7325 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7326 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7327 - exact_log2 (mask + 1)));
7332 /* If we are just looking for the sign bit, we don't need this shift at
7333 all, even if it has a variable count. */
7334 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7335 && (mask == ((unsigned HOST_WIDE_INT) 1
7336 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7337 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7339 /* If this is a shift by a constant, get a mask that contains those bits
7340 that are not copies of the sign bit. We then have two cases: If
7341 MASK only includes those bits, this can be a logical shift, which may
7342 allow simplifications. If MASK is a single-bit field not within
7343 those bits, we are requesting a copy of the sign bit and hence can
7344 shift the sign bit to the appropriate location. */
7346 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
7347 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7351 /* If the considered data is wider than HOST_WIDE_INT, we can't
7352 represent a mask for all its bits in a single scalar.
7353 But we only care about the lower bits, so calculate these. */
7355 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7357 nonzero = ~(HOST_WIDE_INT) 0;
7359 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7360 is the number of bits a full-width mask would have set.
7361 We need only shift if these are fewer than nonzero can
7362 hold. If not, we must keep all bits set in nonzero. */
7364 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7365 < HOST_BITS_PER_WIDE_INT)
7366 nonzero >>= INTVAL (XEXP (x, 1))
7367 + HOST_BITS_PER_WIDE_INT
7368 - GET_MODE_BITSIZE (GET_MODE (x)) ;
7372 nonzero = GET_MODE_MASK (GET_MODE (x));
7373 nonzero >>= INTVAL (XEXP (x, 1));
7376 if ((mask & ~nonzero) == 0
7377 || (i = exact_log2 (mask)) >= 0)
7379 x = simplify_shift_const
7380 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7381 i < 0 ? INTVAL (XEXP (x, 1))
7382 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7384 if (GET_CODE (x) != ASHIFTRT)
7385 return force_to_mode (x, mode, mask, reg, next_select);
7389 /* If MASK is 1, convert this to an LSHIFTRT. This can be done
7390 even if the shift count isn't a constant. */
7392 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
7393 XEXP (x, 0), XEXP (x, 1));
7397 /* If this is a zero- or sign-extension operation that just affects bits
7398 we don't care about, remove it. Be sure the call above returned
7399 something that is still a shift. */
7401 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7402 && GET_CODE (XEXP (x, 1)) == CONST_INT
7403 && INTVAL (XEXP (x, 1)) >= 0
7404 && (INTVAL (XEXP (x, 1))
7405 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
7406 && GET_CODE (XEXP (x, 0)) == ASHIFT
7407 && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
7408 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7415 /* If the shift count is constant and we can do computations
7416 in the mode of X, compute where the bits we care about are.
7417 Otherwise, we can't do anything. Don't change the mode of
7418 the shift or propagate MODE into the shift, though. */
7419 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7420 && INTVAL (XEXP (x, 1)) >= 0)
7422 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7423 GET_MODE (x), GEN_INT (mask),
7425 if (temp && GET_CODE (temp) == CONST_INT)
7427 force_to_mode (XEXP (x, 0), GET_MODE (x),
7428 INTVAL (temp), reg, next_select));
7433 /* If we just want the low-order bit, the NEG isn't needed since it
7434 won't change the low-order bit. */
7436 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
7438 /* We need any bits less significant than the most significant bit in
7439 MASK since carries from those bits will affect the bits we are
7445 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7446 same as the XOR case above. Ensure that the constant we form is not
7447 wider than the mode of X. */
7449 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7450 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7451 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7452 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
7453 < GET_MODE_BITSIZE (GET_MODE (x)))
7454 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7456 temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
7458 temp = simplify_gen_binary (XOR, GET_MODE (x),
7459 XEXP (XEXP (x, 0), 0), temp);
7460 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
7461 temp, XEXP (XEXP (x, 0), 1));
7463 return force_to_mode (x, mode, mask, reg, next_select);
7466 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7467 use the full mask inside the NOT. */
7471 op0 = gen_lowpart (op_mode,
7472 force_to_mode (XEXP (x, 0), mode, mask,
7474 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7475 x = simplify_gen_unary (code, op_mode, op0, op_mode);
7479 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
7480 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
7481 which is equal to STORE_FLAG_VALUE. */
7482 if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
7483 && GET_MODE (XEXP (x, 0)) == mode
7484 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
7485 && (nonzero_bits (XEXP (x, 0), mode)
7486 == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
7487 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7492 /* We have no way of knowing if the IF_THEN_ELSE can itself be
7493 written in a narrower mode. We play it safe and do not do so. */
7496 gen_lowpart (GET_MODE (x),
7497 force_to_mode (XEXP (x, 1), mode,
7498 mask, reg, next_select)));
7500 gen_lowpart (GET_MODE (x),
7501 force_to_mode (XEXP (x, 2), mode,
7502 mask, reg, next_select)));
7509 /* Ensure we return a value of the proper mode. */
7510 return gen_lowpart (mode, x);
7513 /* Return nonzero if X is an expression that has one of two values depending on
7514 whether some other value is zero or nonzero. In that case, we return the
7515 value that is being tested, *PTRUE is set to the value if the rtx being
7516 returned has a nonzero value, and *PFALSE is set to the other alternative.
7518 If we return zero, we set *PTRUE and *PFALSE to X. */
7521 if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
7523 enum machine_mode mode = GET_MODE (x);
7524 enum rtx_code code = GET_CODE (x);
7525 rtx cond0, cond1, true0, true1, false0, false1;
7526 unsigned HOST_WIDE_INT nz;
7528 /* If we are comparing a value against zero, we are done. */
7529 if ((code == NE || code == EQ)
7530 && XEXP (x, 1) == const0_rtx)
7532 *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7533 *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
7537 /* If this is a unary operation whose operand has one of two values, apply
7538 our opcode to compute those values. */
7539 else if (UNARY_P (x)
7540 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
7542 *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
7543 *pfalse = simplify_gen_unary (code, mode, false0,
7544 GET_MODE (XEXP (x, 0)));
7548 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
7549 make can't possibly match and would suppress other optimizations. */
7550 else if (code == COMPARE)
7553 /* If this is a binary operation, see if either side has only one of two
7554 values. If either one does or if both do and they are conditional on
7555 the same value, compute the new true and false values. */
7556 else if (BINARY_P (x))
7558 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7559 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7561 if ((cond0 != 0 || cond1 != 0)
7562 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7564 /* If if_then_else_cond returned zero, then true/false are the
7565 same rtl. We must copy one of them to prevent invalid rtl
7568 true0 = copy_rtx (true0);
7569 else if (cond1 == 0)
7570 true1 = copy_rtx (true1);
7572 if (COMPARISON_P (x))
7574 *ptrue = simplify_gen_relational (code, mode, VOIDmode,
7576 *pfalse = simplify_gen_relational (code, mode, VOIDmode,
7581 *ptrue = simplify_gen_binary (code, mode, true0, true1);
7582 *pfalse = simplify_gen_binary (code, mode, false0, false1);
/* The guard above guarantees that if both conditions were found they
   are rtx_equal_p, so either one can be returned here.  */
7585 return cond0 ? cond0 : cond1;
7588 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
7589 operands is zero when the other is nonzero, and vice-versa,
7590 and STORE_FLAG_VALUE is 1 or -1. */
7592 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7593 && (code == PLUS || code == IOR || code == XOR || code == MINUS
7595 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7597 rtx op0 = XEXP (XEXP (x, 0), 1);
7598 rtx op1 = XEXP (XEXP (x, 1), 1);
7600 cond0 = XEXP (XEXP (x, 0), 0);
7601 cond1 = XEXP (XEXP (x, 1), 0);
/* Require COND1 to be the reversal of COND0, either directly or with
   its operands swapped, so exactly one MULT is nonzero at a time.  */
7603 if (COMPARISON_P (cond0)
7604 && COMPARISON_P (cond1)
7605 && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
7606 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7607 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7608 || ((swap_condition (GET_CODE (cond0))
7609 == reversed_comparison_code (cond1, NULL))
7610 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7611 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7612 && ! side_effects_p (x))
7614 *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx)
7615 *pfalse = simplify_gen_binary (MULT, mode,
7617 ? simplify_gen_unary (NEG, mode,
7625 /* Similarly for MULT, AND and UMIN, except that for these the result
7627 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7628 && (code == MULT || code == AND || code == UMIN)
7629 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7631 cond0 = XEXP (XEXP (x, 0), 0);
7632 cond1 = XEXP (XEXP (x, 1), 0);
/* Same reversed-comparison pattern match as above.  */
7634 if (COMPARISON_P (cond0)
7635 && COMPARISON_P (cond1)
7636 && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
7637 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7638 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7639 || ((swap_condition (GET_CODE (cond0))
7640 == reversed_comparison_code (cond1, NULL))
7641 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7642 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7643 && ! side_effects_p (x))
7645 *ptrue = *pfalse = const0_rtx;
7651 else if (code == IF_THEN_ELSE)
7653 /* If we have IF_THEN_ELSE already, extract the condition and
7654 canonicalize it if it is NE or EQ. */
7655 cond0 = XEXP (x, 0);
7656 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7657 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7658 return XEXP (cond0, 0);
7659 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
/* For EQ against zero, swap the arms so *PTRUE still corresponds to
   the condition value being nonzero.  */
7661 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7662 return XEXP (cond0, 0);
7668 /* If X is a SUBREG, we can narrow both the true and false values
7669 if the inner expression, if there is a condition. */
7670 else if (code == SUBREG
7671 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7674 true0 = simplify_gen_subreg (mode, true0,
7675 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7676 false0 = simplify_gen_subreg (mode, false0,
7677 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7678 if (true0 && false0)
7686 /* If X is a constant, this isn't special and will cause confusions
7687 if we treat it as such. Likewise if it is equivalent to a constant. */
7688 else if (CONSTANT_P (x)
7689 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7692 /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7693 will be least confusing to the rest of the compiler. */
7694 else if (mode == BImode)
7696 *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7700 /* If X is known to be either 0 or -1, those are the true and
7701 false values when testing X. */
7702 else if (x == constm1_rtx || x == const0_rtx
7703 || (mode != VOIDmode
7704 && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
7706 *ptrue = constm1_rtx, *pfalse = const0_rtx;
7710 /* Likewise for 0 or a single bit. */
7711 else if (SCALAR_INT_MODE_P (mode)
7712 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7713 && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
7715 *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
7719 /* Otherwise fail; show no condition with true and false values the same. */
7720 *ptrue = *pfalse = x;
7724 /* Return the value of expression X given the fact that condition COND
7725 is known to be true when applied to REG as its first operand and VAL
7726 as its second. X is known to not be shared and so can be modified in
7729 We only handle the simplest cases, and specifically those cases that
7730 arise with IF_THEN_ELSE expressions. */
7733 known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
7735 enum rtx_code code = GET_CODE (x);
/* Never fold away an expression with side effects.  */
7740 if (side_effects_p (x))
7743 /* If either operand of the condition is a floating point value,
7744 then we have to avoid collapsing an EQ comparison. */
7746 && rtx_equal_p (x, reg)
7747 && ! FLOAT_MODE_P (GET_MODE (x))
7748 && ! FLOAT_MODE_P (GET_MODE (val)))
7751 if (cond == UNEQ && rtx_equal_p (x, reg))
7754 /* If X is (abs REG) and we know something about REG's relationship
7755 with zero, we may be able to simplify this. */
7757 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7760 case GE: case GT: case EQ:
7763 return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7765 GET_MODE (XEXP (x, 0)));
7770 /* The only other cases we handle are MIN, MAX, and comparisons if the
7771 operands are the same as REG and VAL. */
7773 else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
/* Canonicalize so that REG appears as operand 0 of X, swapping the
   known condition to match.  */
7775 if (rtx_equal_p (XEXP (x, 0), val))
7776 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7778 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7780 if (COMPARISON_P (x))
7782 if (comparison_dominates_p (cond, code))
7783 return const_true_rtx;
7785 code = reversed_comparison_code (x, NULL);
7787 && comparison_dominates_p (cond, code))
7792 else if (code == SMAX || code == SMIN
7793 || code == UMIN || code == UMAX)
7795 int unsignedp = (code == UMIN || code == UMAX);
7797 /* Do not reverse the condition when it is NE or EQ.
7798 This is because we cannot conclude anything about
7799 the value of 'SMAX (x, y)' when x is not equal to y,
7800 but we can when x equals y. */
7801 if ((code == SMAX || code == UMAX)
7802 && ! (cond == EQ || cond == NE))
7803 cond = reverse_condition (cond);
7808 return unsignedp ? x : XEXP (x, 1);
7810 return unsignedp ? x : XEXP (x, 0);
7812 return unsignedp ? XEXP (x, 1) : x;
7814 return unsignedp ? XEXP (x, 0) : x;
7821 else if (code == SUBREG)
7823 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
7824 rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);
7826 if (SUBREG_REG (x) != r)
7828 /* We must simplify subreg here, before we lose track of the
7829 original inner_mode. */
7830 new = simplify_subreg (GET_MODE (x), r,
7831 inner_mode, SUBREG_BYTE (x));
7835 SUBST (SUBREG_REG (x), r);
7840 /* We don't have to handle SIGN_EXTEND here, because even in the
7841 case of replacing something with a modeless CONST_INT, a
7842 CONST_INT is already (supposed to be) a valid sign extension for
7843 its narrower mode, which implies it's already properly
7844 sign-extended for the wider mode. Now, for ZERO_EXTEND, the
7845 story is different. */
7846 else if (code == ZERO_EXTEND)
7848 enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
7849 rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);
7851 if (XEXP (x, 0) != r)
7853 /* We must simplify the zero_extend here, before we lose
7854 track of the original inner_mode. */
7855 new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
7860 SUBST (XEXP (x, 0), r);
/* Default: recurse into every subexpression of X, substituting
   in place via SUBST.  */
7866 fmt = GET_RTX_FORMAT (code);
7867 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7870 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7871 else if (fmt[i] == 'E')
7872 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7873 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7880 /* See if X and Y are equal for the purposes of seeing if we can rewrite an
7881 assignment as a field assignment. */
7884 rtx_equal_for_field_assignment_p (rtx x, rtx y)
/* Trivially equal rtxes qualify.  */
7886 if (x == y || rtx_equal_p (x, y))
/* Both must exist and have the same machine mode.  */
7889 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7892 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7893 Note that all SUBREGs of MEM are paradoxical; otherwise they
7894 would have been rewritten. */
7895 if (MEM_P (x) && GET_CODE (y) == SUBREG
7896 && MEM_P (SUBREG_REG (y))
7897 && rtx_equal_p (SUBREG_REG (y),
7898 gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
/* Symmetric to the check above, with the roles of X and Y swapped.  */
7901 if (MEM_P (y) && GET_CODE (x) == SUBREG
7902 && MEM_P (SUBREG_REG (x))
7903 && rtx_equal_p (SUBREG_REG (x),
7904 gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
7907 /* We used to see if get_last_value of X and Y were the same but that's
7908 not correct. In one direction, we'll cause the assignment to have
7909 the wrong destination and in the case, we'll import a register into this
7910 insn that might have already have been dead. So fail if none of the
7911 above cases are true. */
7915 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
7916 Return that assignment if so.
7918 We only handle the most common cases. */
7921 make_field_assignment (rtx x)
7923 rtx dest = SET_DEST (x);
7924 rtx src = SET_SRC (x);
7929 unsigned HOST_WIDE_INT len;
7931 enum machine_mode mode;
7933 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7934 a clear of a one-bit field. We will have changed it to
7935 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
7938 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7939 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7940 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7941 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7943 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7946 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
/* Same one-bit-clear pattern, but with the ROTATE wrapped in a
   lowpart SUBREG of a wider mode.  */
7950 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7951 && subreg_lowpart_p (XEXP (src, 0))
7952 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7953 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7954 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7955 && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
7956 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7957 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7959 assign = make_extraction (VOIDmode, dest, 0,
7960 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7963 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7967 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7969 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7970 && XEXP (XEXP (src, 0), 0) == const1_rtx
7971 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7973 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7976 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7980 /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
7981 SRC is an AND with all bits of that field set, then we can discard
7983 if (GET_CODE (dest) == ZERO_EXTRACT
7984 && GET_CODE (XEXP (dest, 1)) == CONST_INT
7985 && GET_CODE (src) == AND
7986 && GET_CODE (XEXP (src, 1)) == CONST_INT)
7988 HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
7989 unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
7990 unsigned HOST_WIDE_INT ze_mask;
/* A full-host-width (or wider) field would make the shift below
   undefined, so bail out.  */
7992 if (width >= HOST_BITS_PER_WIDE_INT)
7995 ze_mask = ((unsigned HOST_WIDE_INT)1 << width) - 1;
7997 /* Complete overlap. We can remove the source AND. */
7998 if ((and_mask & ze_mask) == ze_mask)
7999 return gen_rtx_SET (VOIDmode, dest, XEXP (src, 0));
8001 /* Partial overlap. We can reduce the source AND. */
8002 if ((and_mask & ze_mask) != and_mask)
8004 mode = GET_MODE (src);
8005 src = gen_rtx_AND (mode, XEXP (src, 0),
8006 gen_int_mode (and_mask & ze_mask, mode));
8007 return gen_rtx_SET (VOIDmode, dest, src);
8011 /* The other case we handle is assignments into a constant-position
8012 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
8013 a mask that has all one bits except for a group of zero bits and
8014 OTHER is known to have zeros where C1 has ones, this is such an
8015 assignment. Compute the position and length from C1. Shift OTHER
8016 to the appropriate position, force it to the required mode, and
8017 make the extraction. Check for the AND in both operands. */
8019 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
8022 rhs = expand_compound_operation (XEXP (src, 0));
8023 lhs = expand_compound_operation (XEXP (src, 1));
8025 if (GET_CODE (rhs) == AND
8026 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
8027 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
8028 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
8029 else if (GET_CODE (lhs) == AND
8030 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
8031 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
8032 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
/* POS/LEN describe the group of zero bits in C1; fail if the field
   does not fit in DEST or OTHER overlaps the kept bits.  */
8036 pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
8037 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
8038 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
8039 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
8042 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
8046 /* The mode to use for the source is the mode of the assignment, or of
8047 what is inside a possible STRICT_LOW_PART. */
8048 mode = (GET_CODE (assign) == STRICT_LOW_PART
8049 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
8051 /* Shift OTHER right POS places and make it the source, restricting it
8052 to the proper length and mode. */
8054 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
8055 GET_MODE (src), other, pos),
8057 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
8058 ? ~(unsigned HOST_WIDE_INT) 0
8059 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
8062 /* If SRC is masked by an AND that does not make a difference in
8063 the value being stored, strip it. */
8064 if (GET_CODE (assign) == ZERO_EXTRACT
8065 && GET_CODE (XEXP (assign, 1)) == CONST_INT
8066 && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
8067 && GET_CODE (src) == AND
8068 && GET_CODE (XEXP (src, 1)) == CONST_INT
8069 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
8070 == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
8071 src = XEXP (src, 0);
8073 return gen_rtx_SET (VOIDmode, assign, src);
8076 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
8080 apply_distributive_law (rtx x)
8082 enum rtx_code code = GET_CODE (x);
8083 enum rtx_code inner_code;
8084 rtx lhs, rhs, other;
8087 /* Distributivity is not true for floating point as it can change the
8088 value. So we don't do it unless -funsafe-math-optimizations. */
8089 if (FLOAT_MODE_P (GET_MODE (x))
8090 && ! flag_unsafe_math_optimizations)
8093 /* The outer operation can only be one of the following: */
8094 if (code != IOR && code != AND && code != XOR
8095 && code != PLUS && code != MINUS)
8101 /* If either operand is a primitive we can't do anything, so get out
8103 if (OBJECT_P (lhs) || OBJECT_P (rhs))
8106 lhs = expand_compound_operation (lhs);
8107 rhs = expand_compound_operation (rhs);
8108 inner_code = GET_CODE (lhs);
/* Both inner operations must be the same code to distribute.  */
8109 if (inner_code != GET_CODE (rhs))
8112 /* See if the inner and outer operations distribute. */
8119 /* These all distribute except over PLUS. */
8120 if (code == PLUS || code == MINUS)
8125 if (code != PLUS && code != MINUS)
8130 /* This is also a multiply, so it distributes over everything. */
8134 /* Non-paradoxical SUBREGs distributes over all operations,
8135 provided the inner modes and byte offsets are the same, this
8136 is an extraction of a low-order part, we don't convert an fp
8137 operation to int or vice versa, this is not a vector mode,
8138 and we would not be converting a single-word operation into a
8139 multi-word operation. The latter test is not required, but
8140 it prevents generating unneeded multi-word operations. Some
8141 of the previous tests are redundant given the latter test,
8142 but are retained because they are required for correctness.
8144 We produce the result slightly differently in this case. */
8146 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
8147 || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
8148 || ! subreg_lowpart_p (lhs)
8149 || (GET_MODE_CLASS (GET_MODE (lhs))
8150 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
8151 || (GET_MODE_SIZE (GET_MODE (lhs))
8152 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
8153 || VECTOR_MODE_P (GET_MODE (lhs))
8154 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
/* Perform the outer operation in the inner (wider) mode, then take
   the lowpart of the result.  */
8157 tem = simplify_gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
8158 SUBREG_REG (lhs), SUBREG_REG (rhs));
8159 return gen_lowpart (GET_MODE (x), tem);
8165 /* Set LHS and RHS to the inner operands (A and B in the example
8166 above) and set OTHER to the common operand (C in the example).
8167 There is only one way to do this unless the inner operation is
8169 if (COMMUTATIVE_ARITH_P (lhs)
8170 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
8171 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
8172 else if (COMMUTATIVE_ARITH_P (lhs)
8173 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
8174 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
8175 else if (COMMUTATIVE_ARITH_P (lhs)
8176 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
8177 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
8178 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
8179 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
8183 /* Form the new inner operation, seeing if it simplifies first. */
8184 tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);
8186 /* There is one exception to the general way of distributing:
8187 (a | c) ^ (b | c) -> (a ^ b) & ~c */
8188 if (code == XOR && inner_code == IOR)
8191 other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
8194 /* We may be able to continuing distributing the result, so call
8195 ourselves recursively on the inner operation before forming the
8196 outer operation, which we return. */
8197 return simplify_gen_binary (inner_code, GET_MODE (x),
8198 apply_distributive_law (tem), other);
8201 /* See if X is of the form (* (+ A B) C), and if so convert to
8202 (+ (* A C) (* B C)) and try to simplify.
8204 Most of the time, this results in no change. However, if some of
8205 the operands are the same or inverses of each other, simplifications
8208 For example, (and (ior A B) (not B)) can occur as the result of
8209 expanding a bit field assignment. When we apply the distributive
8210 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
8211 which then simplifies to (and (A (not B))).
8213 Note that no checks happen on the validity of applying the inverse
8214 distributive law. This is pointless since we can do it in the
8215 few places where this routine is called.
8217 N is the index of the term that is decomposed (the arithmetic operation,
8218 i.e. (+ A B) in the first example above). !N is the index of the term that
8219 is distributed, i.e. of C in the first example above. */
8221 distribute_and_simplify_rtx (rtx x, int n)
8223 enum machine_mode mode;
8224 enum rtx_code outer_code, inner_code;
8225 rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;
8227 decomposed = XEXP (x, n);
/* Only arithmetic inner operations can be decomposed.  */
8228 if (!ARITHMETIC_P (decomposed))
8231 mode = GET_MODE (x);
8232 outer_code = GET_CODE (x);
8233 distributed = XEXP (x, !n);
8235 inner_code = GET_CODE (decomposed);
8236 inner_op0 = XEXP (decomposed, 0);
8237 inner_op1 = XEXP (decomposed, 1);
8239 /* Special case (and (xor B C) (not A)), which is equivalent to
8240 (xor (ior A B) (ior A C)) */
8241 if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
8243 distributed = XEXP (distributed, 0);
8249 /* Distribute the second term. */
8250 new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
8251 new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
8255 /* Distribute the first term. */
8256 new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
8257 new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
8260 tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
/* Keep the distributed form only if it actually simplified: the outer
   code must have disappeared and the rtx cost must have decreased.  */
8262 if (GET_CODE (tmp) != outer_code
8263 && rtx_cost (tmp, SET) < rtx_cost (x, SET))
8269 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
8272 Return an equivalent form, if different from X. Otherwise, return X. If
8273 X is zero, we are to always construct the equivalent form. */
/* NOTE(review): several interior lines of this function are missing from
   this extract (returns, local declarations such as `i', `o0', `o1', and
   the closing lines).  Comments below describe only what is visible.  */
8276 simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
8277 unsigned HOST_WIDE_INT constop)
8279 unsigned HOST_WIDE_INT nonzero;
8282 /* Simplify VAROP knowing that we will be only looking at some of the
8285 Note by passing in CONSTOP, we guarantee that the bits not set in
8286 CONSTOP are not significant and will never be examined. We must
8287 ensure that is the case by explicitly masking out those bits
8288 before returning. */
8289 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
8291 /* If VAROP is a CLOBBER, we will fail so return it. */
8292 if (GET_CODE (varop) == CLOBBER)
8295 /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
8296 to VAROP and return the new constant. */
8297 if (GET_CODE (varop) == CONST_INT)
8298 return gen_int_mode (INTVAL (varop) & constop, mode);
8300 /* See what bits may be nonzero in VAROP. Unlike the general case of
8301 a call to nonzero_bits, here we don't care about bits outside
8304 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
8306 /* Turn off all bits in the constant that are known to already be zero.
8307 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
8308 which is tested below. */
8312 /* If we don't have any bits left, return zero. */
8316 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
8317 a power of two, we can replace this with an ASHIFT. */
8318 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
8319 && (i = exact_log2 (constop)) >= 0)
8320 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
8322 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
8323 or XOR, then try to apply the distributive law. This may eliminate
8324 operations if either branch can be simplified because of the AND.
8325 It may also make some cases more complex, but those cases probably
8326 won't match a pattern either with or without this. */
8328 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
8332 apply_distributive_law
8333 (simplify_gen_binary (GET_CODE (varop), GET_MODE (varop),
8334 simplify_and_const_int (NULL_RTX,
8338 simplify_and_const_int (NULL_RTX,
8343 /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
8344 the AND and see if one of the operands simplifies to zero. If so, we
8345 may eliminate it. */
8347 if (GET_CODE (varop) == PLUS
8348 && exact_log2 (constop + 1) >= 0)
8352 o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
8353 o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
8354 if (o0 == const0_rtx)
8356 if (o1 == const0_rtx)
8360 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
8361 if we already had one (just check for the simplest cases). */
8362 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8363 && GET_MODE (XEXP (x, 0)) == mode
8364 && SUBREG_REG (XEXP (x, 0)) == varop)
8365 varop = XEXP (x, 0);
8367 varop = gen_lowpart (mode, varop);
8369 /* If we can't make the SUBREG, try to return what we were given. */
8370 if (GET_CODE (varop) == CLOBBER)
8371 return x ? x : varop;
8373 /* If we are only masking insignificant bits, return VAROP. */
8374 if (constop == nonzero)
8378 /* Otherwise, return an AND. */
8379 constop = trunc_int_for_mode (constop, mode);
8380 /* See how much, if any, of X we can use. */
8381 if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
8382 x = simplify_gen_binary (AND, mode, varop, GEN_INT (constop));
/* Reuse X in place where possible: only substitute the operands that
   actually changed (SUBST records undo information for combine).  */
8386 if (GET_CODE (XEXP (x, 1)) != CONST_INT
8387 || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
8388 SUBST (XEXP (x, 1), GEN_INT (constop));
8390 SUBST (XEXP (x, 0), varop);
8397 /* Given a REG, X, compute which bits in X can be nonzero.
8398 We don't care about bits outside of those defined in MODE.
8400 For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
8401 a shift, AND, or zero_extract, we can do better. */
/* NOTE(review): interior lines are elided in this extract (the return-type
   line, the declaration of `tem', early returns, and closing braces).  */
8404 reg_nonzero_bits_for_combine (rtx x, enum machine_mode mode,
8405 rtx known_x ATTRIBUTE_UNUSED,
8406 enum machine_mode known_mode ATTRIBUTE_UNUSED,
8407 unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED,
8408 unsigned HOST_WIDE_INT *nonzero)
8412 /* If X is a register whose nonzero bits value is current, use it.
8413 Otherwise, if X is a register whose value we can find, use that
8414 value. Otherwise, use the previously-computed global nonzero bits
8415 for this register. */
/* The recorded value is "current" if it was set in this basic block
   (label_tick) before the point we are substituting at (subst_low_cuid),
   or if the pseudo is set exactly once and not live on function entry.  */
8417 if (reg_stat[REGNO (x)].last_set_value != 0
8418 && (reg_stat[REGNO (x)].last_set_mode == mode
8419 || (GET_MODE_CLASS (reg_stat[REGNO (x)].last_set_mode) == MODE_INT
8420 && GET_MODE_CLASS (mode) == MODE_INT))
8421 && (reg_stat[REGNO (x)].last_set_label == label_tick
8422 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8423 && REG_N_SETS (REGNO (x)) == 1
8424 && ! REGNO_REG_SET_P
8425 (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
8427 && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid)
8429 *nonzero &= reg_stat[REGNO (x)].last_set_nonzero_bits;
8433 tem = get_last_value (x);
8437 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8438 /* If X is narrower than MODE and TEM is a non-negative
8439 constant that would appear negative in the mode of X,
8440 sign-extend it for use in reg_nonzero_bits because some
8441 machines (maybe most) will actually do the sign-extension
8442 and this is the conservative approach.
8444 ??? For 2.5, try to tighten up the MD files in this regard
8445 instead of this kludge. */
8447 if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)
8448 && GET_CODE (tem) == CONST_INT
8450 && 0 != (INTVAL (tem)
8451 & ((HOST_WIDE_INT) 1
8452 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
8453 tem = GEN_INT (INTVAL (tem)
8454 | ((HOST_WIDE_INT) (-1)
8455 << GET_MODE_BITSIZE (GET_MODE (x))));
/* Fall back to the globally-computed nonzero-bits summary, widening the
   mask conservatively for bits above X's own mode.  */
8459 else if (nonzero_sign_valid && reg_stat[REGNO (x)].nonzero_bits)
8461 unsigned HOST_WIDE_INT mask = reg_stat[REGNO (x)].nonzero_bits;
8463 if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode))
8464 /* We don't know anything about the upper bits. */
8465 mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
8472 /* Return the number of bits at the high-order end of X that are known to
8473 be equal to the sign bit. X will be used in mode MODE; if MODE is
8474 VOIDmode, X will be used in its own mode. The returned value will always
8475 be between 1 and the number of bits in MODE. */
/* NOTE(review): interior lines are elided in this extract; the structure
   parallels reg_nonzero_bits_for_combine above — try the recorded
   last-set information first, then get_last_value, then the global
   sign_bit_copies summary.  */
8478 reg_num_sign_bit_copies_for_combine (rtx x, enum machine_mode mode,
8479 rtx known_x ATTRIBUTE_UNUSED,
8480 enum machine_mode known_mode
8482 unsigned int known_ret ATTRIBUTE_UNUSED,
8483 unsigned int *result)
/* Use the recorded per-register value only if it is still current at the
   substitution point (same tests as in reg_nonzero_bits_for_combine).  */
8487 if (reg_stat[REGNO (x)].last_set_value != 0
8488 && reg_stat[REGNO (x)].last_set_mode == mode
8489 && (reg_stat[REGNO (x)].last_set_label == label_tick
8490 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8491 && REG_N_SETS (REGNO (x)) == 1
8492 && ! REGNO_REG_SET_P
8493 (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
8495 && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid)
8497 *result = reg_stat[REGNO (x)].last_set_sign_bit_copies;
8501 tem = get_last_value (x);
/* The global summary is only valid when the modes have equal width.  */
8505 if (nonzero_sign_valid && reg_stat[REGNO (x)].sign_bit_copies != 0
8506 && GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode))
8507 *result = reg_stat[REGNO (x)].sign_bit_copies;
8512 /* Return the number of "extended" bits there are in X, when interpreted
8513 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8514 unsigned quantities, this is the number of high-order zero bits.
8515 For signed quantities, this is the number of copies of the sign bit
8516 minus 1. In both cases, this function returns the number of "spare"
8517 bits. For example, if two quantities for which this function returns
8518 at least 1 are added, the addition is known not to overflow.
8520 This function will always return 0 unless called during combine, which
8521 implies that it must be called from a define_split. */
/* NOTE(review): the return-type line, braces, and the `return' keyword of
   the final expression are elided in this extract.  */
8524 extended_count (rtx x, enum machine_mode mode, int unsignedp)
8526 if (nonzero_sign_valid == 0)
/* Unsigned: count leading zero bits via nonzero_bits (only computable
   when MODE fits in a HOST_WIDE_INT); signed: sign-bit copies minus 1.  */
8530 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8531 ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
8532 - floor_log2 (nonzero_bits (x, mode)))
8534 : num_sign_bit_copies (x, mode) - 1);
8537 /* This function is called from `simplify_shift_const' to merge two
8538 outer operations. Specifically, we have already found that we need
8539 to perform operation *POP0 with constant *PCONST0 at the outermost
8540 position. We would now like to also perform OP1 with constant CONST1
8541 (with *POP0 being done last).
8543 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8544 the resulting operation. *PCOMP_P is set to 1 if we would need to
8545 complement the innermost operand, otherwise it is unchanged.
8547 MODE is the mode in which the operation will be done. No bits outside
8548 the width of this mode matter. It is assumed that the width of this mode
8549 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8551 If *POP0 or OP1 is UNKNOWN, it means no operation is required. Only NEG, PLUS,
8552 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8553 result is simply *PCONST0.
8555 If the resulting operation cannot be expressed as one operation, we
8556 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
/* NOTE(review): this extract elides the return-type line, the switch
   statements on OP0/OP1 that select among the commented identities, the
   `return 0'/`return 1' statements, and closing braces.  */
8559 merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0, enum rtx_code op1, HOST_WIDE_INT const1, enum machine_mode mode, int *pcomp_p)
8561 enum rtx_code op0 = *pop0;
8562 HOST_WIDE_INT const0 = *pconst0;
/* Work only with the bits that matter in MODE.  */
8564 const0 &= GET_MODE_MASK (mode);
8565 const1 &= GET_MODE_MASK (mode);
8567 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8571 /* If OP0 or OP1 is UNKNOWN, this is easy. Similarly if they are the same or
8574 if (op1 == UNKNOWN || op0 == SET)
8577 else if (op0 == UNKNOWN)
8578 op0 = op1, const0 = const1;
8580 else if (op0 == op1)
8604 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8605 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8608 /* If the two constants aren't the same, we can't do anything. The
8609 remaining six cases can all be done. */
8610 else if (const0 != const1)
/* The six mergeable (OP0, OP1) pairs, each justified by the boolean
   identity in its comment; b is the shared constant.  */
8618 /* (a & b) | b == b */
8620 else /* op1 == XOR */
8621 /* (a ^ b) | b == a | b */
8627 /* (a & b) ^ b == (~a) & b */
8628 op0 = AND, *pcomp_p = 1;
8629 else /* op1 == IOR */
8630 /* (a | b) ^ b == a & ~b */
8631 op0 = AND, const0 = ~const0;
8636 /* (a | b) & b == b */
8638 else /* op1 == XOR */
8639 /* (a ^ b) & b == (~a) & b */
8646 /* Check for NO-OP cases. */
8647 const0 &= GET_MODE_MASK (mode);
8649 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8651 else if (const0 == 0 && op0 == AND)
8653 else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8657 /* ??? Slightly redundant with the above mask, but not entirely.
8658 Moving this above means we'd have to sign-extend the mode mask
8659 for the final test. */
8660 const0 = trunc_int_for_mode (const0, mode);
8668 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8669 The result of the shift is RESULT_MODE. X, if nonzero, is an expression
8670 that we started with.
8672 The shift is normally computed in the widest mode we find in VAROP, as
8673 long as it isn't a different number of words than RESULT_MODE. Exceptions
8674 are ASHIFTRT and ROTATE, which are always done in their original mode. */
8677 simplify_shift_const (rtx x, enum rtx_code code,
8678 enum machine_mode result_mode, rtx varop,
8681 enum rtx_code orig_code = code;
8684 enum machine_mode mode = result_mode;
8685 enum machine_mode shift_mode, tmode;
8686 unsigned int mode_words
8687 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8688 /* We form (outer_op (code varop count) (outer_const)). */
8689 enum rtx_code outer_op = UNKNOWN;
8690 HOST_WIDE_INT outer_const = 0;
8692 int complement_p = 0;
8695 /* Make sure and truncate the "natural" shift on the way in. We don't
8696 want to do this inside the loop as it makes it more difficult to
8698 if (SHIFT_COUNT_TRUNCATED)
8699 orig_count &= GET_MODE_BITSIZE (mode) - 1;
8701 /* If we were given an invalid count, don't do anything except exactly
8702 what was requested. */
8704 if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
8709 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (orig_count));
8714 /* Unless one of the branches of the `if' in this loop does a `continue',
8715 we will `break' the loop after the `if'. */
8719 /* If we have an operand of (clobber (const_int 0)), just return that
8721 if (GET_CODE (varop) == CLOBBER)
8724 /* If we discovered we had to complement VAROP, leave. Making a NOT
8725 here would cause an infinite loop. */
8729 /* Convert ROTATERT to ROTATE. */
8730 if (code == ROTATERT)
8732 unsigned int bitsize = GET_MODE_BITSIZE (result_mode);;
8734 if (VECTOR_MODE_P (result_mode))
8735 count = bitsize / GET_MODE_NUNITS (result_mode) - count;
8737 count = bitsize - count;
8740 /* We need to determine what mode we will do the shift in. If the
8741 shift is a right shift or a ROTATE, we must always do it in the mode
8742 it was originally done in. Otherwise, we can do it in MODE, the
8743 widest mode encountered. */
8745 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8746 ? result_mode : mode);
8748 /* Handle cases where the count is greater than the size of the mode
8749 minus 1. For ASHIFT, use the size minus one as the count (this can
8750 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8751 take the count modulo the size. For other shifts, the result is
8754 Since these shifts are being produced by the compiler by combining
8755 multiple operations, each of which are defined, we know what the
8756 result is supposed to be. */
8758 if (count > (unsigned int) (GET_MODE_BITSIZE (shift_mode) - 1))
8760 if (code == ASHIFTRT)
8761 count = GET_MODE_BITSIZE (shift_mode) - 1;
8762 else if (code == ROTATE || code == ROTATERT)
8763 count %= GET_MODE_BITSIZE (shift_mode);
8766 /* We can't simply return zero because there may be an
8774 /* An arithmetic right shift of a quantity known to be -1 or 0
8776 if (code == ASHIFTRT
8777 && (num_sign_bit_copies (varop, shift_mode)
8778 == GET_MODE_BITSIZE (shift_mode)))
8784 /* If we are doing an arithmetic right shift and discarding all but
8785 the sign bit copies, this is equivalent to doing a shift by the
8786 bitsize minus one. Convert it into that shift because it will often
8787 allow other simplifications. */
8789 if (code == ASHIFTRT
8790 && (count + num_sign_bit_copies (varop, shift_mode)
8791 >= GET_MODE_BITSIZE (shift_mode)))
8792 count = GET_MODE_BITSIZE (shift_mode) - 1;
8794 /* We simplify the tests below and elsewhere by converting
8795 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8796 `make_compound_operation' will convert it to an ASHIFTRT for
8797 those machines (such as VAX) that don't have an LSHIFTRT. */
8798 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8800 && ((nonzero_bits (varop, shift_mode)
8801 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8805 if (code == LSHIFTRT
8806 && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8807 && !(nonzero_bits (varop, shift_mode) >> count))
8810 && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8811 && !((nonzero_bits (varop, shift_mode) << count)
8812 & GET_MODE_MASK (shift_mode)))
8815 switch (GET_CODE (varop))
8821 new = expand_compound_operation (varop);
8830 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8831 minus the width of a smaller mode, we can do this with a
8832 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
8833 if ((code == ASHIFTRT || code == LSHIFTRT)
8834 && ! mode_dependent_address_p (XEXP (varop, 0))
8835 && ! MEM_VOLATILE_P (varop)
8836 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8837 MODE_INT, 1)) != BLKmode)
8839 new = adjust_address_nv (varop, tmode,
8840 BYTES_BIG_ENDIAN ? 0
8841 : count / BITS_PER_UNIT);
8843 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
8844 : ZERO_EXTEND, mode, new);
8851 /* Similar to the case above, except that we can only do this if
8852 the resulting mode is the same as that of the underlying
8853 MEM and adjust the address depending on the *bits* endianness
8854 because of the way that bit-field extract insns are defined. */
8855 if ((code == ASHIFTRT || code == LSHIFTRT)
8856 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8857 MODE_INT, 1)) != BLKmode
8858 && tmode == GET_MODE (XEXP (varop, 0)))
8860 if (BITS_BIG_ENDIAN)
8861 new = XEXP (varop, 0);
8864 new = copy_rtx (XEXP (varop, 0));
8865 SUBST (XEXP (new, 0),
8866 plus_constant (XEXP (new, 0),
8867 count / BITS_PER_UNIT));
8870 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
8871 : ZERO_EXTEND, mode, new);
8878 /* If VAROP is a SUBREG, strip it as long as the inner operand has
8879 the same number of words as what we've seen so far. Then store
8880 the widest mode in MODE. */
8881 if (subreg_lowpart_p (varop)
8882 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8883 > GET_MODE_SIZE (GET_MODE (varop)))
8884 && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8885 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
8888 varop = SUBREG_REG (varop);
8889 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
8890 mode = GET_MODE (varop);
8896 /* Some machines use MULT instead of ASHIFT because MULT
8897 is cheaper. But it is still better on those machines to
8898 merge two shifts into one. */
8899 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8900 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8903 = simplify_gen_binary (ASHIFT, GET_MODE (varop),
8905 GEN_INT (exact_log2 (
8906 INTVAL (XEXP (varop, 1)))));
8912 /* Similar, for when divides are cheaper. */
8913 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8914 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8917 = simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
8919 GEN_INT (exact_log2 (
8920 INTVAL (XEXP (varop, 1)))));
8926 /* If we are extracting just the sign bit of an arithmetic
8927 right shift, that shift is not needed. However, the sign
8928 bit of a wider mode may be different from what would be
8929 interpreted as the sign bit in a narrower mode, so, if
8930 the result is narrower, don't discard the shift. */
8931 if (code == LSHIFTRT
8932 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
8933 && (GET_MODE_BITSIZE (result_mode)
8934 >= GET_MODE_BITSIZE (GET_MODE (varop))))
8936 varop = XEXP (varop, 0);
8940 /* ... fall through ... */
8945 /* Here we have two nested shifts. The result is usually the
8946 AND of a new shift with a mask. We compute the result below. */
8947 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8948 && INTVAL (XEXP (varop, 1)) >= 0
8949 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
8950 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8951 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8952 && !VECTOR_MODE_P (result_mode))
8954 enum rtx_code first_code = GET_CODE (varop);
8955 unsigned int first_count = INTVAL (XEXP (varop, 1));
8956 unsigned HOST_WIDE_INT mask;
8959 /* We have one common special case. We can't do any merging if
8960 the inner code is an ASHIFTRT of a smaller mode. However, if
8961 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8962 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8963 we can convert it to
8964 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
8965 This simplifies certain SIGN_EXTEND operations. */
8966 if (code == ASHIFT && first_code == ASHIFTRT
8967 && count == (unsigned int)
8968 (GET_MODE_BITSIZE (result_mode)
8969 - GET_MODE_BITSIZE (GET_MODE (varop))))
8971 /* C3 has the low-order C1 bits zero. */
8973 mask = (GET_MODE_MASK (mode)
8974 & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
8976 varop = simplify_and_const_int (NULL_RTX, result_mode,
8977 XEXP (varop, 0), mask);
8978 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
8980 count = first_count;
8985 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8986 than C1 high-order bits equal to the sign bit, we can convert
8987 this to either an ASHIFT or an ASHIFTRT depending on the
8990 We cannot do this if VAROP's mode is not SHIFT_MODE. */
8992 if (code == ASHIFTRT && first_code == ASHIFT
8993 && GET_MODE (varop) == shift_mode
8994 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
8997 varop = XEXP (varop, 0);
8999 signed_count = count - first_count;
9000 if (signed_count < 0)
9001 count = -signed_count, code = ASHIFT;
9003 count = signed_count;
9008 /* There are some cases we can't do. If CODE is ASHIFTRT,
9009 we can only do this if FIRST_CODE is also ASHIFTRT.
9011 We can't do the case when CODE is ROTATE and FIRST_CODE is
9014 If the mode of this shift is not the mode of the outer shift,
9015 we can't do this if either shift is a right shift or ROTATE.
9017 Finally, we can't do any of these if the mode is too wide
9018 unless the codes are the same.
9020 Handle the case where the shift codes are the same
9023 if (code == first_code)
9025 if (GET_MODE (varop) != result_mode
9026 && (code == ASHIFTRT || code == LSHIFTRT
9030 count += first_count;
9031 varop = XEXP (varop, 0);
9035 if (code == ASHIFTRT
9036 || (code == ROTATE && first_code == ASHIFTRT)
9037 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9038 || (GET_MODE (varop) != result_mode
9039 && (first_code == ASHIFTRT || first_code == LSHIFTRT
9040 || first_code == ROTATE
9041 || code == ROTATE)))
9044 /* To compute the mask to apply after the shift, shift the
9045 nonzero bits of the inner shift the same way the
9046 outer shift will. */
9048 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9051 = simplify_binary_operation (code, result_mode, mask_rtx,
9054 /* Give up if we can't compute an outer operation to use. */
9056 || GET_CODE (mask_rtx) != CONST_INT
9057 || ! merge_outer_ops (&outer_op, &outer_const, AND,
9059 result_mode, &complement_p))
9062 /* If the shifts are in the same direction, we add the
9063 counts. Otherwise, we subtract them. */
9064 signed_count = count;
9065 if ((code == ASHIFTRT || code == LSHIFTRT)
9066 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9067 signed_count += first_count;
9069 signed_count -= first_count;
9071 /* If COUNT is positive, the new shift is usually CODE,
9072 except for the two exceptions below, in which case it is
9073 FIRST_CODE. If the count is negative, FIRST_CODE should
9075 if (signed_count > 0
9076 && ((first_code == ROTATE && code == ASHIFT)
9077 || (first_code == ASHIFTRT && code == LSHIFTRT)))
9078 code = first_code, count = signed_count;
9079 else if (signed_count < 0)
9080 code = first_code, count = -signed_count;
9082 count = signed_count;
9084 varop = XEXP (varop, 0);
9088 /* If we have (A << B << C) for any shift, we can convert this to
9089 (A << C << B). This wins if A is a constant. Only try this if
9090 B is not a constant. */
9092 else if (GET_CODE (varop) == code
9093 && GET_CODE (XEXP (varop, 1)) != CONST_INT
9095 = simplify_binary_operation (code, mode,
9099 varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
9106 /* Make this fit the case below. */
9107 varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9108 GEN_INT (GET_MODE_MASK (mode)));
9114 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9115 with C the size of VAROP - 1 and the shift is logical if
9116 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9117 we have an (le X 0) operation. If we have an arithmetic shift
9118 and STORE_FLAG_VALUE is 1 or we have a logical shift with
9119 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
9121 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9122 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9123 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9124 && (code == LSHIFTRT || code == ASHIFTRT)
9125 && count == (unsigned int)
9126 (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9127 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9130 varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9133 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9134 varop = gen_rtx_NEG (GET_MODE (varop), varop);
9139 /* If we have (shift (logical)), move the logical to the outside
9140 to allow it to possibly combine with another logical and the
9141 shift to combine with another shift. This also canonicalizes to
9142 what a ZERO_EXTRACT looks like. Also, some machines have
9143 (and (shift)) insns. */
9145 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9146 /* We can't do this if we have (ashiftrt (xor)) and the
9147 constant has its sign bit set in shift_mode. */
9148 && !(code == ASHIFTRT && GET_CODE (varop) == XOR
9149 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
9151 && (new = simplify_binary_operation (code, result_mode,
9153 GEN_INT (count))) != 0
9154 && GET_CODE (new) == CONST_INT
9155 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9156 INTVAL (new), result_mode, &complement_p))
9158 varop = XEXP (varop, 0);
9162 /* If we can't do that, try to simplify the shift in each arm of the
9163 logical expression, make a new logical expression, and apply
9164 the inverse distributive law. This also can't be done
9165 for some (ashiftrt (xor)). */
9166 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9167 && !(code == ASHIFTRT && GET_CODE (varop) == XOR
9168 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
9171 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9172 XEXP (varop, 0), count);
9173 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9174 XEXP (varop, 1), count);
9176 varop = simplify_gen_binary (GET_CODE (varop), shift_mode,
9178 varop = apply_distributive_law (varop);
9186 /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9187 says that the sign bit can be tested, FOO has mode MODE, C is
9188 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9189 that may be nonzero. */
9190 if (code == LSHIFTRT
9191 && XEXP (varop, 1) == const0_rtx
9192 && GET_MODE (XEXP (varop, 0)) == result_mode
9193 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9194 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9195 && ((STORE_FLAG_VALUE
9196 & ((HOST_WIDE_INT) 1
9197 < (GET_MODE_BITSIZE (result_mode) - 1))))
9198 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9199 && merge_outer_ops (&outer_op, &outer_const, XOR,
9200 (HOST_WIDE_INT) 1, result_mode,
9203 varop = XEXP (varop, 0);
9210 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9211 than the number of bits in the mode is equivalent to A. */
9212 if (code == LSHIFTRT
9213 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9214 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9216 varop = XEXP (varop, 0);
9221 /* NEG commutes with ASHIFT since it is multiplication. Move the
9222 NEG outside to allow shifts to combine. */
9224 && merge_outer_ops (&outer_op, &outer_const, NEG,
9225 (HOST_WIDE_INT) 0, result_mode,
9228 varop = XEXP (varop, 0);
9234 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9235 is one less than the number of bits in the mode is
9236 equivalent to (xor A 1). */
9237 if (code == LSHIFTRT
9238 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9239 && XEXP (varop, 1) == constm1_rtx
9240 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9241 && merge_outer_ops (&outer_op, &outer_const, XOR,
9242 (HOST_WIDE_INT) 1, result_mode,
9246 varop = XEXP (varop, 0);
9250 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9251 that might be nonzero in BAR are those being shifted out and those
9252 bits are known zero in FOO, we can replace the PLUS with FOO.
9253 Similarly in the other operand order. This code occurs when
9254 we are computing the size of a variable-size array. */
9256 if ((code == ASHIFTRT || code == LSHIFTRT)
9257 && count < HOST_BITS_PER_WIDE_INT
9258 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9259 && (nonzero_bits (XEXP (varop, 1), result_mode)
9260 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9262 varop = XEXP (varop, 0);
9265 else if ((code == ASHIFTRT || code == LSHIFTRT)
9266 && count < HOST_BITS_PER_WIDE_INT
9267 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9268 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9270 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9271 & nonzero_bits (XEXP (varop, 1),
9274 varop = XEXP (varop, 1);
9278 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9280 && GET_CODE (XEXP (varop, 1)) == CONST_INT
9281 && (new = simplify_binary_operation (ASHIFT, result_mode,
9283 GEN_INT (count))) != 0
9284 && GET_CODE (new) == CONST_INT
9285 && merge_outer_ops (&outer_op, &outer_const, PLUS,
9286 INTVAL (new), result_mode, &complement_p))
9288 varop = XEXP (varop, 0);
9292 /* Check for 'PLUS signbit', which is the canonical form of 'XOR
9293 signbit', and attempt to change the PLUS to an XOR and move it to
9294 the outer operation as is done above in the AND/IOR/XOR case
9295 leg for shift(logical). See details in logical handling above
9296 for reasoning in doing so. */
9297 if (code == LSHIFTRT
9298 && GET_CODE (XEXP (varop, 1)) == CONST_INT
9299 && mode_signbit_p (result_mode, XEXP (varop, 1))
9300 && (new = simplify_binary_operation (code, result_mode,
9302 GEN_INT (count))) != 0
9303 && GET_CODE (new) == CONST_INT
9304 && merge_outer_ops (&outer_op, &outer_const, XOR,
9305 INTVAL (new), result_mode, &complement_p))
9307 varop = XEXP (varop, 0);
9314 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
9315 with C the size of VAROP - 1 and the shift is logical if
9316 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9317 we have a (gt X 0) operation. If the shift is arithmetic with
9318 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9319 we have a (neg (gt X 0)) operation. */
9321 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9322 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9323 && count == (unsigned int)
9324 (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9325 && (code == LSHIFTRT || code == ASHIFTRT)
9326 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9327 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (varop, 0), 1))
9329 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9332 varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9335 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9336 varop = gen_rtx_NEG (GET_MODE (varop), varop);
9343 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9344 if the truncate does not affect the value. */
9345 if (code == LSHIFTRT
9346 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9347 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9348 && (INTVAL (XEXP (XEXP (varop, 0), 1))
9349 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9350 - GET_MODE_BITSIZE (GET_MODE (varop)))))
9352 rtx varop_inner = XEXP (varop, 0);
9355 = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9356 XEXP (varop_inner, 0),
9358 (count + INTVAL (XEXP (varop_inner, 1))));
9359 varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9372 /* We need to determine what mode to do the shift in. If the shift is
9373 a right shift or ROTATE, we must always do it in the mode it was
9374 originally done in. Otherwise, we can do it in MODE, the widest mode
9375 encountered. The code we care about is that of the shift that will
9376 actually be done, not the shift that was originally requested. */
9378 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9379 ? result_mode : mode);
9381 /* We have now finished analyzing the shift. The result should be
9382 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
9383 OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
9384 to the result of the shift. OUTER_CONST is the relevant constant,
9385 but we must turn off all bits turned off in the shift.
9387 If we were passed a value for X, see if we can use any pieces of
9388 it. If not, make new rtx. */
9390 if (x && GET_RTX_CLASS (GET_CODE (x)) == RTX_BIN_ARITH
9391 && GET_CODE (XEXP (x, 1)) == CONST_INT
9392 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == count)
9393 const_rtx = XEXP (x, 1);
9395 const_rtx = GEN_INT (count);
9397 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9398 && GET_MODE (XEXP (x, 0)) == shift_mode
9399 && SUBREG_REG (XEXP (x, 0)) == varop)
9400 varop = XEXP (x, 0);
9401 else if (GET_MODE (varop) != shift_mode)
9402 varop = gen_lowpart (shift_mode, varop);
9404 /* If we can't make the SUBREG, try to return what we were given. */
9405 if (GET_CODE (varop) == CLOBBER)
9406 return x ? x : varop;
9408 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9412 x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
9414 /* If we have an outer operation and we just made a shift, it is
9415 possible that we could have simplified the shift were it not
9416 for the outer operation. So try to do the simplification
9419 if (outer_op != UNKNOWN && GET_CODE (x) == code
9420 && GET_CODE (XEXP (x, 1)) == CONST_INT)
9421 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9422 INTVAL (XEXP (x, 1)));
9424 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
9425 turn off all the bits that the shift would have turned off. */
9426 if (orig_code == LSHIFTRT && result_mode != shift_mode)
9427 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9428 GET_MODE_MASK (result_mode) >> orig_count);
9430 /* Do the remainder of the processing in RESULT_MODE. */
9431 x = gen_lowpart (result_mode, x);
9433 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9436 x = simplify_gen_unary (NOT, result_mode, x, result_mode);
9438 if (outer_op != UNKNOWN)
9440 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9441 outer_const = trunc_int_for_mode (outer_const, result_mode);
9443 if (outer_op == AND)
9444 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9445 else if (outer_op == SET)
9446 /* This means that we have determined that the result is
9447 equivalent to a constant. This should be rare. */
9448 x = GEN_INT (outer_const);
9449 else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
9450 x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
9452 x = simplify_gen_binary (outer_op, result_mode, x,
9453 GEN_INT (outer_const));
9459 /* Like recog, but we receive the address of a pointer to a new pattern.
9460 We try to match the rtx that the pointer points to.
9461 If that fails, we may try to modify or replace the pattern,
9462 storing the replacement into the same pointer object.
9464 Modifications include deletion or addition of CLOBBERs.
9466 PNOTES is a pointer to a location where any REG_UNUSED notes added for
9467 the CLOBBERs are placed.
9469 The value is the final insn code from the pattern ultimately matched, otherwise -1. */
9473 recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
/* Returns the insn code of the pattern that finally matched (or
   NOOP_MOVE_INSN_CODE for a recognized no-op SET); *PNEWPAT may be
   rewritten and REG_UNUSED notes for any added CLOBBERs are returned
   through *PNOTES.  NOTE(review): several lines of this listing are
   elided (original line numbers jump), so the comments below describe
   only what is visible here.  */
9476 int insn_code_number;
9477 int num_clobbers_to_add = 0;
9480 rtx old_notes, old_pat;
9482 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9483 we use to indicate that something didn't match. If we find such a
9484 thing, force rejection. */
9485 if (GET_CODE (pat) == PARALLEL)
9486 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9487 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9488 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
/* Temporarily install PAT as the pattern of INSN, with empty notes,
   so that recog () sees exactly the candidate being tested; the
   original pattern and notes are restored below.  */
9491 old_pat = PATTERN (insn);
9492 old_notes = REG_NOTES (insn);
9493 PATTERN (insn) = pat;
9494 REG_NOTES (insn) = 0;
9496 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9498 /* If it isn't, there is the possibility that we previously had an insn
9499 that clobbered some register as a side effect, but the combined
9500 insn doesn't need to do that. So try once more without the clobbers
9501 unless this represents an ASM insn. */
9503 if (insn_code_number < 0 && ! check_asm_operands (pat)
9504 && GET_CODE (pat) == PARALLEL)
/* Compact the PARALLEL in place, keeping only the non-CLOBBER
   elements, then retry recognition on the stripped pattern.  */
9508 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9509 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9512 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9516 SUBST_INT (XVECLEN (pat, 0), pos);
/* A one-element PARALLEL is degenerate; use the element itself.  */
9519 pat = XVECEXP (pat, 0, 0);
9521 PATTERN (insn) = pat;
9522 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
/* Put INSN back the way we found it before returning anything.  */
9524 PATTERN (insn) = old_pat;
9525 REG_NOTES (insn) = old_notes;
9527 /* Recognize all noop sets, these will be killed by followup pass. */
9528 if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
9529 insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
9531 /* If we had any clobbers to add, make a new pattern that contains
9532 them. Then check to make sure that all of them are dead. */
9533 if (num_clobbers_to_add)
9535 rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9536 rtvec_alloc (GET_CODE (pat) == PARALLEL
9538 + num_clobbers_to_add)
9539 : num_clobbers_to_add + 1));
9541 if (GET_CODE (pat) == PARALLEL)
9542 for (i = 0; i < XVECLEN (pat, 0); i++)
9543 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9545 XVECEXP (newpat, 0, 0) = pat;
9547 add_clobbers (newpat, insn_code_number);
/* Walk only the CLOBBERs just appended by add_clobbers.  NOTE(review):
   lines are elided between the liveness test and the note creation; in
   the full source a clobbered register that is still live causes the
   match to be rejected, while dead ones get REG_UNUSED notes here.  */
9549 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9550 i < XVECLEN (newpat, 0); i++)
9552 if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
9553 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9555 notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9556 XEXP (XVECEXP (newpat, 0, i), 0), notes);
9564 return insn_code_number;
9567 /* Like gen_lowpart_general but for use by combine. In combine it
9568 is not possible to create any new pseudoregs. However, it is
9569 safe to create invalid memory addresses, because combine will
9570 try to recognize them and all they will do is make the combine
9573 If for some reason this cannot do its job, an rtx
9574 (clobber (const_int 0)) is returned.
9575 An insn containing that will not be recognized. */
9578 gen_lowpart_for_combine (enum machine_mode omode, rtx x)
/* Return an rtx for the low part of X in mode OMODE, or a
   (clobber (const_int 0)) when that cannot be expressed (see the
   block comment above this function).  NOTE(review): this listing
   elides several lines; comments below cover only what is visible.  */
9580 enum machine_mode imode = GET_MODE (x);
9581 unsigned int osize = GET_MODE_SIZE (omode);
9582 unsigned int isize = GET_MODE_SIZE (imode);
9588 /* Return identity if this is a CONST or symbolic reference. */
9590 && (GET_CODE (x) == CONST
9591 || GET_CODE (x) == SYMBOL_REF
9592 || GET_CODE (x) == LABEL_REF))
9595 /* We can only support MODE being wider than a word if X is a
9596 constant integer or has a mode the same size. */
9597 if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
9598 && ! ((imode == VOIDmode
9599 && (GET_CODE (x) == CONST_INT
9600 || GET_CODE (x) == CONST_DOUBLE))
9604 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
9605 won't know what to do. So we will strip off the SUBREG here and
9606 process normally. */
9607 if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
9611 /* For use in case we fall down into the address adjustments
9612 further below, we need to adjust the known mode and size of
9613 x; imode and isize, since we just adjusted x. */
9614 imode = GET_MODE (x);
9619 isize = GET_MODE_SIZE (imode);
/* First try the generic lowpart routine.  */
9622 result = gen_lowpart_common (omode, x);
9624 #ifdef CANNOT_CHANGE_MODE_CLASS
9625 if (result != 0 && GET_CODE (result) == SUBREG)
9626 record_subregs_of_mode (result);
9636 /* Refuse to work on a volatile memory ref or one with a mode-dependent
9638 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
9641 /* If we want to refer to something bigger than the original memref,
9642 generate a paradoxical subreg instead. That will force a reload
9643 of the original memref X. */
9645 return gen_rtx_SUBREG (omode, x, 0);
/* MEM case: compute the byte offset of the low part within the
   memory reference, honoring word and byte endianness.  */
9647 if (WORDS_BIG_ENDIAN)
9648 offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);
9650 /* Adjust the address so that the address-after-the-data is
9652 if (BYTES_BIG_ENDIAN)
9653 offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);
9655 return adjust_address_nv (x, omode, offset);
9658 /* If X is a comparison operator, rewrite it in a new mode. This
9659 probably won't match, but may allow further simplifications. */
9660 else if (COMPARISON_P (x))
9661 return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));
9663 /* If we couldn't simplify X any other way, just enclose it in a
9664 SUBREG. Normally, this SUBREG won't match, but some patterns may
9665 include an explicit SUBREG or we may simplify it further in combine. */
9671 offset = subreg_lowpart_offset (omode, imode);
9672 if (imode == VOIDmode)
/* X had no mode (e.g. a constant); pick an integer mode of OMODE's
   size so the SUBREG below is well-formed.  */
9674 imode = int_mode_for_mode (omode);
9675 x = gen_lowpart_common (imode, x);
9679 res = simplify_gen_subreg (omode, x, imode, offset);
/* Everything failed; return the placeholder that recog will never
   match (see block comment above).  */
9685 return gen_rtx_CLOBBER (imode, const0_rtx);
9688 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
9689 comparison code that will be tested.
9691 The result is a possibly different comparison code to use. *POP0 and
9692 *POP1 may be updated.
9694 It is possible that we might detect that a comparison is either always
9695 true or always false. However, we do not perform general constant
9696 folding in combine, so this knowledge isn't useful. Such tautologies
9697 should have been detected earlier. Hence we ignore all such cases. */
9699 static enum rtx_code
9700 simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
9706 enum machine_mode mode, tmode;
9708 /* Try a few ways of applying the same transformation to both operands. */
9711 #ifndef WORD_REGISTER_OPERATIONS
9712 /* The test below this one won't handle SIGN_EXTENDs on these machines,
9713 so check specially. */
9714 if (code != GTU && code != GEU && code != LTU && code != LEU
9715 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9716 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9717 && GET_CODE (XEXP (op1, 0)) == ASHIFT
9718 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9719 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9720 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
9721 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
9722 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9723 && XEXP (op0, 1) == XEXP (op1, 1)
9724 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
9725 && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
9726 && (INTVAL (XEXP (op0, 1))
9727 == (GET_MODE_BITSIZE (GET_MODE (op0))
9729 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9731 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9732 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9736 /* If both operands are the same constant shift, see if we can ignore the
9737 shift. We can if the shift is a rotate or if the bits shifted out of
9738 this shift are known to be zero for both inputs and if the type of
9739 comparison is compatible with the shift. */
9740 if (GET_CODE (op0) == GET_CODE (op1)
9741 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9742 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
9743 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
9744 && (code != GT && code != LT && code != GE && code != LE))
9745 || (GET_CODE (op0) == ASHIFTRT
9746 && (code != GTU && code != LTU
9747 && code != GEU && code != LEU)))
9748 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9749 && INTVAL (XEXP (op0, 1)) >= 0
9750 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9751 && XEXP (op0, 1) == XEXP (op1, 1))
9753 enum machine_mode mode = GET_MODE (op0);
9754 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9755 int shift_count = INTVAL (XEXP (op0, 1));
9757 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9758 mask &= (mask >> shift_count) << shift_count;
9759 else if (GET_CODE (op0) == ASHIFT)
9760 mask = (mask & (mask << shift_count)) >> shift_count;
9762 if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
9763 && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
9764 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9769 /* If both operands are AND's of a paradoxical SUBREG by constant, the
9770 SUBREGs are of the same mode, and, in both cases, the AND would
9771 be redundant if the comparison was done in the narrower mode,
9772 do the comparison in the narrower mode (e.g., we are AND'ing with 1
9773 and the operand's possibly nonzero bits are 0xffffff01; in that case
9774 if we only care about QImode, we don't need the AND). This case
9775 occurs if the output mode of an scc insn is not SImode and
9776 STORE_FLAG_VALUE == 1 (e.g., the 386).
9778 Similarly, check for a case where the AND's are ZERO_EXTEND
9779 operations from some narrower mode even though a SUBREG is not
9782 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9783 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9784 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
9786 rtx inner_op0 = XEXP (op0, 0);
9787 rtx inner_op1 = XEXP (op1, 0);
9788 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9789 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9792 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9793 && (GET_MODE_SIZE (GET_MODE (inner_op0))
9794 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9795 && (GET_MODE (SUBREG_REG (inner_op0))
9796 == GET_MODE (SUBREG_REG (inner_op1)))
9797 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
9798 <= HOST_BITS_PER_WIDE_INT)
9799 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
9800 GET_MODE (SUBREG_REG (inner_op0)))))
9801 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9802 GET_MODE (SUBREG_REG (inner_op1))))))
9804 op0 = SUBREG_REG (inner_op0);
9805 op1 = SUBREG_REG (inner_op1);
9807 /* The resulting comparison is always unsigned since we masked
9808 off the original sign bit. */
9809 code = unsigned_condition (code);
9815 for (tmode = GET_CLASS_NARROWEST_MODE
9816 (GET_MODE_CLASS (GET_MODE (op0)));
9817 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
9818 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
9820 op0 = gen_lowpart (tmode, inner_op0);
9821 op1 = gen_lowpart (tmode, inner_op1);
9822 code = unsigned_condition (code);
9831 /* If both operands are NOT, we can strip off the outer operation
9832 and adjust the comparison code for swapped operands; similarly for
9833 NEG, except that this must be an equality comparison. */
9834 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9835 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9836 && (code == EQ || code == NE)))
9837 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
9843 /* If the first operand is a constant, swap the operands and adjust the
9844 comparison code appropriately, but don't do this if the second operand
9845 is already a constant integer. */
9846 if (swap_commutative_operands_p (op0, op1))
9848 tem = op0, op0 = op1, op1 = tem;
9849 code = swap_condition (code);
9852 /* We now enter a loop during which we will try to simplify the comparison.
9853 For the most part, we only are concerned with comparisons with zero,
9854 but some things may really be comparisons with zero but not start
9855 out looking that way. */
9857 while (GET_CODE (op1) == CONST_INT)
9859 enum machine_mode mode = GET_MODE (op0);
9860 unsigned int mode_width = GET_MODE_BITSIZE (mode);
9861 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9862 int equality_comparison_p;
9863 int sign_bit_comparison_p;
9864 int unsigned_comparison_p;
9865 HOST_WIDE_INT const_op;
9867 /* We only want to handle integral modes. This catches VOIDmode,
9868 CCmode, and the floating-point modes. An exception is that we
9869 can handle VOIDmode if OP0 is a COMPARE or a comparison
9872 if (GET_MODE_CLASS (mode) != MODE_INT
9873 && ! (mode == VOIDmode
9874 && (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
9877 /* Get the constant we are comparing against and turn off all bits
9878 not on in our mode. */
9879 const_op = INTVAL (op1);
9880 if (mode != VOIDmode)
9881 const_op = trunc_int_for_mode (const_op, mode);
9882 op1 = GEN_INT (const_op);
9884 /* If we are comparing against a constant power of two and the value
9885 being compared can only have that single bit nonzero (e.g., it was
9886 `and'ed with that bit), we can replace this with a comparison
9889 && (code == EQ || code == NE || code == GE || code == GEU
9890 || code == LT || code == LTU)
9891 && mode_width <= HOST_BITS_PER_WIDE_INT
9892 && exact_log2 (const_op) >= 0
9893 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
9895 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
9896 op1 = const0_rtx, const_op = 0;
9899 /* Similarly, if we are comparing a value known to be either -1 or
9900 0 with -1, change it to the opposite comparison against zero. */
9903 && (code == EQ || code == NE || code == GT || code == LE
9904 || code == GEU || code == LTU)
9905 && num_sign_bit_copies (op0, mode) == mode_width)
9907 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
9908 op1 = const0_rtx, const_op = 0;
9911 /* Do some canonicalizations based on the comparison code. We prefer
9912 comparisons against zero and then prefer equality comparisons.
9913 If we can reduce the size of a constant, we will do that too. */
9918 /* < C is equivalent to <= (C - 1) */
9922 op1 = GEN_INT (const_op);
9924 /* ... fall through to LE case below. */
9930 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
9934 op1 = GEN_INT (const_op);
9938 /* If we are doing a <= 0 comparison on a value known to have
9939 a zero sign bit, we can replace this with == 0. */
9940 else if (const_op == 0
9941 && mode_width <= HOST_BITS_PER_WIDE_INT
9942 && (nonzero_bits (op0, mode)
9943 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
9948 /* >= C is equivalent to > (C - 1). */
9952 op1 = GEN_INT (const_op);
9954 /* ... fall through to GT below. */
9960 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
9964 op1 = GEN_INT (const_op);
9968 /* If we are doing a > 0 comparison on a value known to have
9969 a zero sign bit, we can replace this with != 0. */
9970 else if (const_op == 0
9971 && mode_width <= HOST_BITS_PER_WIDE_INT
9972 && (nonzero_bits (op0, mode)
9973 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
9978 /* < C is equivalent to <= (C - 1). */
9982 op1 = GEN_INT (const_op);
9984 /* ... fall through ... */
9987 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
9988 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9989 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9991 const_op = 0, op1 = const0_rtx;
9999 /* unsigned <= 0 is equivalent to == 0 */
10003 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
10004 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10005 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10007 const_op = 0, op1 = const0_rtx;
10013 /* >= C is equivalent to > (C - 1). */
10017 op1 = GEN_INT (const_op);
10019 /* ... fall through ... */
10022 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
10023 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10024 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10026 const_op = 0, op1 = const0_rtx;
10034 /* unsigned > 0 is equivalent to != 0 */
10038 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
10039 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10040 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10042 const_op = 0, op1 = const0_rtx;
10051 /* Compute some predicates to simplify code below. */
10053 equality_comparison_p = (code == EQ || code == NE);
10054 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10055 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10058 /* If this is a sign bit comparison and we can do arithmetic in
10059 MODE, say that we will only be needing the sign bit of OP0. */
10060 if (sign_bit_comparison_p
10061 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10062 op0 = force_to_mode (op0, mode,
10064 << (GET_MODE_BITSIZE (mode) - 1)),
10067 /* Now try cases based on the opcode of OP0. If none of the cases
10068 does a "continue", we exit this loop immediately after the
10071 switch (GET_CODE (op0))
10074 /* If we are extracting a single bit from a variable position in
10075 a constant that has only a single bit set and are comparing it
10076 with zero, we can convert this into an equality comparison
10077 between the position and the location of the single bit. */
10078 /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
10079 have already reduced the shift count modulo the word size. */
10080 if (!SHIFT_COUNT_TRUNCATED
10081 && GET_CODE (XEXP (op0, 0)) == CONST_INT
10082 && XEXP (op0, 1) == const1_rtx
10083 && equality_comparison_p && const_op == 0
10084 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10086 if (BITS_BIG_ENDIAN)
10088 enum machine_mode new_mode
10089 = mode_for_extraction (EP_extzv, 1);
10090 if (new_mode == MAX_MACHINE_MODE)
10091 i = BITS_PER_WORD - 1 - i;
10095 i = (GET_MODE_BITSIZE (mode) - 1 - i);
10099 op0 = XEXP (op0, 2);
10103 /* Result is nonzero iff shift count is equal to I. */
10104 code = reverse_condition (code);
10108 /* ... fall through ... */
10111 tem = expand_compound_operation (op0);
10120 /* If testing for equality, we can take the NOT of the constant. */
10121 if (equality_comparison_p
10122 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10124 op0 = XEXP (op0, 0);
10129 /* If just looking at the sign bit, reverse the sense of the
10131 if (sign_bit_comparison_p)
10133 op0 = XEXP (op0, 0);
10134 code = (code == GE ? LT : GE);
10140 /* If testing for equality, we can take the NEG of the constant. */
10141 if (equality_comparison_p
10142 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10144 op0 = XEXP (op0, 0);
10149 /* The remaining cases only apply to comparisons with zero. */
10153 /* When X is ABS or is known positive,
10154 (neg X) is < 0 if and only if X != 0. */
10156 if (sign_bit_comparison_p
10157 && (GET_CODE (XEXP (op0, 0)) == ABS
10158 || (mode_width <= HOST_BITS_PER_WIDE_INT
10159 && (nonzero_bits (XEXP (op0, 0), mode)
10160 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10162 op0 = XEXP (op0, 0);
10163 code = (code == LT ? NE : EQ);
10167 /* If we have NEG of something whose two high-order bits are the
10168 same, we know that "(-a) < 0" is equivalent to "a > 0". */
10169 if (num_sign_bit_copies (op0, mode) >= 2)
10171 op0 = XEXP (op0, 0);
10172 code = swap_condition (code);
10178 /* If we are testing equality and our count is a constant, we
10179 can perform the inverse operation on our RHS. */
10180 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10181 && (tem = simplify_binary_operation (ROTATERT, mode,
10182 op1, XEXP (op0, 1))) != 0)
10184 op0 = XEXP (op0, 0);
10189 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10190 a particular bit. Convert it to an AND of a constant of that
10191 bit. This will be converted into a ZERO_EXTRACT. */
10192 if (const_op == 0 && sign_bit_comparison_p
10193 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10194 && mode_width <= HOST_BITS_PER_WIDE_INT)
10196 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10199 - INTVAL (XEXP (op0, 1)))));
10200 code = (code == LT ? NE : EQ);
10204 /* Fall through. */
10207 /* ABS is ignorable inside an equality comparison with zero. */
10208 if (const_op == 0 && equality_comparison_p)
10210 op0 = XEXP (op0, 0);
10216 /* Can simplify (compare (zero/sign_extend FOO) CONST) to
10217 (compare FOO CONST) if CONST fits in FOO's mode and we
10218 are either testing inequality or have an unsigned
10219 comparison with ZERO_EXTEND or a signed comparison with
10220 SIGN_EXTEND. But don't do it if we don't have a compare
10221 insn of the given mode, since we'd have to revert it
10222 later on, and then we wouldn't know whether to sign- or
10224 mode = GET_MODE (XEXP (op0, 0));
10225 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10226 && ! unsigned_comparison_p
10227 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10228 && ((unsigned HOST_WIDE_INT) const_op
10229 < (((unsigned HOST_WIDE_INT) 1
10230 << (GET_MODE_BITSIZE (mode) - 1))))
10231 && cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
10233 op0 = XEXP (op0, 0);
10239 /* Check for the case where we are comparing A - C1 with C2, that is
10241 (subreg:MODE (plus (A) (-C1))) op (C2)
10243 with C1 a constant, and try to lift the SUBREG, i.e. to do the
10244 comparison in the wider mode. One of the following two conditions
10245 must be true in order for this to be valid:
10247 1. The mode extension results in the same bit pattern being added
10248 on both sides and the comparison is equality or unsigned. As
10249 C2 has been truncated to fit in MODE, the pattern can only be
10252 2. The mode extension results in the sign bit being copied on
10255 The difficulty here is that we have predicates for A but not for
10256 (A - C1) so we need to check that C1 is within proper bounds so
10257 as to perturb A as little as possible. */
10259 if (mode_width <= HOST_BITS_PER_WIDE_INT
10260 && subreg_lowpart_p (op0)
10261 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
10262 && GET_CODE (SUBREG_REG (op0)) == PLUS
10263 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT)
10265 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
10266 rtx a = XEXP (SUBREG_REG (op0), 0);
10267 HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
10270 && (unsigned HOST_WIDE_INT) c1
10271 < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
10272 && (equality_comparison_p || unsigned_comparison_p)
10273 /* (A - C1) zero-extends if it is positive and sign-extends
10274 if it is negative, C2 both zero- and sign-extends. */
10275 && ((0 == (nonzero_bits (a, inner_mode)
10276 & ~GET_MODE_MASK (mode))
10278 /* (A - C1) sign-extends if it is positive and 1-extends
10279 if it is negative, C2 both sign- and 1-extends. */
10280 || (num_sign_bit_copies (a, inner_mode)
10281 > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10284 || ((unsigned HOST_WIDE_INT) c1
10285 < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
10286 /* (A - C1) always sign-extends, like C2. */
10287 && num_sign_bit_copies (a, inner_mode)
10288 > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10289 - (mode_width - 1))))
10291 op0 = SUBREG_REG (op0);
10296 /* If the inner mode is narrower and we are extracting the low part,
10297 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10298 if (subreg_lowpart_p (op0)
10299 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10300 /* Fall through */ ;
10304 /* ... fall through ... */
10307 mode = GET_MODE (XEXP (op0, 0));
10308 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10309 && (unsigned_comparison_p || equality_comparison_p)
10310 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10311 && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode))
10312 && cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
10314 op0 = XEXP (op0, 0);
10320 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
10321 this for equality comparisons due to pathological cases involving
10323 if (equality_comparison_p
10324 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10325 op1, XEXP (op0, 1))))
10327 op0 = XEXP (op0, 0);
10332 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10333 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10334 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10336 op0 = XEXP (XEXP (op0, 0), 0);
10337 code = (code == LT ? EQ : NE);
10343 /* We used to optimize signed comparisons against zero, but that
10344 was incorrect. Unsigned comparisons against zero (GTU, LEU)
10345 arrive here as equality comparisons, or (GEU, LTU) are
10346 optimized away. No need to special-case them. */
10348 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10349 (eq B (minus A C)), whichever simplifies. We can only do
10350 this for equality comparisons due to pathological cases involving
10352 if (equality_comparison_p
10353 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10354 XEXP (op0, 1), op1)))
10356 op0 = XEXP (op0, 0);
10361 if (equality_comparison_p
10362 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10363 XEXP (op0, 0), op1)))
10365 op0 = XEXP (op0, 1);
10370 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10371 of bits in X minus 1, is one iff X > 0. */
10372 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10373 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10374 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
10376 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10378 op0 = XEXP (op0, 1);
10379 code = (code == GE ? LE : GT);
10385 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10386 if C is zero or B is a constant. */
10387 if (equality_comparison_p
10388 && 0 != (tem = simplify_binary_operation (XOR, mode,
10389 XEXP (op0, 1), op1)))
10391 op0 = XEXP (op0, 0);
10398 case UNEQ: case LTGT:
10399 case LT: case LTU: case UNLT: case LE: case LEU: case UNLE:
10400 case GT: case GTU: case UNGT: case GE: case GEU: case UNGE:
10401 case UNORDERED: case ORDERED:
10402 /* We can't do anything if OP0 is a condition code value, rather
10403 than an actual data value. */
10405 || CC0_P (XEXP (op0, 0))
10406 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10409 /* Get the two operands being compared. */
10410 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10411 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10413 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10415 /* Check for the cases where we simply want the result of the
10416 earlier test or the opposite of that result. */
10417 if (code == NE || code == EQ
10418 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10419 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10420 && (STORE_FLAG_VALUE
10421 & (((HOST_WIDE_INT) 1
10422 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10423 && (code == LT || code == GE)))
10425 enum rtx_code new_code;
10426 if (code == LT || code == NE)
10427 new_code = GET_CODE (op0);
10429 new_code = reversed_comparison_code (op0, NULL);
10431 if (new_code != UNKNOWN)
10442 /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
10444 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10445 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10446 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10448 op0 = XEXP (op0, 1);
10449 code = (code == GE ? GT : LE);
10455 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10456 will be converted to a ZERO_EXTRACT later. */
10457 if (const_op == 0 && equality_comparison_p
10458 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10459 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10461 op0 = simplify_and_const_int
10462 (op0, mode, gen_rtx_LSHIFTRT (mode,
10464 XEXP (XEXP (op0, 0), 1)),
10465 (HOST_WIDE_INT) 1);
10469 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10470 zero and X is a comparison and C1 and C2 describe only bits set
10471 in STORE_FLAG_VALUE, we can compare with X. */
10472 if (const_op == 0 && equality_comparison_p
10473 && mode_width <= HOST_BITS_PER_WIDE_INT
10474 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10475 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10476 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10477 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10478 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10480 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10481 << INTVAL (XEXP (XEXP (op0, 0), 1)));
10482 if ((~STORE_FLAG_VALUE & mask) == 0
10483 && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
10484 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10485 && COMPARISON_P (tem))))
10487 op0 = XEXP (XEXP (op0, 0), 0);
10492 /* If we are doing an equality comparison of an AND of a bit equal
10493 to the sign bit, replace this with a LT or GE comparison of
10494 the underlying value. */
10495 if (equality_comparison_p
10497 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10498 && mode_width <= HOST_BITS_PER_WIDE_INT
10499 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10500 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
10502 op0 = XEXP (op0, 0);
10503 code = (code == EQ ? GE : LT);
10507 /* If this AND operation is really a ZERO_EXTEND from a narrower
10508 mode, the constant fits within that mode, and this is either an
10509 equality or unsigned comparison, try to do this comparison in
10510 the narrower mode. */
10511 if ((equality_comparison_p || unsigned_comparison_p)
10512 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10513 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10514 & GET_MODE_MASK (mode))
10516 && const_op >> i == 0
10517 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10519 op0 = gen_lowpart (tmode, XEXP (op0, 0));
10523 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
10524 fits in both M1 and M2 and the SUBREG is either paradoxical
10525 or represents the low part, permute the SUBREG and the AND
10527 if (GET_CODE (XEXP (op0, 0)) == SUBREG)
10529 unsigned HOST_WIDE_INT c1;
10530 tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
10531 /* Require an integral mode, to avoid creating something like
10533 if (SCALAR_INT_MODE_P (tmode)
10534 /* It is unsafe to commute the AND into the SUBREG if the
10535 SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
10536 not defined. As originally written the upper bits
10537 have a defined value due to the AND operation.
10538 However, if we commute the AND inside the SUBREG then
10539 they no longer have defined values and the meaning of
10540 the code has been changed. */
10542 #ifdef WORD_REGISTER_OPERATIONS
10543 || (mode_width > GET_MODE_BITSIZE (tmode)
10544 && mode_width <= BITS_PER_WORD)
10546 || (mode_width <= GET_MODE_BITSIZE (tmode)
10547 && subreg_lowpart_p (XEXP (op0, 0))))
10548 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10549 && mode_width <= HOST_BITS_PER_WIDE_INT
10550 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
10551 && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
10552 && (c1 & ~GET_MODE_MASK (tmode)) == 0
10554 && c1 != GET_MODE_MASK (tmode))
10556 op0 = simplify_gen_binary (AND, tmode,
10557 SUBREG_REG (XEXP (op0, 0)),
10558 gen_int_mode (c1, tmode));
10559 op0 = gen_lowpart (mode, op0);
10564 /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0). */
10565 if (const_op == 0 && equality_comparison_p
10566 && XEXP (op0, 1) == const1_rtx
10567 && GET_CODE (XEXP (op0, 0)) == NOT)
10569 op0 = simplify_and_const_int
10570 (NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1);
10571 code = (code == NE ? EQ : NE);
10575 /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
10576 (eq (and (lshiftrt X) 1) 0).
10577 Also handle the case where (not X) is expressed using xor. */
10578 if (const_op == 0 && equality_comparison_p
10579 && XEXP (op0, 1) == const1_rtx
10580 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
10582 rtx shift_op = XEXP (XEXP (op0, 0), 0);
10583 rtx shift_count = XEXP (XEXP (op0, 0), 1);
10585 if (GET_CODE (shift_op) == NOT
10586 || (GET_CODE (shift_op) == XOR
10587 && GET_CODE (XEXP (shift_op, 1)) == CONST_INT
10588 && GET_CODE (shift_count) == CONST_INT
10589 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
10590 && (INTVAL (XEXP (shift_op, 1))
10591 == (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
10593 op0 = simplify_and_const_int
10595 gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count),
10596 (HOST_WIDE_INT) 1);
10597 code = (code == NE ? EQ : NE);
10604 /* If we have (compare (ashift FOO N) (const_int C)) and
10605 the high order N bits of FOO (N+1 if an inequality comparison)
10606 are known to be zero, we can do this by comparing FOO with C
10607 shifted right N bits so long as the low-order N bits of C are
10609 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10610 && INTVAL (XEXP (op0, 1)) >= 0
10611 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
10612 < HOST_BITS_PER_WIDE_INT)
10614 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
10615 && mode_width <= HOST_BITS_PER_WIDE_INT
10616 && (nonzero_bits (XEXP (op0, 0), mode)
10617 & ~(mask >> (INTVAL (XEXP (op0, 1))
10618 + ! equality_comparison_p))) == 0)
10620 /* We must perform a logical shift, not an arithmetic one,
10621 as we want the top N bits of C to be zero. */
10622 unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
10624 temp >>= INTVAL (XEXP (op0, 1));
10625 op1 = gen_int_mode (temp, mode);
10626 op0 = XEXP (op0, 0);
10630 /* If we are doing a sign bit comparison, it means we are testing
10631 a particular bit. Convert it to the appropriate AND. */
10632 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10633 && mode_width <= HOST_BITS_PER_WIDE_INT)
10635 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10638 - INTVAL (XEXP (op0, 1)))));
10639 code = (code == LT ? NE : EQ);
10643 /* If this an equality comparison with zero and we are shifting
10644 the low bit to the sign bit, we can convert this to an AND of the
10646 if (const_op == 0 && equality_comparison_p
10647 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10648 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10651 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10652 (HOST_WIDE_INT) 1);
10658 /* If this is an equality comparison with zero, we can do this
10659 as a logical shift, which might be much simpler. */
10660 if (equality_comparison_p && const_op == 0
10661 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10663 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10665 INTVAL (XEXP (op0, 1)));
10669 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10670 do the comparison in a narrower mode. */
10671 if (! unsigned_comparison_p
10672 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10673 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10674 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10675 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10676 MODE_INT, 1)) != BLKmode
10677 && (((unsigned HOST_WIDE_INT) const_op
10678 + (GET_MODE_MASK (tmode) >> 1) + 1)
10679 <= GET_MODE_MASK (tmode)))
10681 op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
10685 /* Likewise if OP0 is a PLUS of a sign extension with a
10686 constant, which is usually represented with the PLUS
10687 between the shifts. */
10688 if (! unsigned_comparison_p
10689 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10690 && GET_CODE (XEXP (op0, 0)) == PLUS
10691 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10692 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
10693 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
10694 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10695 MODE_INT, 1)) != BLKmode
10696 && (((unsigned HOST_WIDE_INT) const_op
10697 + (GET_MODE_MASK (tmode) >> 1) + 1)
10698 <= GET_MODE_MASK (tmode)))
10700 rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
10701 rtx add_const = XEXP (XEXP (op0, 0), 1);
10702 rtx new_const = simplify_gen_binary (ASHIFTRT, GET_MODE (op0),
10703 add_const, XEXP (op0, 1));
10705 op0 = simplify_gen_binary (PLUS, tmode,
10706 gen_lowpart (tmode, inner),
10711 /* ... fall through ... */
10713 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10714 the low order N bits of FOO are known to be zero, we can do this
10715 by comparing FOO with C shifted left N bits so long as no
10716 overflow occurs. */
10717 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10718 && INTVAL (XEXP (op0, 1)) >= 0
10719 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10720 && mode_width <= HOST_BITS_PER_WIDE_INT
10721 && (nonzero_bits (XEXP (op0, 0), mode)
10722 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
10723 && (((unsigned HOST_WIDE_INT) const_op
10724 + (GET_CODE (op0) != LSHIFTRT
10725 ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
10728 <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
10730 /* If the shift was logical, then we must make the condition
10732 if (GET_CODE (op0) == LSHIFTRT)
10733 code = unsigned_condition (code);
10735 const_op <<= INTVAL (XEXP (op0, 1));
10736 op1 = GEN_INT (const_op);
10737 op0 = XEXP (op0, 0);
10741 /* If we are using this shift to extract just the sign bit, we
10742 can replace this with an LT or GE comparison. */
10744 && (equality_comparison_p || sign_bit_comparison_p)
10745 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10746 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10749 op0 = XEXP (op0, 0);
10750 code = (code == NE || code == GT ? LT : GE);
10762 /* Now make any compound operations involved in this comparison. Then,
10763 check for an outmost SUBREG on OP0 that is not doing anything or is
10764 paradoxical. The latter transformation must only be performed when
10765 it is known that the "extra" bits will be the same in op0 and op1 or
10766 that they don't matter. There are three cases to consider:
10768 1. SUBREG_REG (op0) is a register. In this case the bits are don't
10769 care bits and we can assume they have any convenient value. So
10770 making the transformation is safe.
10772 2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
10773 In this case the upper bits of op0 are undefined. We should not make
10774 the simplification in that case as we do not know the contents of
10777 3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
10778 UNKNOWN. In that case we know those bits are zeros or ones. We must
10779 also be sure that they are the same as the upper bits of op1.
10781 We can never remove a SUBREG for a non-equality comparison because
10782 the sign bit is in a different place in the underlying object. */
10784 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10785 op1 = make_compound_operation (op1, SET);
10787 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10788 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10789 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
10790 && (code == NE || code == EQ))
10792 if (GET_MODE_SIZE (GET_MODE (op0))
10793 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
10795 /* For paradoxical subregs, allow case 1 as above. Case 3 isn't
10797 if (REG_P (SUBREG_REG (op0)))
10799 op0 = SUBREG_REG (op0);
10800 op1 = gen_lowpart (GET_MODE (op0), op1);
10803 else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10804 <= HOST_BITS_PER_WIDE_INT)
10805 && (nonzero_bits (SUBREG_REG (op0),
10806 GET_MODE (SUBREG_REG (op0)))
10807 & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
10809 tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);
10811 if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
10812 & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
10813 op0 = SUBREG_REG (op0), op1 = tem;
10817 /* We now do the opposite procedure: Some machines don't have compare
10818 insns in all modes. If OP0's mode is an integer mode smaller than a
10819 word and we can't do a compare in that mode, see if there is a larger
10820 mode for which we can do the compare. There are a number of cases in
10821 which we can use the wider mode. */
10823 mode = GET_MODE (op0);
10824 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10825 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10826 && ! have_insn_for (COMPARE, mode))
10827 for (tmode = GET_MODE_WIDER_MODE (mode);
10829 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
10830 tmode = GET_MODE_WIDER_MODE (tmode))
10831 if (have_insn_for (COMPARE, tmode))
10835 /* If the only nonzero bits in OP0 and OP1 are those in the
10836 narrower mode and this is an equality or unsigned comparison,
10837 we can use the wider mode. Similarly for sign-extended
10838 values, in which case it is true for all comparisons. */
10839 zero_extended = ((code == EQ || code == NE
10840 || code == GEU || code == GTU
10841 || code == LEU || code == LTU)
10842 && (nonzero_bits (op0, tmode)
10843 & ~GET_MODE_MASK (mode)) == 0
10844 && ((GET_CODE (op1) == CONST_INT
10845 || (nonzero_bits (op1, tmode)
10846 & ~GET_MODE_MASK (mode)) == 0)));
10849 || ((num_sign_bit_copies (op0, tmode)
10850 > (unsigned int) (GET_MODE_BITSIZE (tmode)
10851 - GET_MODE_BITSIZE (mode)))
10852 && (num_sign_bit_copies (op1, tmode)
10853 > (unsigned int) (GET_MODE_BITSIZE (tmode)
10854 - GET_MODE_BITSIZE (mode)))))
10856 /* If OP0 is an AND and we don't have an AND in MODE either,
10857 make a new AND in the proper mode. */
10858 if (GET_CODE (op0) == AND
10859 && !have_insn_for (AND, mode))
10860 op0 = simplify_gen_binary (AND, tmode,
10861 gen_lowpart (tmode,
10863 gen_lowpart (tmode,
10866 op0 = gen_lowpart (tmode, op0);
10867 if (zero_extended && GET_CODE (op1) == CONST_INT)
10868 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
10869 op1 = gen_lowpart (tmode, op1);
10873 /* If this is a test for negative, we can make an explicit
10874 test of the sign bit. */
10876 if (op1 == const0_rtx && (code == LT || code == GE)
10877 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10879 op0 = simplify_gen_binary (AND, tmode,
10880 gen_lowpart (tmode, op0),
10881 GEN_INT ((HOST_WIDE_INT) 1
10882 << (GET_MODE_BITSIZE (mode)
10884 code = (code == LT) ? NE : EQ;
10889 #ifdef CANONICALIZE_COMPARISON
10890 /* If this machine only supports a subset of valid comparisons, see if we
10891 can convert an unsupported one into a supported one. */
10892 CANONICALIZE_COMPARISON (code, op0, op1);
10901 /* Utility function for record_value_for_reg. Count number of
10906 enum rtx_code code = GET_CODE (x);
10910 if (GET_RTX_CLASS (code) == '2'
10911 || GET_RTX_CLASS (code) == 'c')
10913 rtx x0 = XEXP (x, 0);
10914 rtx x1 = XEXP (x, 1);
10917 return 1 + 2 * count_rtxs (x0);
10919 if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
10920 || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
10921 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
10922 return 2 + 2 * count_rtxs (x0)
10923 + count_rtxs (x == XEXP (x1, 0)
10924 ? XEXP (x1, 1) : XEXP (x1, 0));
10926 if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
10927 || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
10928 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
10929 return 2 + 2 * count_rtxs (x1)
10930 + count_rtxs (x == XEXP (x0, 0)
10931 ? XEXP (x0, 1) : XEXP (x0, 0));
10934 fmt = GET_RTX_FORMAT (code);
10935 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10937 ret += count_rtxs (XEXP (x, i));
10942 /* Utility function for following routine.  Called when X is part of a value
10943    being stored into last_set_value.  Sets last_set_table_tick
10944    for each register mentioned.  Similar to mention_regs in cse.c  */
/* NOTE(review): the fused listing numbers jump (10944 -> 10947, 10950 ->
   10955, ...), so declaration, brace, and condition lines are missing from
   this extract; compare against upstream combine.c before editing.  */
10947 update_table_tick (rtx x)
10949   enum rtx_code code = GET_CODE (x);
10950   const char *fmt = GET_RTX_FORMAT (code);
/* REG case (the `if (code == REG)' guard is among the dropped lines):
   stamp every hard/pseudo register covered by X with the current
   label_tick so later sets in this block are marked invalid.  */
10955       unsigned int regno = REGNO (x);
10956       unsigned int endregno
10957 	= regno + (regno < FIRST_PSEUDO_REGISTER
10958 		   ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
10961       for (r = regno; r < endregno; r++)
10962 	reg_stat[r].last_set_table_tick = label_tick;
/* Otherwise recurse into every 'e' operand of X.  */
10967   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10968     /* Note that we can't have an "E" in values stored; see
10969        get_last_value_validate.  */
10972 	/* Check for identical subexpressions.  If x contains
10973 	   identical subexpression we only have to traverse one of
	   them.  */
10975 	if (i == 0 && ARITHMETIC_P (x))
10977 	    /* Note that at this point x1 has already been
	       processed.  */
10979 	    rtx x0 = XEXP (x, 0);
10980 	    rtx x1 = XEXP (x, 1);
10982 	    /* If x0 and x1 are identical then there is no need to
	       process x0 (a `break' follows in the dropped lines).  */
10987 	    /* If x0 is identical to a subexpression of x1 then while
10988 	       processing x1, x0 has already been processed.  Thus we
10989 	       are done with x.  */
10990 	    if (ARITHMETIC_P (x1)
10991 		&& (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
10994 	    /* If x1 is identical to a subexpression of x0 then we
10995 	       still have to process the rest of x0.  */
10996 	    if (ARITHMETIC_P (x0)
10997 		&& (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
/* Recurse only into the operand of X0 that is NOT shared with X1.  */
10999 		update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
11004 	update_table_tick (XEXP (x, i));
11008 /* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
11009    are saying that the register is clobbered and we no longer know its
11010    value.  If INSN is zero, don't update reg_stat[].last_set; this is
11011    only permitted with VALUE also zero and is used to invalidate the
   register.  */
/* NOTE(review): listing numbers jump throughout this block; declaration
   and brace lines are missing from the extract.  */
11015 record_value_for_reg (rtx reg, rtx insn, rtx value)
11017   unsigned int regno = REGNO (reg);
11018   unsigned int endregno
11019     = regno + (regno < FIRST_PSEUDO_REGISTER
11020 	       ? hard_regno_nregs[regno][GET_MODE (reg)] : 1);
11023   /* If VALUE contains REG and we have a previous value for REG, substitute
11024      the previous value.  */
11025   if (value && insn && reg_overlap_mentioned_p (reg, value))
11029       /* Set things up so get_last_value is allowed to see anything set up to
	 our insn.  */
11031       subst_low_cuid = INSN_CUID (insn);
11032       tem = get_last_value (reg);
11034       /* If TEM is simply a binary operation with two CLOBBERs as operands,
11035 	 it isn't going to be useful and will take a lot of time to process,
11036 	 so just use the CLOBBER.  */
11040 	  if (ARITHMETIC_P (tem)
11041 	      && GET_CODE (XEXP (tem, 0)) == CLOBBER
11042 	      && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11043 	    tem = XEXP (tem, 0);
11044 	  else if (count_occurrences (value, reg, 1) >= 2)
11046 	      /* If there are two or more occurrences of REG in VALUE,
11047 		 prevent the value from growing too much.  */
11048 	      if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
11049 		tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
/* Substitute the old value of REG into VALUE (on a copy, so the insn
   stream itself is not modified).  */
11052 	  value = replace_rtx (copy_rtx (value), reg, tem);
11056   /* For each register modified, show we don't know its value, that
11057      we don't know about its bitwise content, that its value has been
11058      updated, and that we don't know the location of the death of the
     register.  */
11060   for (i = regno; i < endregno; i++)
11063 	reg_stat[i].last_set = insn;
11065       reg_stat[i].last_set_value = 0;
11066       reg_stat[i].last_set_mode = 0;
11067       reg_stat[i].last_set_nonzero_bits = 0;
11068       reg_stat[i].last_set_sign_bit_copies = 0;
11069       reg_stat[i].last_death = 0;
11072   /* Mark registers that are being referenced in this value.  */
11074     update_table_tick (value);
11076   /* Now update the status of each register being set.
11077      If someone is using this register in this block, set this register
11078      to invalid since we will get confused between the two lives in this
11079      basic block.  This makes using this register always invalid.  In cse, we
11080      scan the table to invalidate all entries using this register, but this
11081      is too much work for us.  */
11083   for (i = regno; i < endregno; i++)
11085       reg_stat[i].last_set_label = label_tick;
11086       if (value && reg_stat[i].last_set_table_tick == label_tick)
11087 	reg_stat[i].last_set_invalid = 1;
11089 	reg_stat[i].last_set_invalid = 0;
11092   /* The value being assigned might refer to X (like in "x++;").  In that
11093      case, we must replace it with (clobber (const_int 0)) to prevent
     infinite loops.  */
11095   if (value && ! get_last_value_validate (&value, insn,
11096 					  reg_stat[regno].last_set_label, 0))
11098       value = copy_rtx (value);
11099       if (! get_last_value_validate (&value, insn,
11100 				     reg_stat[regno].last_set_label, 1))
11104   /* For the main register being modified, update the value, the mode, the
11105      nonzero bits, and the number of sign bit copies.  */
11107   reg_stat[regno].last_set_value = value;
11111       enum machine_mode mode = GET_MODE (reg);
11112       subst_low_cuid = INSN_CUID (insn);
11113       reg_stat[regno].last_set_mode = mode;
/* For narrow integer modes, compute nonzero bits in nonzero_bits_mode so
   the cached mask is usable at any width up to HOST_BITS_PER_WIDE_INT.  */
11114       if (GET_MODE_CLASS (mode) == MODE_INT
11115 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11116 	mode = nonzero_bits_mode;
11117       reg_stat[regno].last_set_nonzero_bits = nonzero_bits (value, mode);
11118       reg_stat[regno].last_set_sign_bit_copies
11119 	= num_sign_bit_copies (value, GET_MODE (reg));
11123 /* Called via note_stores from record_dead_and_set_regs to handle one
11124    SET or CLOBBER in an insn.  DATA is the instruction in which the
11125    set is occurring.  */
11128 record_dead_and_set_regs_1 (rtx dest, rtx setter, void *data)
11130   rtx record_dead_insn = (rtx) data;
/* Look through a SUBREG destination to the underlying register.  */
11132   if (GET_CODE (dest) == SUBREG)
11133     dest = SUBREG_REG (dest);
/* NOTE(review): the `if (REG_P (dest))' guard around the following arm
   appears to be among the lines dropped from this extract.  */
11137       /* If we are setting the whole register, we know its value.  Otherwise
11138 	 show that we don't know the value.  We can handle SUBREG in
	 some cases.  */
11140       if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11141 	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11142       else if (GET_CODE (setter) == SET
11143 	       && GET_CODE (SET_DEST (setter)) == SUBREG
11144 	       && SUBREG_REG (SET_DEST (setter)) == dest
11145 	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11146 	       && subreg_lowpart_p (SET_DEST (setter)))
/* Low-part SUBREG store to a word-or-narrower register: record the
   low part of the source as the register's value.  */
11147 	record_value_for_reg (dest, record_dead_insn,
11148 			      gen_lowpart (GET_MODE (dest),
11149 					   SET_SRC (setter)));
/* Partial or unknown store (CLOBBER, strict_low_part, ...): value unknown.  */
11151 	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11153   else if (MEM_P (dest)
11154 	   /* Ignore pushes, they clobber nothing.  */
11155 	   && ! push_operand (dest, GET_MODE (dest)))
11156     mem_last_set = INSN_CUID (record_dead_insn);
11159 /* Update the records of when each REG was most recently set or killed
11160    for the things done by INSN.  This is the last thing done in processing
11161    INSN in the combiner loop.
11163    We update reg_stat[], in particular fields last_set, last_set_value,
11164    last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
11165    last_death, and also the similar information mem_last_set (which insn
11166    most recently modified memory) and last_call_cuid (which insn was the
11167    most recent subroutine call).  */
11170 record_dead_and_set_regs (rtx insn)
/* First process the REG_DEAD and REG_INC notes on INSN.  */
11175   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11177       if (REG_NOTE_KIND (link) == REG_DEAD
11178 	  && REG_P (XEXP (link, 0)))
11180 	  unsigned int regno = REGNO (XEXP (link, 0));
11181 	  unsigned int endregno
11182 	    = regno + (regno < FIRST_PSEUDO_REGISTER
11183 		       ? hard_regno_nregs[regno][GET_MODE (XEXP (link, 0))]
11186 	  for (i = regno; i < endregno; i++)
11187 	    reg_stat[i].last_death = insn;
/* An auto-increment modifies the register in a way we can't track.  */
11189       else if (REG_NOTE_KIND (link) == REG_INC)
11190 	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
/* NOTE(review): the `if (CALL_P (insn))' guard for the following
   call-handling arm appears to be among the dropped lines.  */
11195       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11196 	if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
11198 	    reg_stat[i].last_set_value = 0;
11199 	    reg_stat[i].last_set_mode = 0;
11200 	    reg_stat[i].last_set_nonzero_bits = 0;
11201 	    reg_stat[i].last_set_sign_bit_copies = 0;
11202 	    reg_stat[i].last_death = 0;
/* A call clobbers memory and marks the most recent call point.  */
11205       last_call_cuid = mem_last_set = INSN_CUID (insn);
11207       /* Don't bother recording what this insn does.  It might set the
11208 	 return value register, but we can't combine into a call
11209 	 pattern anyway, so there's no point trying (and it may cause
11210 	 a crash, if e.g. we wind up asking for last_set_value of a
11211 	 SUBREG of the return value register).  */
/* Non-call insn: record every SET and CLOBBER it performs.  */
11215   note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11218 /* If a SUBREG has the promoted bit set, it is in fact a property of the
11219    register present in the SUBREG, so for each such SUBREG go back and
11220    adjust nonzero and sign bit information of the registers that are
11221    known to have some zero/sign bits set.
11223    This is needed because when combine blows the SUBREGs away, the
11224    information on zero/sign bits is lost and further combines can be
11225    missed because of that.  */
11228 record_promoted_value (rtx insn, rtx subreg)
11231   unsigned int regno = REGNO (SUBREG_REG (subreg));
11232   enum machine_mode mode = GET_MODE (subreg);
/* Can't track nonzero bits wider than a HOST_WIDE_INT.  */
11234   if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
/* Walk the LOG_LINKS chain looking for the insn that set REGNO.  */
11237   for (links = LOG_LINKS (insn); links;)
11239       insn = XEXP (links, 0);
11240       set = single_set (insn);
11242       if (! set || !REG_P (SET_DEST (set))
11243 	  || REGNO (SET_DEST (set)) != regno
11244 	  || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11246 	  links = XEXP (links, 1);
/* Found the defining insn still recorded as last_set: narrow the cached
   nonzero-bits mask to the promoted (narrower) mode's mask.  */
11250       if (reg_stat[regno].last_set == insn)
11252 	  if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
11253 	    reg_stat[regno].last_set_nonzero_bits &= GET_MODE_MASK (mode);
/* If this was a register copy, follow the source register too.  */
11256       if (REG_P (SET_SRC (set)))
11258 	  regno = REGNO (SET_SRC (set));
11259 	  links = LOG_LINKS (insn);
11266 /* Scan X for promoted SUBREGs.  For each one found,
11267    note what it implies to the registers used in it.  */
11270 check_promoted_subreg (rtx insn, rtx x)
11272   if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11273       && REG_P (SUBREG_REG (x)))
11274     record_promoted_value (insn, x);
/* Otherwise recurse over all operands, both 'e' and vector ('E'/'V').  */
11277       const char *format = GET_RTX_FORMAT (GET_CODE (x));
11280       for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11284 	    check_promoted_subreg (insn, XEXP (x, i));
11288 	    if (XVEC (x, i) != 0)
11289 	      for (j = 0; j < XVECLEN (x, i); j++)
11290 		check_promoted_subreg (insn, XVECEXP (x, i, j));
11296 /* Utility routine for the following function.  Verify that all the registers
11297    mentioned in *LOC are valid when *LOC was part of a value set when
11298    label_tick == TICK.  Return 0 if some are not.
11300    If REPLACE is nonzero, replace the invalid reference with
11301    (clobber (const_int 0)) and return 1.  This replacement is useful because
11302    we often can get useful information about the form of a value (e.g., if
11303    it was produced by a shift that always produces -1 or 0) even though
11304    we don't know exactly what registers it was produced from.  */
11307 get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
11310   const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11311   int len = GET_RTX_LENGTH (GET_CODE (x));
/* REG case: each covered register must not have been set since TICK.  */
11316       unsigned int regno = REGNO (x);
11317       unsigned int endregno
11318 	= regno + (regno < FIRST_PSEUDO_REGISTER
11319 		   ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
11322       for (j = regno; j < endregno; j++)
11323 	if (reg_stat[j].last_set_invalid
11324 	    /* If this is a pseudo-register that was only set once and not
11325 	       live at the beginning of the function, it is always valid.  */
11326 	    || (! (regno >= FIRST_PSEUDO_REGISTER
11327 		   && REG_N_SETS (regno) == 1
11328 		   && (! REGNO_REG_SET_P
11329 		       (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
11331 		&& reg_stat[j].last_set_label > tick))
/* Invalid register reference: optionally clobber it out (REPLACE case).  */
11334 	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11340   /* If this is a memory reference, make sure that there were
11341      no stores after it that might have clobbered the value.  We don't
11342      have alias info, so we assume any store invalidates it.  */
11343   else if (MEM_P (x) && !MEM_READONLY_P (x)
11344 	   && INSN_CUID (insn) <= mem_last_set)
11347 	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
/* Recurse into all 'e' operands.  */
11351   for (i = 0; i < len; i++)
11355 	  /* Check for identical subexpressions.  If x contains
11356 	     identical subexpression we only have to traverse one of
	     them.  */
11358 	  if (i == 1 && ARITHMETIC_P (x))
11360 	      /* Note that at this point x0 has already been checked
11361 		 and found valid.  */
11362 	      rtx x0 = XEXP (x, 0);
11363 	      rtx x1 = XEXP (x, 1);
11365 	      /* If x0 and x1 are identical then x is also valid.  */
11369 	      /* If x1 is identical to a subexpression of x0 then
11370 		 while checking x0, x1 has already been checked.  Thus
11371 		 it is valid and so is x.  */
11372 	      if (ARITHMETIC_P (x0)
11373 		  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11376 	      /* If x0 is identical to a subexpression of x1 then x is
11377 		 valid iff the rest of x1 is valid.  */
11378 	      if (ARITHMETIC_P (x1)
11379 		  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
/* Only the operand of X1 not shared with X0 still needs checking.  */
11381 		  get_last_value_validate (&XEXP (x1,
11382 						  x0 == XEXP (x1, 0) ? 1 : 0),
11383 					   insn, tick, replace);
11386 	  if (get_last_value_validate (&XEXP (x, i), insn, tick,
11390       /* Don't bother with these.  They shouldn't occur anyway.  */
11391       else if (fmt[i] == 'E')
11395   /* If we haven't found a reason for it to be invalid, it is valid.  */
11399 /* Get the last value assigned to X, if known.  Some registers
11400    in the value may be replaced with (clobber (const_int 0)) if their value
11401    is no longer known reliably.  */
11404 get_last_value (rtx x)
11406   unsigned int regno;
11409   /* If this is a non-paradoxical SUBREG, get the value of its operand and
11410      then convert it to the desired mode.  If this is a paradoxical SUBREG,
11411      we cannot predict what values the "extra" bits might have.  */
11412   if (GET_CODE (x) == SUBREG
11413       && subreg_lowpart_p (x)
11414       && (GET_MODE_SIZE (GET_MODE (x))
11415 	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11416       && (value = get_last_value (SUBREG_REG (x))) != 0)
11417     return gen_lowpart (GET_MODE (x), value);
/* NOTE(review): the `if (!REG_P (x)) return 0;' guard and the regno
   assignment appear to be among the lines dropped from this extract.  */
11423   value = reg_stat[regno].last_set_value;
11425   /* If we don't have a value, or if it isn't for this basic block and
11426      it's either a hard register, set more than once, or it's a live
11427      at the beginning of the function, return 0.
11429      Because if it's not live at the beginning of the function then the reg
11430      is always set before being used (is never used without being set).
11431      And, if it's set only once, and it's always set before use, then all
11432      uses must have the same last value, even if it's not from this basic
     block.  */
11436       || (reg_stat[regno].last_set_label != label_tick
11437 	  && (regno < FIRST_PSEUDO_REGISTER
11438 	      || REG_N_SETS (regno) != 1
11439 	      || (REGNO_REG_SET_P
11440 		  (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
11444   /* If the value was set in a later insn than the ones we are processing,
11445      we can't use it even if the register was only set once.  */
11446   if (INSN_CUID (reg_stat[regno].last_set) >= subst_low_cuid)
11449   /* If the value has all its registers valid, return it.  */
11450   if (get_last_value_validate (&value, reg_stat[regno].last_set,
11451 			       reg_stat[regno].last_set_label, 0))
11454   /* Otherwise, make a copy and replace any invalid register with
11455      (clobber (const_int 0)).  If that fails for some reason, return 0.  */
11457   value = copy_rtx (value);
11458   if (get_last_value_validate (&value, reg_stat[regno].last_set,
11459 			       reg_stat[regno].last_set_label, 1))
11465 /* Return nonzero if expression X refers to a REG or to memory
11466    that is set in an instruction more recent than FROM_CUID.  */
11469 use_crosses_set_p (rtx x, int from_cuid)
11473   enum rtx_code code = GET_CODE (x);
/* REG case: check each covered hard/pseudo register.  */
11477       unsigned int regno = REGNO (x);
11478       unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
11479 				 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
11481 #ifdef PUSH_ROUNDING
11482       /* Don't allow uses of the stack pointer to be moved,
11483 	 because we don't know whether the move crosses a push insn.  */
11484       if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
11487       for (; regno < endreg; regno++)
11488 	if (reg_stat[regno].last_set
11489 	    && INSN_CUID (reg_stat[regno].last_set) > from_cuid)
/* Memory: with no alias information, any newer store invalidates it.  */
11494   if (code == MEM && mem_last_set > from_cuid)
/* Recurse over all 'e' and 'E' operands.  */
11497   fmt = GET_RTX_FORMAT (code);
11499   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11504 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11505 	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11508       else if (fmt[i] == 'e'
11509 	       && use_crosses_set_p (XEXP (x, i), from_cuid))
11515 /* Define three variables used for communication between the following
   routines (reg_dead_at_p_1 and reg_dead_at_p).  */
11518 static unsigned int reg_dead_regno, reg_dead_endregno;
11519 static int reg_dead_flag;
11521 /* Function called via note_stores from reg_dead_at_p.
11523    If DEST is within [reg_dead_regno, reg_dead_endregno), set
11524    reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */
11527 reg_dead_at_p_1 (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
11529   unsigned int regno, endregno;
/* NOTE(review): the `if (!REG_P (dest)) return;' guard appears to be
   among the dropped lines.  */
11534   regno = REGNO (dest);
11535   endregno = regno + (regno < FIRST_PSEUDO_REGISTER
11536 		      ? hard_regno_nregs[regno][GET_MODE (dest)] : 1);
/* Flag any overlap between DEST and the register range being queried.  */
11538   if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11539     reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
11542 /* Return nonzero if REG is known to be dead at INSN.
11544 We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
11545 referencing REG, it is dead.  If we hit a SET referencing REG, it is
11546 live.  Otherwise, see if it is live or dead at the start of the basic
11547 block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
11548 must be assumed to be always live.  */
11551 reg_dead_at_p (rtx reg, rtx insn)
11556 /* Set variables for reg_dead_at_p_1.  */
11557 reg_dead_regno = REGNO (reg)
11558 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11559 ? hard_regno_nregs[reg_dead_regno]
11565 /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  For fixed registers
11566 we allow the machine description to decide whether use-and-clobber
11567 patterns are OK.  */
11568 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11570 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11571 if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
11575 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11576 beginning of function.  */
11577 for (; insn && !LABEL_P (insn) && !BARRIER_P (insn);
11578 insn = prev_nonnote_insn (insn))
/* reg_dead_flag is set by reg_dead_at_p_1 when a SET or CLOBBER of
   the register is seen in this insn.  */
11580 note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
11582 return reg_dead_flag == 1 ? 1 : 0;
11584 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11588 /* Get the basic block that we were in.  */
11590 block = ENTRY_BLOCK_PTR->next_bb;
11593 FOR_EACH_BB (block)
11594 if (insn == BB_HEAD (block))
11597 if (block == EXIT_BLOCK_PTR)
/* Finally, consult the life information at the start of the block
   for every piece of the register.  */
11601 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11602 if (REGNO_REG_SET_P (block->il.rtl->global_live_at_start, i))
11608 /* Note hard registers in X that are used.  This code is similar to
11609 that in flow.c, but much simpler since we don't care about pseudos.  */
11612 mark_used_regs_combine (rtx x)
11614 RTX_CODE code = GET_CODE (x);
11615 unsigned int regno;
11628 case ADDR_DIFF_VEC:
11631 /* CC0 must die in the insn after it is set, so we don't need to take
11632 special note of it here.  */
11638 /* If we are clobbering a MEM, mark any hard registers inside the
11639 address as used.  */
11640 if (MEM_P (XEXP (x, 0)))
11641 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
11646 /* A hard reg in a wide mode may really be multiple registers.
11647 If so, mark all of them just like the first.  */
11648 if (regno < FIRST_PSEUDO_REGISTER)
11650 unsigned int endregno, r;
11652 /* None of this applies to the stack, frame or arg pointers.  */
11653 if (regno == STACK_POINTER_REGNUM
11654 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
11655 || regno == HARD_FRAME_POINTER_REGNUM
11657 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
11658 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
11660 || regno == FRAME_POINTER_REGNUM)
/* Mark every hard register occupied by this wide-mode reg.  */
11663 endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
11664 for (r = regno; r < endregno; r++)
11665 SET_HARD_REG_BIT (newpat_used_regs, r);
11671 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
/* Strip wrappers to find the real destination; only a MEM destination
   contributes used regs (via its address).  */
11673 rtx testreg = SET_DEST (x);
11675 while (GET_CODE (testreg) == SUBREG
11676 || GET_CODE (testreg) == ZERO_EXTRACT
11677 || GET_CODE (testreg) == STRICT_LOW_PART)
11678 testreg = XEXP (testreg, 0);
11680 if (MEM_P (testreg))
11681 mark_used_regs_combine (XEXP (testreg, 0));
11683 mark_used_regs_combine (SET_SRC (x));
11691 /* Recursively scan the operands of this expression.  */
11694 const char *fmt = GET_RTX_FORMAT (code);
11696 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11699 mark_used_regs_combine (XEXP (x, i));
11700 else if (fmt[i] == 'E')
11704 for (j = 0; j < XVECLEN (x, i); j++)
11705 mark_used_regs_combine (XVECEXP (x, i, j));
11711 /* Remove register number REGNO from the dead registers list of INSN.
11713 Return the note used to record the death, if there was one.  */
11716 remove_death (unsigned int regno, rtx insn)
11718 rtx note = find_regno_note (insn, REG_DEAD, regno);
/* If a death note exists, detach it and keep the death count in sync.  */
11722 REG_N_DEATHS (regno)--;
11723 remove_note (insn, note);
11729 /* For each register (hardware or pseudo) used within expression X, if its
11730 death is in an instruction with cuid between FROM_CUID (inclusive) and
11731 TO_INSN (exclusive), put a REG_DEAD note for that register in the
11732 list headed by PNOTES.
11734 That said, don't move registers killed by maybe_kill_insn.
11736 This is done when X is being merged by combination into TO_INSN.  These
11737 notes will then be distributed as needed.  */
11740 move_deaths (rtx x, rtx maybe_kill_insn, int from_cuid, rtx to_insn,
11745 enum rtx_code code = GET_CODE (x);
11749 unsigned int regno = REGNO (x);
11750 rtx where_dead = reg_stat[regno].last_death;
11751 rtx before_dead, after_dead;
11753 /* Don't move the register if it gets killed in between from and to.  */
11754 if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
11755 && ! reg_referenced_p (x, maybe_kill_insn))
11758 /* WHERE_DEAD could be a USE insn made by combine, so first we
11759 make sure that we have insns with valid INSN_CUID values.  */
11760 before_dead = where_dead;
11761 while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
11762 before_dead = PREV_INSN (before_dead);
11764 after_dead = where_dead;
11765 while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
11766 after_dead = NEXT_INSN (after_dead);
/* The death must lie in [FROM_CUID, TO_INSN) to be movable.  */
11768 if (before_dead && after_dead
11769 && INSN_CUID (before_dead) >= from_cuid
11770 && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
11771 || (where_dead != after_dead
11772 && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
11774 rtx note = remove_death (regno, where_dead);
11776 /* It is possible for the call above to return 0.  This can occur
11777 when last_death points to I2 or I1 that we combined with.
11778 In that case make a new note.
11780 We must also check for the case where X is a hard register
11781 and NOTE is a death note for a range of hard registers
11782 including X.  In that case, we must put REG_DEAD notes for
11783 the remaining registers in place of NOTE.  */
11785 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
11786 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11787 > GET_MODE_SIZE (GET_MODE (x))))
11789 unsigned int deadregno = REGNO (XEXP (note, 0));
11790 unsigned int deadend
11791 = (deadregno + hard_regno_nregs[deadregno]
11792 [GET_MODE (XEXP (note, 0))]);
11793 unsigned int ourend
11794 = regno + hard_regno_nregs[regno][GET_MODE (x)];
/* Re-create death notes for the pieces of the wide note that are
   outside X's own range.  */
11797 for (i = deadregno; i < deadend; i++)
11798 if (i < regno || i >= ourend)
11799 REG_NOTES (where_dead)
11800 = gen_rtx_EXPR_LIST (REG_DEAD,
11802 REG_NOTES (where_dead));
11805 /* If we didn't find any note, or if we found a REG_DEAD note that
11806 covers only part of the given reg, and we have a multi-reg hard
11807 register, then to be safe we must check for REG_DEAD notes
11808 for each register other than the first.  They could have
11809 their own REG_DEAD notes lying around.  */
11810 else if ((note == 0
11812 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11813 < GET_MODE_SIZE (GET_MODE (x)))))
11814 && regno < FIRST_PSEUDO_REGISTER
11815 && hard_regno_nregs[regno][GET_MODE (x)] > 1)
11817 unsigned int ourend
11818 = regno + hard_regno_nregs[regno][GET_MODE (x)];
11819 unsigned int i, offset;
11823 offset = hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))];
/* Recurse on each remaining piece so its own death (if any) moves.  */
11827 for (i = regno + offset; i < ourend; i++)
11828 move_deaths (regno_reg_rtx[i],
11829 maybe_kill_insn, from_cuid, to_insn, &oldnotes);
/* Reuse the old note when its mode matches; otherwise build a new one.  */
11832 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
11834 XEXP (note, 1) = *pnotes;
11838 *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
11840 REG_N_DEATHS (regno)++;
11846 else if (GET_CODE (x) == SET)
11848 rtx dest = SET_DEST (x);
11850 move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
11852 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
11853 that accesses one word of a multi-word item, some
11854 piece of everything register in the expression is used by
11855 this insn, so remove any old death.  */
11856 /* ??? So why do we test for equality of the sizes?  */
11858 if (GET_CODE (dest) == ZERO_EXTRACT
11859 || GET_CODE (dest) == STRICT_LOW_PART
11860 || (GET_CODE (dest) == SUBREG
11861 && (((GET_MODE_SIZE (GET_MODE (dest))
11862 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
11863 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
11864 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
11866 move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
11870 /* If this is some other SUBREG, we know it replaces the entire
11871 value, so use that as the destination.  */
11872 if (GET_CODE (dest) == SUBREG)
11873 dest = SUBREG_REG (dest);
11875 /* If this is a MEM, adjust deaths of anything used in the address.
11876 For a REG (the only other possibility), the entire value is
11877 being replaced so the old value is not used in this insn.  */
11880 move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
11885 else if (GET_CODE (x) == CLOBBER)
/* Recursively scan the remaining operands of X.  */
11888 len = GET_RTX_LENGTH (code);
11889 fmt = GET_RTX_FORMAT (code);
11891 for (i = 0; i < len; i++)
11896 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11897 move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
11900 else if (fmt[i] == 'e')
11901 move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
11905 /* Return 1 if X is the target of a bit-field assignment in BODY, the
11906 pattern of an insn.  X must be a REG.  */
11909 reg_bitfield_target_p (rtx x, rtx body)
11913 if (GET_CODE (body) == SET)
11915 rtx dest = SET_DEST (body);
11917 unsigned int regno, tregno, endregno, endtregno;
/* Only ZERO_EXTRACT and STRICT_LOW_PART destinations count as
   bit-field assignments.  */
11919 if (GET_CODE (dest) == ZERO_EXTRACT)
11920 target = XEXP (dest, 0);
11921 else if (GET_CODE (dest) == STRICT_LOW_PART)
11922 target = SUBREG_REG (XEXP (dest, 0));
11926 if (GET_CODE (target) == SUBREG)
11927 target = SUBREG_REG (target);
11929 if (!REG_P (target))
/* For pseudos, require identity; for hard regs, test range overlap.  */
11932 tregno = REGNO (target), regno = REGNO (x);
11933 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
11934 return target == x;
11936 endtregno = tregno + hard_regno_nregs[tregno][GET_MODE (target)];
11937 endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
11939 return endregno > tregno && regno < endtregno;
/* For a PARALLEL, any member SET may be the bit-field assignment.  */
11942 else if (GET_CODE (body) == PARALLEL)
11943 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
11944 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
11950 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
11951 as appropriate.  I3 and I2 are the insns resulting from the combination
11952 insns including FROM (I2 may be zero).
11954 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
11955 not need REG_DEAD notes because they are being substituted for.  This
11956 saves searching in the most common cases.
11958 Each note in the list is either ignored or placed on some insns, depending
11959 on the type of note.  */
11962 distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
11965 rtx note, next_note;
11968 for (note = notes; note; note = next_note)
/* PLACE/PLACE2 receive the insn(s) this note should end up on;
   zero means the note is dropped.  */
11970 rtx place = 0, place2 = 0;
11972 /* If this NOTE references a pseudo register, ensure it references
11973 the latest copy of that register.  */
11974 if (XEXP (note, 0) && REG_P (XEXP (note, 0))
11975 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
11976 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
11978 next_note = XEXP (note, 1);
11979 switch (REG_NOTE_KIND (note))
11983 /* Doesn't matter much where we put this, as long as it's somewhere.
11984 It is preferable to keep these notes on branches, which is most
11985 likely to be i3.  */
11989 case REG_VALUE_PROFILE:
11990 /* Just get rid of this note, as it is unused later anyway.  */
11993 case REG_NON_LOCAL_GOTO:
11998 gcc_assert (i2 && JUMP_P (i2));
12003 case REG_EH_REGION:
12004 /* These notes must remain with the call or trapping instruction.  */
12007 else if (i2 && CALL_P (i2))
12011 gcc_assert (flag_non_call_exceptions);
12012 if (may_trap_p (i3))
12014 else if (i2 && may_trap_p (i2))
12016 /* ??? Otherwise assume we've combined things such that we
12017 can now prove that the instructions can't trap.  Drop the
12018 note in this case.  */
12024 /* These notes must remain with the call.  It should not be
12025 possible for both I2 and I3 to be a call.  */
12030 gcc_assert (i2 && CALL_P (i2));
12036 /* Any clobbers for i3 may still exist, and so we must process
12037 REG_UNUSED notes from that insn.
12039 Any clobbers from i2 or i1 can only exist if they were added by
12040 recog_for_combine.  In that case, recog_for_combine created the
12041 necessary REG_UNUSED notes.  Trying to keep any original
12042 REG_UNUSED notes from these insns can cause incorrect output
12043 if it is for the same register as the original i3 dest.
12044 In that case, we will notice that the register is set in i3,
12045 and then add a REG_UNUSED note for the destination of i3, which
12046 is wrong.  However, it is possible to have REG_UNUSED notes from
12047 i2 or i1 for register which were both used and clobbered, so
12048 we keep notes from i2 or i1 if they will turn into REG_DEAD
12051 /* If this register is set or clobbered in I3, put the note there
12052 unless there is one already.  */
12053 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
12055 if (from_insn != i3)
12058 if (! (REG_P (XEXP (note, 0))
12059 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
12060 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
12063 /* Otherwise, if this register is used by I3, then this register
12064 now dies here, so we must put a REG_DEAD note here unless there
12066 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
12067 && ! (REG_P (XEXP (note, 0))
12068 ? find_regno_note (i3, REG_DEAD,
12069 REGNO (XEXP (note, 0)))
12070 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
12072 PUT_REG_NOTE_KIND (note, REG_DEAD);
12080 /* These notes say something about results of an insn.  We can
12081 only support them if they used to be on I3 in which case they
12082 remain on I3.  Otherwise they are ignored.
12084 If the note refers to an expression that is not a constant, we
12085 must also ignore the note since we cannot tell whether the
12086 equivalence is still true.  It might be possible to do
12087 slightly better than this (we only have a problem if I2DEST
12088 or I1DEST is present in the expression), but it doesn't
12089 seem worth the trouble.  */
12091 if (from_insn == i3
12092 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
12097 case REG_NO_CONFLICT:
12098 /* These notes say something about how a register is used.  They must
12099 be present on any use of the register in I2 or I3.  */
12100 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
12103 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
12113 /* This can show up in several ways -- either directly in the
12114 pattern, or hidden off in the constant pool with (or without?)
12115 a REG_EQUAL note.  */
12116 /* ??? Ignore the without-reg_equal-note problem for now.  */
12117 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
12118 || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
12119 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12120 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
12124 && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
12125 || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
12126 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12127 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
12135 /* Don't attach REG_LABEL note to a JUMP_INSN.  Add
12136 a JUMP_LABEL instead or decrement LABEL_NUSES.  */
12137 if (place && JUMP_P (place))
12139 rtx label = JUMP_LABEL (place);
12142 JUMP_LABEL (place) = XEXP (note, 0);
12145 gcc_assert (label == XEXP (note, 0));
12146 if (LABEL_P (label))
12147 LABEL_NUSES (label)--;
12151 if (place2 && JUMP_P (place2))
12153 rtx label = JUMP_LABEL (place2);
12156 JUMP_LABEL (place2) = XEXP (note, 0);
12159 gcc_assert (label == XEXP (note, 0));
12160 if (LABEL_P (label))
12161 LABEL_NUSES (label)--;
12168 /* This note says something about the value of a register prior
12169 to the execution of an insn.  It is too much trouble to see
12170 if the note is still correct in all situations.  It is better
12171 to simply delete it.  */
12175 /* If the insn previously containing this note still exists,
12176 put it back where it was.  Otherwise move it to the previous
12177 insn.  Adjust the corresponding REG_LIBCALL note.  */
12178 if (!NOTE_P (from_insn))
12182 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
12183 place = prev_real_insn (from_insn);
12185 XEXP (tem, 0) = place;
12186 /* If we're deleting the last remaining instruction of a
12187 libcall sequence, don't add the notes.  */
12188 else if (XEXP (note, 0) == from_insn)
12190 /* Don't add the dangling REG_RETVAL note.  */
12197 /* This is handled similarly to REG_RETVAL.  */
12198 if (!NOTE_P (from_insn))
12202 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
12203 place = next_real_insn (from_insn);
12205 XEXP (tem, 0) = place;
12206 /* If we're deleting the last remaining instruction of a
12207 libcall sequence, don't add the notes.  */
12208 else if (XEXP (note, 0) == from_insn)
12210 /* Don't add the dangling REG_LIBCALL note.  */
/* REG_DEAD handling (the case label is elided in this excerpt;
   the logic below and the REG_NOTE_KIND tests later confirm it).  */
12217 /* If the register is used as an input in I3, it dies there.
12218 Similarly for I2, if it is nonzero and adjacent to I3.
12220 If the register is not used as an input in either I3 or I2
12221 and it is not one of the registers we were supposed to eliminate,
12222 there are two possibilities.  We might have a non-adjacent I2
12223 or we might have somehow eliminated an additional register
12224 from a computation.  For example, we might have had A & B where
12225 we discover that B will always be zero.  In this case we will
12226 eliminate the reference to A.
12228 In both cases, we must search to see if we can find a previous
12229 use of A and put the death note there.  */
12232 && CALL_P (from_insn)
12233 && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
12235 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
12237 else if (i2 != 0 && next_nonnote_insn (i2) == i3
12238 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12242 && (rtx_equal_p (XEXP (note, 0), elim_i2)
12243 || rtx_equal_p (XEXP (note, 0), elim_i1)))
12248 basic_block bb = this_basic_block;
12250 /* You might think you could search back from FROM_INSN
12251 rather than from I3, but combine tries to split invalid
12252 combined instructions.  This can result in the old I2
12253 or I1 moving later in the insn sequence.  */
12254 for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
12256 if (! INSN_P (tem))
12258 if (tem == BB_HEAD (bb))
12263 /* If the register is being set at TEM, see if that is all
12264 TEM is doing.  If so, delete TEM.  Otherwise, make this
12265 into a REG_UNUSED note instead.  Don't delete sets to
12266 global register vars.  */
12267 if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
12268 || !global_regs[REGNO (XEXP (note, 0))])
12269 && reg_set_p (XEXP (note, 0), PATTERN (tem)))
12271 rtx set = single_set (tem);
12272 rtx inner_dest = 0;
12274 rtx cc0_setter = NULL_RTX;
12278 for (inner_dest = SET_DEST (set);
12279 (GET_CODE (inner_dest) == STRICT_LOW_PART
12280 || GET_CODE (inner_dest) == SUBREG
12281 || GET_CODE (inner_dest) == ZERO_EXTRACT);
12282 inner_dest = XEXP (inner_dest, 0))
12285 /* Verify that it was the set, and not a clobber that
12286 modified the register.
12288 CC0 targets must be careful to maintain setter/user
12289 pairs.  If we cannot delete the setter due to side
12290 effects, mark the user with an UNUSED note instead
12293 if (set != 0 && ! side_effects_p (SET_SRC (set))
12294 && rtx_equal_p (XEXP (note, 0), inner_dest)
12296 && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
12297 || ((cc0_setter = prev_cc0_setter (tem)) != NULL
12298 && sets_cc0_p (PATTERN (cc0_setter)) > 0))
12302 /* Move the notes and links of TEM elsewhere.
12303 This might delete other dead insns recursively.
12304 First set the pattern to something that won't use
12306 rtx old_notes = REG_NOTES (tem);
12308 PATTERN (tem) = pc_rtx;
12309 REG_NOTES (tem) = NULL;
12311 distribute_notes (old_notes, tem, tem, NULL_RTX,
12312 NULL_RTX, NULL_RTX);
12313 distribute_links (LOG_LINKS (tem));
12315 SET_INSN_DELETED (tem);
12318 /* Delete the setter too.  */
12321 PATTERN (cc0_setter) = pc_rtx;
12322 old_notes = REG_NOTES (cc0_setter);
12323 REG_NOTES (cc0_setter) = NULL;
12325 distribute_notes (old_notes, cc0_setter,
12326 cc0_setter, NULL_RTX,
12327 NULL_RTX, NULL_RTX);
12328 distribute_links (LOG_LINKS (cc0_setter));
12330 SET_INSN_DELETED (cc0_setter);
12336 PUT_REG_NOTE_KIND (note, REG_UNUSED);
12338 /* If there isn't already a REG_UNUSED note, put one
12339 here.  Do not place a REG_DEAD note, even if
12340 the register is also used here; that would not
12341 match the algorithm used in lifetime analysis
12342 and can cause the consistency check in the
12343 scheduler to fail.  */
12344 if (! find_regno_note (tem, REG_UNUSED,
12345 REGNO (XEXP (note, 0))))
12350 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
12352 && find_reg_fusage (tem, USE, XEXP (note, 0))))
12354 /* This may not be the correct place for the death
12355 note if FROM_INSN is before TEM, and the reg is
12356 set between FROM_INSN and TEM.  The reg might
12357 die two or more times.  An existing death note
12358 means we are looking at the wrong live range.  */
12360 && INSN_CUID (from_insn) < INSN_CUID (tem)
12361 && find_regno_note (tem, REG_DEAD,
12362 REGNO (XEXP (note, 0))))
12365 if (tem == BB_HEAD (bb))
12372 /* If we are doing a 3->2 combination, and we have a
12373 register which formerly died in i3 and was not used
12374 by i2, which now no longer dies in i3 and is used in
12375 i2 but does not die in i2, and place is between i2
12376 and i3, then we may need to move a link from place to
12378 if (i2 && INSN_UID (place) <= max_uid_cuid
12379 && INSN_CUID (place) > INSN_CUID (i2)
12381 && INSN_CUID (from_insn) > INSN_CUID (i2)
12382 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12384 rtx links = LOG_LINKS (place);
12385 LOG_LINKS (place) = 0;
12386 distribute_links (links);
12391 if (tem == BB_HEAD (bb))
12395 /* We haven't found an insn for the death note and it
12396 is still a REG_DEAD note, but we have hit the beginning
12397 of the block.  If the existing life info says the reg
12398 was dead, there's nothing left to do.  Otherwise, we'll
12399 need to do a global life update after combine.  */
12400 if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
12401 && REGNO_REG_SET_P (bb->il.rtl->global_live_at_start,
12402 REGNO (XEXP (note, 0))))
12403 SET_BIT (refresh_blocks, this_basic_block->index);
12406 /* If the register is set or already dead at PLACE, we needn't do
12407 anything with this note if it is still a REG_DEAD note.
12408 We check here if it is set at all, not if is it totally replaced,
12409 which is what `dead_or_set_p' checks, so also check for it being
12412 if (place && REG_NOTE_KIND (note) == REG_DEAD)
12414 unsigned int regno = REGNO (XEXP (note, 0));
12416 /* Similarly, if the instruction on which we want to place
12417 the note is a noop, we'll need do a global live update
12418 after we remove them in delete_noop_moves.  */
12419 if (noop_move_p (place))
12420 SET_BIT (refresh_blocks, this_basic_block->index);
12422 if (dead_or_set_p (place, XEXP (note, 0))
12423 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
12425 /* Unless the register previously died in PLACE, clear
12426 last_death.  [I no longer understand why this is
12428 if (reg_stat[regno].last_death != place)
12429 reg_stat[regno].last_death = 0;
12433 reg_stat[regno].last_death = place;
12435 /* If this is a death note for a hard reg that is occupying
12436 multiple registers, ensure that we are still using all
12437 parts of the object.  If we find a piece of the object
12438 that is unused, we must arrange for an appropriate REG_DEAD
12439 note to be added for it.  However, we can't just emit a USE
12440 and tag the note to it, since the register might actually
12441 be dead; so we recourse, and the recursive call then finds
12442 the previous insn that used this register.  */
12444 if (place && regno < FIRST_PSEUDO_REGISTER
12445 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] > 1)
12447 unsigned int endregno
12448 = regno + hard_regno_nregs[regno]
12449 [GET_MODE (XEXP (note, 0))];
12453 for (i = regno; i < endregno; i++)
12454 if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
12455 && ! find_regno_fusage (place, USE, i))
12456 || dead_or_set_regno_p (place, i))
12461 /* Put only REG_DEAD notes for pieces that are
12462 not already dead or set.  */
12464 for (i = regno; i < endregno;
12465 i += hard_regno_nregs[i][reg_raw_mode[i]])
12467 rtx piece = regno_reg_rtx[i];
12468 basic_block bb = this_basic_block;
12470 if (! dead_or_set_p (place, piece)
12471 && ! reg_bitfield_target_p (piece,
12475 = gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);
12477 distribute_notes (new_note, place, place,
12478 NULL_RTX, NULL_RTX, NULL_RTX);
12480 else if (! refers_to_regno_p (i, i + 1,
12481 PATTERN (place), 0)
12482 && ! find_regno_fusage (place, USE, i))
12483 for (tem = PREV_INSN (place); ;
12484 tem = PREV_INSN (tem))
12486 if (! INSN_P (tem))
12488 if (tem == BB_HEAD (bb))
12490 SET_BIT (refresh_blocks,
12491 this_basic_block->index);
12496 if (dead_or_set_p (tem, piece)
12497 || reg_bitfield_target_p (piece,
12501 = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
12516 /* Any other notes should not be present at this point in the
12518 gcc_unreachable ();
/* Finally attach the note to PLACE (and a copy to PLACE2, if any),
   keeping REG_N_DEATHS consistent for DEAD/UNUSED notes.  */
12523 XEXP (note, 1) = REG_NOTES (place);
12524 REG_NOTES (place) = note;
12526 else if ((REG_NOTE_KIND (note) == REG_DEAD
12527 || REG_NOTE_KIND (note) == REG_UNUSED)
12528 && REG_P (XEXP (note, 0)))
12529 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
12533 if ((REG_NOTE_KIND (note) == REG_DEAD
12534 || REG_NOTE_KIND (note) == REG_UNUSED)
12535 && REG_P (XEXP (note, 0)))
12536 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
12538 REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
12539 REG_NOTE_KIND (note),
12541 REG_NOTES (place2));
12546 /* Similarly to above, distribute the LOG_LINKS that used to be present on
12547 I3, I2, and I1 to new locations.  This is also called to add a link
12548 pointing at I3 when I3's destination is changed.  */
12551 distribute_links (rtx links)
12553 rtx link, next_link;
12555 for (link = links; link; link = next_link)
12561 next_link = XEXP (link, 1);
12563 /* If the insn that this link points to is a NOTE or isn't a single
12564 set, ignore it.  In the latter case, it isn't clear what we
12565 can do other than ignore the link, since we can't tell which
12566 register it was for.  Such links wouldn't be used by combine
12569 It is not possible for the destination of the target of the link to
12570 have been changed by combine.  The only potential of this is if we
12571 replace I3, I2, and I1 by I3 and I2.  But in that case the
12572 destination of I2 also remains unchanged.  */
12574 if (NOTE_P (XEXP (link, 0))
12575 || (set = single_set (XEXP (link, 0))) == 0)
/* Strip wrappers to find the register actually set.  */
12578 reg = SET_DEST (set);
12579 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
12580 || GET_CODE (reg) == STRICT_LOW_PART)
12581 reg = XEXP (reg, 0);
12583 /* A LOG_LINK is defined as being placed on the first insn that uses
12584 a register and points to the insn that sets the register.  Start
12585 searching at the next insn after the target of the link and stop
12586 when we reach a set of the register or the end of the basic block.
12588 Note that this correctly handles the link that used to point from
12589 I3 to I2.  Also note that not much searching is typically done here
12590 since most links don't point very far away.  */
12592 for (insn = NEXT_INSN (XEXP (link, 0));
12593 (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
12594 || BB_HEAD (this_basic_block->next_bb) != insn));
12595 insn = NEXT_INSN (insn))
12596 if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
12598 if (reg_referenced_p (reg, PATTERN (insn)))
12602 else if (CALL_P (insn)
12603 && find_reg_fusage (insn, USE, reg))
12608 else if (INSN_P (insn) && reg_set_p (reg, insn))
12611 /* If we found a place to put the link, place it there unless there
12612 is already a link to the same insn as LINK at that point.  */
12618 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
12619 if (XEXP (link2, 0) == XEXP (link, 0))
12624 XEXP (link, 1) = LOG_LINKS (place);
12625 LOG_LINKS (place) = link;
12627 /* Set added_links_insn to the earliest insn we added a
12629 if (added_links_insn == 0
12630 || INSN_CUID (added_links_insn) > INSN_CUID (place))
12631 added_links_insn = place;
12637 /* Subroutine of unmentioned_reg_p and callback from for_each_rtx.
12638 Check whether the expression pointer to by LOC is a register or
12639 memory, and if so return 1 if it isn't mentioned in the rtx EXPR.
12640 Otherwise return zero.  */
12643 unmentioned_reg_p_1 (rtx *loc, void *expr)
/* A nonzero return stops the for_each_rtx walk.  */
12648 && (REG_P (x) || MEM_P (x))
12649 && ! reg_mentioned_p (x, (rtx) expr))
12654 /* Check for any register or memory mentioned in EQUIV that is not
12655 mentioned in EXPR.  This is used to restrict EQUIV to "specializations"
12656 of EXPR where some registers may have been replaced by constants.  */
12659 unmentioned_reg_p (rtx equiv, rtx expr)
/* Nonzero iff the walk found a REG or MEM in EQUIV absent from EXPR.  */
12661 return for_each_rtx (&equiv, unmentioned_reg_p_1, expr);
12664 /* Compute INSN_CUID for INSN, which is an insn made by combine.  */
12667 insn_cuid (rtx insn)
/* Combine-made USE insns have UIDs beyond max_uid_cuid and no cuid of
   their own; skip forward to the next insn that has one.  */
12669 while (insn != 0 && INSN_UID (insn) > max_uid_cuid
12670 && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE)
12671 insn = NEXT_INSN (insn);
12673 gcc_assert (INSN_UID (insn) <= max_uid_cuid);
12675 return INSN_CUID (insn);
/* Print this pass's combination statistics to FILE.  */
12679 dump_combine_stats (FILE *file)
12683 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
12684 combine_attempts, combine_merges, combine_extras, combine_successes);
/* Print the accumulated (whole-compilation) combiner totals to FILE.  */
12688 dump_combine_total_stats (FILE *file)
12692 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
12693 total_attempts, total_merges, total_extras, total_successes);
/* Pass gate: run the combiner only when optimizing.  */
12698 gate_handle_combine (void)
12700 return (optimize > 0);
12703 /* Try combining insns through substitution.  */
12705 rest_of_handle_combine (void)
12707 int rebuild_jump_labels_after_combine
12708 = combine_instructions (get_insns (), max_reg_num ());
12710 /* Combining insns may have turned an indirect jump into a
12711 direct jump.  Rebuild the JUMP_LABEL fields of jumping
12713 if (rebuild_jump_labels_after_combine)
12715 timevar_push (TV_JUMP);
12716 rebuild_jump_labels (get_insns ());
12717 timevar_pop (TV_JUMP);
/* Dead jump tables and newly-unreachable code may remain; clean up
   the CFG and update life information.  */
12719 delete_dead_jumptables ();
12720 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
12724 struct tree_opt_pass pass_combine =
12726 "combine", /* name */
12727 gate_handle_combine, /* gate */
12728 rest_of_handle_combine, /* execute */
12731 0, /* static_pass_number */
12732 TV_COMBINE, /* tv_id */
12733 0, /* properties_required */
12734 0, /* properties_provided */
12735 0, /* properties_destroyed */
12736 0, /* todo_flags_start */
12738 TODO_ggc_collect, /* todo_flags_finish */