/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
23 /* stdio.h must precede rtl.h for FFS. */
25 #include "coretypes.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
35 #include "insn-config.h"
47 /* The basic idea of common subexpression elimination is to go
48 through the code, keeping a record of expressions that would
49 have the same value at the current scan point, and replacing
50 expressions encountered with the cheapest equivalent expression.
   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.
58 Note CSE can turn a conditional or computed jump into a nop or
59 an unconditional jump. When this occurs we arrange to run the jump
60 optimizer after CSE to delete the unreachable code.
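   As an illustrative sketch (not part of the original comment): given

	(set (reg 103) (plus (reg 100) (reg 101)))
	(set (reg 104) (plus (reg 100) (reg 101)))

   the second PLUS is found in the hash table, so the second insn can
   be rewritten as (set (reg 104) (reg 103)), and later uses of reg 104
   may in turn be replaced by reg 103.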
62 We use two data structures to record the equivalent expressions:
63 a hash table for most expressions, and a vector of "quantity
64 numbers" to record equivalent (pseudo) registers.
66 The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
68 contain a fairly small number, the register number, taken from
69 a contiguously allocated series, and two register references are
70 identical if they have the same number. General expressions
71 do not have any such thing, so the only way to retrieve the
72 information recorded on an expression other than a register
73 is to keep it in a hash table.
75 Registers and "quantity numbers":
77 At the start of each basic block, all of the (hardware and pseudo)
78 registers used in the function are given distinct quantity
79 numbers to indicate their contents. During scan, when the code
80 copies one register into another, we copy the quantity number.
81 When a register is loaded in any other way, we allocate a new
82 quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.
86 All real quantity numbers are greater than or equal to zero.
87 If register N has not been assigned a quantity, reg_qty[N] will
88 equal -N - 1, which is always negative.
90 Quantity numbers below zero do not exist and none of the `qty_table'
91 entries should be referenced with a negative index.
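   For example (an illustrative sketch): after scanning

	(set (reg 101) (reg 100))

   reg 101 is given reg 100's quantity number, so the two are known to
   hold the same value; a later

	(set (reg 101) (plus (reg 101) (const_int 1)))

   is not a copy, so reg 101 then gets a fresh quantity number.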
93 We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
95 and `reg_eqv_table' members `next' and `prev' hold these chains.
97 The first register in a chain is the one whose lifespan is least local.
98 Among equals, it is the one that was seen first.
99 We replace any equivalent register with that one.
   If two registers have the same quantity number, then REG expressions
   with the qty_table `mode' must be in the hash table for both registers
   and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.
110 Constants and quantity numbers
112 When a quantity has a known constant value, that value is stored
113 in the appropriate qty_table `const_rtx'. This is in addition to
114 putting the constant in the hash table as is usual for non-regs.
116 Whether a reg or a constant is preferred is determined by the configuration
117 macro CONST_COSTS and will often depend on the constant value. In any
   event, expressions containing constants can be simplified by fold_rtx.
120 When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.
124 Integer constants don't have a machine mode. However, cse
125 determines the intended machine mode from the destination
126 of the instruction that moves the constant. The machine mode
127 is recorded in the hash table along with the actual RTL
128 constant expression so that different modes are kept separate.
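   For example (illustrative), (const_int 1) stored into an SImode
   register and (const_int 1) stored into a QImode register yield two
   distinct table entries, recorded with modes SImode and QImode
   respectively.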
132 To record known equivalences among expressions in general
133 we use a hash table called `table'. It has a fixed number of buckets
134 that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.
138 Other chains through the same elements connect the elements which
139 currently have equivalent values.
141 Register references in an expression are canonicalized before hashing
142 the expression. This is done using `reg_qty' and qty_table `first_reg'.
143 The hash code of a register reference is computed using the quantity
144 number, not the register number.
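   For example (illustrative): if reg 101 was copied from reg 100, both
   registers share a quantity number, so (plus (reg 101) (reg 102)) is
   canonicalized to (plus (reg 100) (reg 102)) before hashing, and
   therefore lands in the same bucket as an equivalent expression that
   was entered using reg 100.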
146 When the value of an expression changes, it is necessary to remove from the
147 hash table not just that expression but all expressions whose values
148 could be different as a result.
150 1. If the value changing is in memory, except in special cases
151 ANYTHING referring to memory could be changed. That is because
152 nobody knows where a pointer does not point.
153 The function `invalidate_memory' removes what is necessary.
155 The special cases are when the address is constant or is
156 a constant plus a fixed register such as the frame pointer
157 or a static chain pointer. When such addresses are stored in,
158 we can tell exactly which other such addresses must be invalidated
159 due to overlap. `invalidate' does this.
160 All expressions that refer to non-constant
161 memory addresses are also invalidated. `invalidate_memory' does this.
163 2. If the value changing is a register, all expressions
   containing references to that register, and only those,
   must be removed from the hash table.
167 Because searching the entire hash table for expressions that contain
168 a register is very slow, we try to figure out when it isn't necessary.
169 Precisely, this is necessary only when expressions have been
170 entered in the hash table using this register, and then the value has
171 changed, and then another expression wants to be added to refer to
172 the register's new value. This sequence of circumstances is rare
173 within any one basic block.
175 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
176 reg_tick[i] is incremented whenever a value is stored in register i.
177 reg_in_table[i] holds -1 if no references to register i have been
178 entered in the table; otherwise, it contains the value reg_tick[i] had
179 when the references were entered. If we want to enter a reference
180 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to
   match them.
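   For example (an illustrative walk-through): suppose reg_tick[5] is 2
   when (plus (reg 5) (const_int 4)) is entered, so reg_in_table[5]
   becomes 2.  A later store to reg 5 bumps reg_tick[5] to 3; the stale
   entry is then ignored by lookups, and is physically removed only if
   some new expression mentioning reg 5 wants to enter the table.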
184 Registers themselves are entered in the hash table as well as in
185 the equivalent-register chains. However, the vectors `reg_tick'
186 and `reg_in_table' do not apply to expressions which are simple
187 register references. These expressions are removed from the table
188 immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.
192 A CLOBBER rtx in an instruction invalidates its operand for further
193 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
194 invalidates everything that resides in memory.
198 Constant expressions that differ only by an additive integer
199 are called related. When a constant expression is put in
200 the table, the related expression with no constant term
201 is also entered. These are made to point at each other
202 so that it is possible to find out if there exists any
203 register equivalent to an expression related to a given expression. */
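/* For example (an illustrative sketch): when (plus (reg 100)
   (const_int 8)) is entered, the related expression (reg 100) is
   entered as well, and the two are linked on a circular
   `related_value' chain.  If some register is later found equivalent
   to (plus (reg 100) (const_int 4)), it can be located through that
   chain and adjusted by the difference instead of recomputing the
   whole address.  */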
/* One plus largest register number used in this function.  */

static int max_reg;
/* One plus largest instruction UID used in this function at time of
   cse'ing.  */
212 static int max_insn_uid;
214 /* Length of qty_table vector. We know in advance we will not need
   a quantity number this big.  */

static int max_qty;
219 /* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;
224 /* Per-qty information tracking.
226 `first_reg' and `last_reg' track the head and tail of the
227 chain of registers which currently contain this quantity.
229 `mode' contains the machine mode of this quantity.
   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.
237 `comparison_{code,const,qty}' are used to track when a
238 comparison between a quantity and some constant or register has
239 been passed. In such a case, we know the results of the comparison
240 in case we see it again. These members record a comparison that
241 is known to be true. `comparison_code' holds the rtx code of such
242 a comparison, else it is set to UNKNOWN and the other two
243 comparison members are undefined. `comparison_const' holds
244 the constant being compared against, or zero if the comparison
245 is not against a constant. `comparison_qty' holds the quantity
246 being compared against when the result is known. If the comparison
247 is not with a register, `comparison_qty' is -1. */
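/* For example (illustrative): once a branch on (gt (reg 100)
   (const_int 10)) is known to have been taken, reg 100's quantity can
   record comparison_code = GT, comparison_const = (const_int 10) and
   comparison_qty = -1, so a later identical test against reg 100 can
   be folded to "known true".  */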
struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};
262 /* The table of all qtys, indexed by qty number. */
263 static struct qty_table_elem *qty_table;
266 /* For machines that have a CC0, we do not record its value in the hash
267 table since its use is guaranteed to be the insn immediately following
268 its definition and any other insn is presumed to invalidate it.
270 Instead, we store below the value last assigned to CC0. If it should
271 happen to be a constant, it is stored in preference to the actual
272 assigned value. In case it is a constant, we store the mode in which
273 the constant should be interpreted. */
275 static rtx prev_insn_cc0;
276 static enum machine_mode prev_insn_cc0_mode;
278 /* Previous actual insn. 0 if at first insn of basic block. */
280 static rtx prev_insn;
283 /* Insn being scanned. */
285 static rtx this_insn;
287 /* Index by register number, gives the number of the next (or
288 previous) register in the chain of registers sharing the same
291 Or -1 if this register is at the end of the chain.
   If reg_qty[N] == -N - 1 (i.e. register N has not been assigned a
   quantity), reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;
struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key.  */
  unsigned int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};
333 /* A free list of cse_reg_info entries. */
334 static struct cse_reg_info *cse_reg_info_free_list;
336 /* A used list of cse_reg_info entries. */
337 static struct cse_reg_info *cse_reg_info_used_list;
338 static struct cse_reg_info *cse_reg_info_used_list_end;
340 /* A mapping from registers to cse_reg_info data structures. */
341 #define REGHASH_SHIFT 7
342 #define REGHASH_SIZE (1 << REGHASH_SHIFT)
343 #define REGHASH_MASK (REGHASH_SIZE - 1)
344 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
346 #define REGHASH_FN(REGNO) \
347 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
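/* Worked example: with REGHASH_SHIFT == 7, pseudo 262 hashes to bucket
   (262 ^ (262 >> 7)) & REGHASH_MASK == (262 ^ 2) & 127 == 4.  */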
349 /* The last lookup we did into the cse_reg_info_tree. This allows us
350 to cache repeated lookups. */
351 static unsigned int cached_regno;
352 static struct cse_reg_info *cached_cse_reg_info;
354 /* A HARD_REG_SET containing all the hard registers for which there is
355 currently a REG expression in the hash table. Note the difference
356 from the above variables, which indicate if the REG is mentioned in some
357 expression in the table. */
359 static HARD_REG_SET hard_regs_in_table;
361 /* CUID of insn that starts the basic block currently being cse-processed. */
363 static int cse_basic_block_start;
365 /* CUID of insn that ends the basic block currently being cse-processed. */
367 static int cse_basic_block_end;
369 /* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
371 We use them to see whether a reg is used outside a given basic block. */
373 static int *uid_cuid;
/* Highest UID in UID_CUID.  */

static int max_uid;
378 /* Get the cuid of an insn. */
380 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
382 /* Nonzero if this pass has made changes, and therefore it's
383 worthwhile to run the garbage collector. */
385 static int cse_altered;
387 /* Nonzero if cse has altered conditional jump insns
388 in such a way that jump optimization should be redone. */
390 static int cse_jumps_altered;
392 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
393 REG_LABEL, we have to rerun jump after CSE to put in the note. */
394 static int recorded_label_ref;
396 /* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;
402 #ifdef LOAD_EXTEND_OP
404 /* Scratch rtl used when looking for load-extended copy of a MEM. */
static rtx memory_extend_rtx;
#endif
408 /* canon_hash stores 1 in hash_arg_in_memory
409 if it notices a reference to memory within the expression being hashed. */
411 static int hash_arg_in_memory;
413 /* The hash table contains buckets which are chains of `struct table_elt's,
414 each recording one expression's information.
415 That expression is in the `exp' field.
417 The canon_exp field contains a canonical (from the point of view of
418 alias analysis) version of the `exp' field.
420 Those elements with the same hash code are chained in both directions
421 through the `next_same_hash' and `prev_same_hash' fields.
423 Each set of expressions with equivalent values
424 are on a two-way chain through the `next_same_value'
425 and `prev_same_value' fields, and all point with
426 the `first_same_value' field at the first element in
427 that chain. The chain is in order of increasing cost.
428 Each element's cost value is in its `cost' field.
430 The `in_memory' field is nonzero for elements that
431 involve any reference to memory. These elements are removed
432 whenever a write is done to an unidentified location in memory.
433 To be safe, we assume that a memory address is unidentified unless
434 the address is either a symbol constant or a constant plus
435 the frame pointer or argument pointer.
437 The `related_value' field is used to connect related expressions
438 (that differ by adding an integer).
439 The related expressions are chained in a circular fashion.
440 `related_value' is zero for expressions for which this
443 The `cost' field stores the cost of this element's expression.
444 The `regcost' field stores the value returned by approx_reg_cost for
445 this element's expression.
   The `is_const' flag is set if the element is a constant (including
   a CONST_INT).
450 The `flag' field is used as a temporary during some search routines.
452 The `mode' field is usually the same as GET_MODE (`exp'), but
453 if `exp' is a CONST_INT and has no machine mode then the `mode'
454 field is the mode it was being used as. Each constant is
455 recorded separately for each mode it is used with. */
struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};
477 /* We don't want a lot of buckets, because we rarely have very many
478 things stored in the hash table, and a lot of buckets slows
479 down a lot of loops that happen frequently. */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
482 #define HASH_MASK (HASH_SIZE - 1)
/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be
   set).  */

#define HASH(X, M)	\
 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
   : canon_hash (X, M)) & HASH_MASK)
492 /* Determine whether register number N is considered a fixed register for the
493 purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
497 #define FIXED_REGNO_P(N) \
498 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
499 || fixed_regs[N] || global_regs[N])
501 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
502 hard registers and pointers into the frame are the cheapest with a cost
503 of 0. Next come pseudos with a cost of one and other hard registers with
504 a cost of 2. Aside from these special cases, call `rtx_cost'. */
506 #define CHEAP_REGNO(N) \
507 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
508 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
509 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
510 || ((N) < FIRST_PSEUDO_REGISTER \
511 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
513 #define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
514 #define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
516 /* Get the info associated with register N. */
518 #define GET_CSE_REG_INFO(N) \
519 (((N) == cached_regno && cached_cse_reg_info) \
520 ? cached_cse_reg_info : get_cse_reg_info ((N)))
522 /* Get the number of times this register has been updated in this
525 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
527 /* Get the point at which REG was recorded in the table. */
529 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   subreg).  */
534 #define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)
536 /* Get the quantity number for REG. */
538 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
540 /* Determine if the quantity number for register X represents a valid index
541 into the qty_table. */
543 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
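/* For example, a register N that has never been assigned a quantity has
   REG_QTY (N) == -N - 1, which is negative, so REGNO_QTY_VALID_P fails
   until make_new_qty gives N a real (nonnegative) quantity number.  */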
545 static struct table_elt *table[HASH_SIZE];
547 /* Chain of `struct table_elt's made so far for this function
548 but currently removed from the table. */
550 static struct table_elt *free_element_chain;
552 /* Number of `struct table_elt' structures made so far for this function. */
554 static int n_elements_made;
556 /* Maximum value `n_elements_made' has had so far in this compilation
557 for functions previously processed. */
559 static int max_elements_made;
561 /* Surviving equivalence class when two equivalence classes are merged
562 by recording the effects of a jump in the last insn. Zero if the
563 last insn was not a conditional jump. */
565 static struct table_elt *last_jump_equiv_class;
567 /* Set to the cost of a constant pool reference if one was found for a
568 symbolic constant. If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */
572 static int constant_pool_entries_cost;
573 static int constant_pool_entries_regcost;
575 /* This data describes a block that will be processed by cse_basic_block. */
struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;

  /* Highest CUID value of insns in block.  */
  int high_cuid;

  /* Total number of SETs in block.  */
  int nsets;

  /* Last insn in the block.  */
  rtx last;

  /* Size of current branch path, if any.  */
  int path_size;

  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;

      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } *path;
};
601 static bool fixed_base_plus_p (rtx x);
602 static int notreg_cost (rtx, enum rtx_code);
603 static int approx_reg_cost_1 (rtx *, void *);
604 static int approx_reg_cost (rtx);
605 static int preferrable (int, int, int, int);
606 static void new_basic_block (void);
607 static void make_new_qty (unsigned int, enum machine_mode);
608 static void make_regs_eqv (unsigned int, unsigned int);
609 static void delete_reg_equiv (unsigned int);
610 static int mention_regs (rtx);
611 static int insert_regs (rtx, struct table_elt *, int);
612 static void remove_from_table (struct table_elt *, unsigned);
613 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
614 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
615 static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 enum machine_mode);
618 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
619 static void invalidate (rtx, enum machine_mode);
620 static int cse_rtx_varies_p (rtx, int);
621 static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					enum machine_mode);
624 static void rehash_using_reg (rtx);
625 static void invalidate_memory (void);
626 static void invalidate_for_call (void);
627 static rtx use_related_value (rtx, struct table_elt *);
628 static unsigned canon_hash (rtx, enum machine_mode);
629 static unsigned canon_hash_string (const char *);
630 static unsigned safe_hash (rtx, enum machine_mode);
631 static int exp_equiv_p (rtx, rtx, int, int);
632 static rtx canon_reg (rtx, rtx);
633 static void find_best_addr (rtx, rtx *, enum machine_mode);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   enum machine_mode *,
					   enum machine_mode *);
637 static rtx fold_rtx (rtx, rtx);
638 static rtx equiv_constant (rtx);
639 static void record_jump_equiv (rtx, int);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
			      rtx, int);
642 static void cse_insn (rtx, rtx);
643 static int addr_affects_sp_p (rtx);
644 static void invalidate_from_clobbers (rtx);
645 static rtx cse_process_notes (rtx, rtx);
646 static void cse_around_loop (rtx);
647 static void invalidate_skipped_set (rtx, rtx, void *);
648 static void invalidate_skipped_block (rtx);
649 static void cse_check_loop_start (rtx, rtx, void *);
650 static void cse_set_around_loop (rtx, rtx, rtx);
651 static rtx cse_basic_block (rtx, rtx, struct branch_path *, int);
652 static void count_reg_usage (rtx, int *, int);
653 static int check_for_label_ref (rtx *, void *);
654 extern void dump_class (struct table_elt*);
655 static struct cse_reg_info * get_cse_reg_info (unsigned int);
656 static int check_dependence (rtx *, void *);
658 static void flush_hash_table (void);
659 static bool insn_live_p (rtx, int *);
660 static bool set_live_p (rtx, rtx, int *);
661 static bool dead_libcall_p (rtx, int *);
662 static int cse_change_cc_mode (rtx *, void *);
663 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
664 static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
666 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
667 virtual regs here because the simplify_*_operation routines are called
668 by integrate.c, which is called before virtual register instantiation. */
static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return true;
      return false;

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    case ADDRESSOF:
      return true;

    default:
      return false;
    }
}
698 /* Dump the expressions in the equivalence class indicated by CLASSP.
699 This function is used only for debugging. */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
716 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
719 approx_reg_cost_1 (rtx *xp, void *data)
724 if (x && GET_CODE (x) == REG)
726 unsigned int regno = REGNO (x);
728 if (! CHEAP_REGNO (regno))
730 if (regno < FIRST_PSEUDO_REGISTER)
732 if (SMALL_REGISTER_CLASSES)
744 /* Return an estimate of the cost of the registers used in an rtx.
745 This is mostly the number of different REG expressions in the rtx;
746 however for some exceptions like fixed registers we use a cost of
747 0. If any other hard register reference occurs, return MAX_COST. */
static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}
760 /* Return a negative value if an rtx A, whose costs are given by COST_A
761 and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally desirable.  */
static int
preferrable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
795 /* Internal function, to compute cost when X is not a register; called
796 from COST macro to keep it simple. */
static int
notreg_cost (rtx x, enum rtx_code outer)
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer) * 2);
}
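/* For example (illustrative): (subreg:QI (reg:SI 100) 0) costs 0 when
   truncating SImode to QImode is a no-op, since the low part can be
   accessed without any insn; any other non-register rtx costs twice
   its rtx_cost.  */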
814 /* Return an estimate of the cost of computing rtx X.
815 One use is in cse, to decide which expression to keep in the hash table.
816 Another is in rtl generation, to pick the cheapest way to multiply.
817 Other uses like the latter are expected in the future. */
820 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
830 /* Compute the default costs of certain things.
831 Note that targetm.rtx_costs can override the defaults. */
837 total = COSTS_N_INSNS (5);
843 total = COSTS_N_INSNS (7);
846 /* Used in loop.c and combine.c as a marker. */
850 total = COSTS_N_INSNS (1);
859 /* If we can't tie these modes, make this expensive. The larger
860 the mode, the more expensive it is. */
861 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
862 return COSTS_N_INSNS (2
863 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
  if ((*targetm.rtx_costs) (x, code, outer_code, &total))
    return total;
872 /* Sum the costs of the sub-rtx's, plus cost of this operation,
873 which is already in total. */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
886 /* Return cost of address expression X.
887 Expect that X is properly formed address reference. */
int
address_cost (rtx x, enum machine_mode mode)
{
892 /* The address_cost target hook does not deal with ADDRESSOF nodes. But,
893 during CSE, such nodes are present. Using an ADDRESSOF node which
894 refers to the address of a REG is a good thing because we can then
     turn (MEM (ADDRESSOF (REG))) into just plain REG.  */

  if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
    return 0;
900 /* We may be asked for cost of various unusual addresses, such as operands
901 of push instruction. It is not worthwhile to complicate writing
902 of the target hook by such cases. */
  if (!memory_address_p (mode, x))
    return 1000;

  return (*targetm.address_cost) (x);
}
910 /* If the target doesn't override, compute the cost as with arithmetic. */
int
default_address_cost (rtx x)
{
  return rtx_cost (x, MEM);
}
918 static struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
922 struct cse_reg_info *p;
924 for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;
930 /* Get a new cse_reg_info structure. */
931 if (cse_reg_info_free_list)
933 p = cse_reg_info_free_list;
934 cse_reg_info_free_list = p->next;
937 p = xmalloc (sizeof (struct cse_reg_info));
939 /* Insert into hash table. */
940 p->hash_next = *hash_head;
945 p->reg_in_table = -1;
946 p->subreg_ticked = -1;
947 p->reg_qty = -regno - 1;
949 p->next = cse_reg_info_used_list;
950 cse_reg_info_used_list = p;
951 if (!cse_reg_info_used_list_end)
952 cse_reg_info_used_list_end = p;
955 /* Cache this lookup; we tend to be looking up information about the
956 same register several times in a row. */
957 cached_regno = regno;
958 cached_cse_reg_info = p;
963 /* Clear the hash table and initialize each register with its own quantity,
964 for a new basic block. */
static void
new_basic_block (void)
{
  int i;

  next_qty = 0;
973 /* Clear out hash table state for this pass. */
975 memset (reg_hash, 0, sizeof reg_hash);
  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
983 cached_cse_reg_info = 0;
985 CLEAR_HARD_REG_SET (hard_regs_in_table);
987 /* The per-quantity values used to be initialized here, but it is
988 much faster to initialize each as it is made in `make_new_qty'. */
990 for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }
1018 /* Say that register REG contains a quantity in mode MODE not in any
1019 register before and initialize that quantity. */
static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;
  if (next_qty >= max_qty)
    abort ();
1031 q = REG_QTY (reg) = next_qty++;
1032 ent = &qty_table[q];
1033 ent->first_reg = reg;
1034 ent->last_reg = reg;
1036 ent->const_rtx = ent->const_insn = NULL_RTX;
1037 ent->comparison_code = UNKNOWN;
  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}
1043 /* Make reg NEW equivalent to reg OLD.
1044 OLD is not changing; NEW is. */
static void
make_regs_eqv (unsigned int new, unsigned int old)
{
  unsigned int lastr, firstr;
1050 int q = REG_QTY (old);
1051 struct qty_table_elem *ent;
1053 ent = &qty_table[q];
1055 /* Nothing should become eqv until it has a "non-invalid" qty number. */
  if (! REGNO_QTY_VALID_P (old))
    abort ();
1060 firstr = ent->first_reg;
1061 lastr = ent->last_reg;
1063 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1064 hard regs. Among pseudos, if NEW will live longer than any other reg
1065 of the same qty, and that is beyond the current basic block,
1066 make it the new canonical replacement for this qty. */
1067 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1068 /* Certain fixed registers might be of the class NO_REGS. This means
1069 that not only can they not be allocated by the compiler, but
1070 they cannot be used in substitutions or canonicalizations
1072 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1073 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1074 || (new >= FIRST_PSEUDO_REGISTER
1075 && (firstr < FIRST_PSEUDO_REGISTER
1076 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1077 || (uid_cuid[REGNO_FIRST_UID (new)]
1078 < cse_basic_block_start))
1079 && (uid_cuid[REGNO_LAST_UID (new)]
1080 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
1089 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1090 Otherwise, insert before any non-fixed hard regs that are at the
1091 end. Registers of class NO_REGS cannot be used as an
1092 equivalent for anything. */
1093 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1094 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1095 && new >= FIRST_PSEUDO_REGISTER)
1096 lastr = reg_eqv_table[lastr].prev;
1097 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }

  REG_QTY (new) = q;
}
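/* For example (an illustrative sketch): after (set (reg 101) (reg 100)),
   make_regs_eqv (101, 100) links pseudo 101 into reg 100's quantity
   chain; whether 101 becomes the new head (the canonical replacement)
   depends on the lifetime and fixed-register rules above.  */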
1107 /* Remove REG from its equivalence class. */
static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}
1137 /* Remove any invalid expressions from the hash table
1138 that refer to any of the registers contained in expression X.
1140 Make sure that newly inserted references to those registers
1141 as subexpressions will be considered valid.
1143 mention_regs is not called when a register itself
1144 is being stored in the table.
1146 Return 1 if we have done something that may have changed the hash code
1150 mention_regs (rtx x)
1160 code = GET_CODE (x);
1163 unsigned int regno = REGNO (x);
1164 unsigned int endregno
1165 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1166 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
1169 for (i = regno; i < endregno; i++)
1171 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1172 remove_invalid_refs (i);
1174 REG_IN_TABLE (i) = REG_TICK (i);
1175 SUBREG_TICKED (i) = -1;
1181 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1182 pseudo if they don't use overlapping words. We handle only pseudos
1183 here for simplicity. */
1184 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1185 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1187 unsigned int i = REGNO (SUBREG_REG (x));
1189 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1191 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1192 the last store to this register really stored into this
1193 subreg, then remove the memory of this subreg.
1194 Otherwise, remove any memory of the entire register and
1195 all its subregs from the table. */
1196 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1197 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1198 remove_invalid_refs (i);
1200 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1203 REG_IN_TABLE (i) = REG_TICK (i);
1204 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1208 /* If X is a comparison or a COMPARE and either operand is a register
1209 that does not have a quantity, give it one. This is so that a later
1210 call to record_jump_equiv won't cause X to be assigned a different
1211 hash code and not found in the table after that call.
1213 It is not necessary to do this here, since rehash_using_reg can
1214 fix up the table later, but doing this here eliminates the need to
1215 call that expensive function in the most common case where the only
1216 use of the register is in the comparison. */
1218 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1220 if (GET_CODE (XEXP (x, 0)) == REG
1221 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1222 if (insert_regs (XEXP (x, 0), NULL, 0))
1224 rehash_using_reg (XEXP (x, 0));
1228 if (GET_CODE (XEXP (x, 1)) == REG
1229 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1230 if (insert_regs (XEXP (x, 1), NULL, 0))
1232 rehash_using_reg (XEXP (x, 1));
1237 fmt = GET_RTX_FORMAT (code);
1238 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1240 changed |= mention_regs (XEXP (x, i));
1241 else if (fmt[i] == 'E')
1242 for (j = 0; j < XVECLEN (x, i); j++)
1243 changed |= mention_regs (XVECEXP (x, i, j));
1248 /* Update the register quantities for inserting X into the hash table
1249 with a value equivalent to CLASSP.
1250 (If the class does not contain a REG, it is irrelevant.)
1251 If MODIFIED is nonzero, X is a destination; it is being modified.
1252 Note that delete_reg_equiv should be called on a register
1253 before insert_regs is done on that register with MODIFIED != 0.
1255 Nonzero value means that elements of reg_qty have changed
1256 so X's hash code may be different. */
1259 insert_regs (rtx x, struct table_elt *classp, int modified)
1261 if (GET_CODE (x) == REG)
1263 unsigned int regno = REGNO (x);
1266 /* If REGNO is in the equivalence table already but is of the
1267 wrong mode for that equivalence, don't do anything here. */
1269 qty_valid = REGNO_QTY_VALID_P (regno);
1272 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1274 if (ent->mode != GET_MODE (x))
1278 if (modified || ! qty_valid)
1281 for (classp = classp->first_same_value;
1283 classp = classp->next_same_value)
1284 if (GET_CODE (classp->exp) == REG
1285 && GET_MODE (classp->exp) == GET_MODE (x))
1287 make_regs_eqv (regno, REGNO (classp->exp));
1291 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1292 than REG_IN_TABLE to find out if there was only a single preceding
1293 invalidation - for the SUBREG - or another one, which would be
1294 for the full register. However, if we find here that REG_TICK
1295 indicates that the register is invalid, it means that it has
1296 been invalidated in a separate operation. The SUBREG might be used
1297 now (then this is a recursive call), or we might use the full REG
1298 now and a SUBREG of it later. So bump up REG_TICK so that
1299 mention_regs will do the right thing. */
1301 && REG_IN_TABLE (regno) >= 0
1302 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1304 make_new_qty (regno, GET_MODE (x));
1311 /* If X is a SUBREG, we will likely be inserting the inner register in the
1312 table. If that register doesn't have an assigned quantity number at
1313 this point but does later, the insertion that we will be doing now will
1314 not be accessible because its hash code will have changed. So assign
1315 a quantity number now. */
1317 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1318 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1320 insert_regs (SUBREG_REG (x), NULL, 0);
1325 return mention_regs (x);
1328 /* Look in or update the hash table. */
1330 /* Remove table element ELT from use in the table.
1331 HASH is its hash code, made using the HASH macro.
1332 It's an argument because often that is known in advance
1333 and we save much time not recomputing it. */
1336 remove_from_table (struct table_elt *elt, unsigned int hash)
1341 /* Mark this element as removed. See cse_insn. */
1342 elt->first_same_value = 0;
1344 /* Remove the table element from its equivalence class. */
1347 struct table_elt *prev = elt->prev_same_value;
1348 struct table_elt *next = elt->next_same_value;
1351 next->prev_same_value = prev;
1354 prev->next_same_value = next;
1357 struct table_elt *newfirst = next;
1360 next->first_same_value = newfirst;
1361 next = next->next_same_value;
1366 /* Remove the table element from its hash bucket. */
1369 struct table_elt *prev = elt->prev_same_hash;
1370 struct table_elt *next = elt->next_same_hash;
1373 next->prev_same_hash = prev;
1376 prev->next_same_hash = next;
1377 else if (table[hash] == elt)
1381 /* This entry is not in the proper hash bucket. This can happen
1382 when two classes were merged by `merge_equiv_classes'. Search
1383 for the hash bucket that it heads. This happens only very
1384 rarely, so the cost is acceptable. */
1385 for (hash = 0; hash < HASH_SIZE; hash++)
1386 if (table[hash] == elt)
1391 /* Remove the table element from its related-value circular chain. */
1393 if (elt->related_value != 0 && elt->related_value != elt)
1395 struct table_elt *p = elt->related_value;
1397 while (p->related_value != elt)
1398 p = p->related_value;
1399 p->related_value = elt->related_value;
1400 if (p->related_value == p)
1401 p->related_value = 0;
1404 /* Now add it to the free element chain. */
1405 elt->next_same_hash = free_element_chain;
1406 free_element_chain = elt;
1409 /* Look up X in the hash table and return its table element,
1410 or 0 if X is not in the table.
1412 MODE is the machine-mode of X, or if X is an integer constant
1413 with VOIDmode then MODE is the mode with which X will be used.
   Here we are satisfied to find an expression whose tree structure
   looks like X.  */
1418 static struct table_elt *
lookup (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;
1423 for (p = table[hash]; p; p = p->next_same_hash)
1424 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}
1431 /* Like `lookup' but don't care whether the table element uses invalid regs.
1432 Also ignore discrepancies in the machine mode of a register. */
1434 static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;
1439 if (GET_CODE (x) == REG)
1441 unsigned int regno = REGNO (x);
1443 /* Don't check the machine mode when comparing registers;
1444 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1445 for (p = table[hash]; p; p = p->next_same_hash)
1446 if (GET_CODE (p->exp) == REG
	  && REGNO (p->exp) == regno)
	return p;
1452 for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
      return p;

  return 0;
}
1460 /* Look for an expression equivalent to X and with code CODE.
1461 If one is found, return that expression. */
static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1469 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1470 long as we are narrowing. So if we looked in vain for a mode narrower
1471 than word_mode before, look for word_mode now. */
1472 if (p == 0 && code == CONST_INT
1473 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
    }

  if (p == 0)
    return 0;
1483 for (p = p->first_same_value; p; p = p->next_same_value)
1484 if (GET_CODE (p->exp) == code
1485 /* Make sure this is a valid entry in the table. */
	&& exp_equiv_p (p->exp, p->exp, 1, 0))
      return p->exp;

  return 0;
}
1492 /* Insert X in the hash table, assuming HASH is its hash code
1493 and CLASSP is an element of the class it should go in
1494 (or 0 if a new class should be made).
1495 It is inserted at the proper position to keep the class in
1496 the order cheapest first.
1498 MODE is the machine-mode of X, or if X is an integer constant
1499 with VOIDmode then MODE is the mode with which X will be used.
1501 For elements of equal cheapness, the most recent one
1502 goes in front, except that the first element in the list
1503 remains first unless a cheaper element is added. The order of
1504 pseudo-registers does not matter, as canon_reg will be called to
1505 find the cheapest when a register is retrieved from the table.
1507 The in_memory field in the hash table element is set to 0.
1508 The caller must set it nonzero if appropriate.
1510 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1511 and if insert_regs returns a nonzero value
1512 you must then recompute its hash code before calling here.
1514 If necessary, update table showing constant values of quantities. */
1516 #define CHEAPER(X, Y) \
1517 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
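/* For example (illustrative): with equal `cost', an element whose
   `regcost' is 0 is CHEAPER than one whose `regcost' is 2; a MAX_COST
   value on either axis always loses the comparison.  */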
1519 static struct table_elt *
1520 insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1522 struct table_elt *elt;
1524 /* If X is a register and we haven't made a quantity for it,
1525 something is wrong. */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();
1529 /* If X is a hard register, show it is being put in the table. */
1530 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1532 unsigned int regno = REGNO (x);
1533 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1536 for (i = regno; i < endregno; i++)
1537 SET_HARD_REG_BIT (hard_regs_in_table, i);
1540 /* Put an element for X into the right hash bucket. */
1542 elt = free_element_chain;
1544 free_element_chain = elt->next_same_hash;
1548 elt = xmalloc (sizeof (struct table_elt));
1552 elt->canon_exp = NULL_RTX;
1553 elt->cost = COST (x);
1554 elt->regcost = approx_reg_cost (x);
1555 elt->next_same_value = 0;
1556 elt->prev_same_value = 0;
1557 elt->next_same_hash = table[hash];
1558 elt->prev_same_hash = 0;
1559 elt->related_value = 0;
1562 elt->is_const = (CONSTANT_P (x)
1563 /* GNU C++ takes advantage of this for `this'
1564 (and other const values). */
1565 || (GET_CODE (x) == REG
1566 && RTX_UNCHANGING_P (x)
1567 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1568 || fixed_base_plus_p (x));
1571 table[hash]->prev_same_hash = elt;
1574 /* Put it into the proper value-class. */
1577 classp = classp->first_same_value;
1578 if (CHEAPER (elt, classp))
1579 /* Insert at the head of the class. */
1581 struct table_elt *p;
1582 elt->next_same_value = classp;
1583 classp->prev_same_value = elt;
1584 elt->first_same_value = elt;
1586 for (p = classp; p; p = p->next_same_value)
1587 p->first_same_value = elt;
1591 /* Insert not at head of the class. */
1592 /* Put it after the last element cheaper than X. */
1593 struct table_elt *p, *next;
1595 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1598 /* Put it after P and before NEXT. */
1599 elt->next_same_value = next;
1601 next->prev_same_value = elt;
1603 elt->prev_same_value = p;
1604 p->next_same_value = elt;
1605 elt->first_same_value = classp;
1609 elt->first_same_value = elt;
1611 /* If this is a constant being set equivalent to a register or a register
1612 being set equivalent to a constant, note the constant equivalence.
1614 If this is a constant, it cannot be equivalent to a different constant,
1615 and a constant is the only thing that can be cheaper than a register. So
1616 we know the register is the head of the class (before the constant was
1619 If this is a register that is not already known equivalent to a
1620 constant, we must check the entire class.
1622 If this is a register that is already known equivalent to an insn,
1623 update the qtys `const_insn' to show that `this_insn' is the latest
1624 insn making that quantity equivalent to the constant. */
1626 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1627 && GET_CODE (x) != REG)
1629 int exp_q = REG_QTY (REGNO (classp->exp));
1630 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1632 exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1633 exp_ent->const_insn = this_insn;
1636 else if (GET_CODE (x) == REG
1638 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1641 struct table_elt *p;
1643 for (p = classp; p != 0; p = p->next_same_value)
1645 if (p->is_const && GET_CODE (p->exp) != REG)
1647 int x_q = REG_QTY (REGNO (x));
1648 struct qty_table_elem *x_ent = &qty_table[x_q];
1651 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1652 x_ent->const_insn = this_insn;
1658 else if (GET_CODE (x) == REG
1659 && qty_table[REG_QTY (REGNO (x))].const_rtx
1660 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1661 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1663 /* If this is a constant with symbolic value,
1664 and it has a term with an explicit integer value,
1665 link it up with related expressions. */
1666 if (GET_CODE (x) == CONST)
1668 rtx subexp = get_related_value (x);
1670 struct table_elt *subelt, *subelt_prev;
1674 /* Get the integer-free subexpression in the hash table. */
1675 subhash = safe_hash (subexp, mode) & HASH_MASK;
1676 subelt = lookup (subexp, subhash, mode);
1678 subelt = insert (subexp, NULL, subhash, mode);
1679 /* Initialize SUBELT's circular chain if it has none. */
1680 if (subelt->related_value == 0)
1681 subelt->related_value = subelt;
1682 /* Find the element in the circular chain that precedes SUBELT. */
1683 subelt_prev = subelt;
1684 while (subelt_prev->related_value != subelt)
1685 subelt_prev = subelt_prev->related_value;
1686 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1687 This way the element that follows SUBELT is the oldest one. */
1688 elt->related_value = subelt_prev->related_value;
1689 subelt_prev->related_value = elt;
1696 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1697 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1698 the two classes equivalent.
1700 CLASS1 will be the surviving class; CLASS2 should not be used after this
1703 Any invalid entries in CLASS2 will not be copied. */
1706 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1708 struct table_elt *elt, *next, *new;
1710 /* Ensure we start with the head of the classes. */
1711 class1 = class1->first_same_value;
1712 class2 = class2->first_same_value;
1714 /* If they were already equal, forget it. */
1715 if (class1 == class2)
1718 for (elt = class2; elt; elt = next)
1722 enum machine_mode mode = elt->mode;
1724 next = elt->next_same_value;
1726 /* Remove old entry, make a new one in CLASS1's class.
1727 Don't do this for invalid entries as we cannot find their
1728 hash code (it also isn't necessary). */
1729 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1731 bool need_rehash = false;
1733 hash_arg_in_memory = 0;
1734 hash = HASH (exp, mode);
1736 if (GET_CODE (exp) == REG)
1738 need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1739 delete_reg_equiv (REGNO (exp));
1742 remove_from_table (elt, hash);
1744 if (insert_regs (exp, class1, 0) || need_rehash)
1746 rehash_using_reg (exp);
1747 hash = HASH (exp, mode);
1749 new = insert (exp, class1, hash, mode);
1750 new->in_memory = hash_arg_in_memory;
1755 /* Flush the entire hash table. */
static void
flush_hash_table (void)
{
  int i;
  struct table_elt *p;
1763 for (i = 0; i < HASH_SIZE; i++)
1764 for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (GET_CODE (p->exp) == REG)
	  invalidate (p->exp, p->mode);
	else
	  remove_from_table (p, i);
      }
/* Function called for each rtx to check whether a true dependence exists.  */
1776 struct check_dependence_data
1778 enum machine_mode mode;
1784 check_dependence (rtx *x, void *data)
1786 struct check_dependence_data *d = (struct check_dependence_data *) data;
1787 if (*x && GET_CODE (*x) == MEM)
1788 return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1794 /* Remove from the hash table, or mark as invalid, all expressions whose
1795 values could be altered by storing in X. X is a register, a subreg, or
1796 a memory reference with nonvarying address (because, when a memory
1797 reference with a varying address is stored in, all memory references are
1798 removed by invalidate_memory so specific invalidation is superfluous).
1799 FULL_MODE, if not VOIDmode, indicates that this much should be
1800 invalidated instead of just the amount indicated by the mode of X. This
1801 is only used for bitfield stores into memory.
1803 A nonvarying address may be just a register or just a symbol reference,
1804 or it may be either of those plus a numeric offset. */
1807 invalidate (rtx x, enum machine_mode full_mode)
1810 struct table_elt *p;
1813 switch (GET_CODE (x))
1817 /* If X is a register, dependencies on its contents are recorded
1818 through the qty number mechanism. Just change the qty number of
1819 the register, mark it as invalid for expressions that refer to it,
1820 and remove it itself. */
1821 unsigned int regno = REGNO (x);
1822 unsigned int hash = HASH (x, GET_MODE (x));
1824 /* Remove REGNO from any quantity list it might be on and indicate
1825 that its value might have changed. If it is a pseudo, remove its
1826 entry from the hash table.
1828 For a hard register, we do the first two actions above for any
1829 additional hard registers corresponding to X. Then, if any of these
1830 registers are in the table, we must remove any REG entries that
1831 overlap these registers. */
1833 delete_reg_equiv (regno);
1835 SUBREG_TICKED (regno) = -1;
1837 if (regno >= FIRST_PSEUDO_REGISTER)
1839 /* Because a register can be referenced in more than one mode,
1840 we might have to remove more than one table entry. */
1841 struct table_elt *elt;
1843 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1844 remove_from_table (elt, hash);
1848 HOST_WIDE_INT in_table
1849 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1850 unsigned int endregno
1851 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1852 unsigned int tregno, tendregno, rn;
1853 struct table_elt *p, *next;
1855 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1857 for (rn = regno + 1; rn < endregno; rn++)
1859 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1860 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1861 delete_reg_equiv (rn);
1863 SUBREG_TICKED (rn) = -1;
1867 for (hash = 0; hash < HASH_SIZE; hash++)
1868 for (p = table[hash]; p; p = next)
1870 next = p->next_same_hash;
1872 if (GET_CODE (p->exp) != REG
1873 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1876 tregno = REGNO (p->exp);
1878 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1879 if (tendregno > regno && tregno < endregno)
1880 remove_from_table (p, hash);
1887 invalidate (SUBREG_REG (x), VOIDmode);
1891 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1892 invalidate (XVECEXP (x, 0, i), VOIDmode);
1896 /* This is part of a disjoint return value; extract the location in
1897 question ignoring the offset. */
1898 invalidate (XEXP (x, 0), VOIDmode);
1902 addr = canon_rtx (get_addr (XEXP (x, 0)));
1903 /* Calculate the canonical version of X here so that
1904 true_dependence doesn't generate new RTL for X on each call. */
1907 /* Remove all hash table elements that refer to overlapping pieces of
1909 if (full_mode == VOIDmode)
1910 full_mode = GET_MODE (x);
1912 for (i = 0; i < HASH_SIZE; i++)
1914 struct table_elt *next;
1916 for (p = table[i]; p; p = next)
1918 next = p->next_same_hash;
1921 struct check_dependence_data d;
1923 /* Just canonicalize the expression once;
1924 otherwise each time we call invalidate
1925 true_dependence will canonicalize the
1926 expression again. */
1928 p->canon_exp = canon_rtx (p->exp);
1932 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1933 remove_from_table (p, i);
1944 /* Remove all expressions that refer to register REGNO,
1945 since they are already invalid, and we are about to
1946 mark that register valid again and don't want the old
1947 expressions to reappear as valid. */
1950 remove_invalid_refs (unsigned int regno)
1953 struct table_elt *p, *next;
1955 for (i = 0; i < HASH_SIZE; i++)
1956 for (p = table[i]; p; p = next)
1958 next = p->next_same_hash;
1959 if (GET_CODE (p->exp) != REG
1960 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1961 remove_from_table (p, i);
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
   and mode MODE.  */
1968 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1969 enum machine_mode mode)
1972 struct table_elt *p, *next;
1973 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1975 for (i = 0; i < HASH_SIZE; i++)
1976 for (p = table[i]; p; p = next)
1979 next = p->next_same_hash;
1981 if (GET_CODE (exp) != REG
1982 && (GET_CODE (exp) != SUBREG
1983 || GET_CODE (SUBREG_REG (exp)) != REG
1984 || REGNO (SUBREG_REG (exp)) != regno
1985 || (((SUBREG_BYTE (exp)
1986 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1987 && SUBREG_BYTE (exp) <= end))
1988 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1989 remove_from_table (p, i);
1993 /* Recompute the hash codes of any valid entries in the hash table that
1994 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1996 This is called when we make a jump equivalence. */
1999 rehash_using_reg (rtx x)
2002 struct table_elt *p, *next;
2005 if (GET_CODE (x) == SUBREG)
2008 /* If X is not a register or if the register is known not to be in any
2009 valid entries in the table, we have no work to do. */
2011 if (GET_CODE (x) != REG
2012 || REG_IN_TABLE (REGNO (x)) < 0
2013 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2016 /* Scan all hash chains looking for valid entries that mention X.
2017 If we find one and it is in the wrong hash chain, move it. */
2019 for (i = 0; i < HASH_SIZE; i++)
2020 for (p = table[i]; p; p = next)
2022 next = p->next_same_hash;
2023 if (reg_mentioned_p (x, p->exp)
2024 && exp_equiv_p (p->exp, p->exp, 1, 0)
2025 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2027 if (p->next_same_hash)
2028 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2030 if (p->prev_same_hash)
2031 p->prev_same_hash->next_same_hash = p->next_same_hash;
else
  table[i] = p->next_same_hash;
2035 p->next_same_hash = table[hash];
2036 p->prev_same_hash = 0;
if (table[hash])
  table[hash]->prev_same_hash = p;
table[hash] = p;
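/* Editor's note: a standalone sketch (simplified types, illustrative
   values; not GCC's table_elt) of the chain surgery above -- unlink an
   entry from one doubly-linked hash bucket and push it onto another:  */
#if 0
#include <stdio.h>
#include <stddef.h>

struct node { struct node *next, *prev; int val; };

static void
move_node (struct node **table, struct node *p, unsigned from, unsigned to)
{
  /* Unlink P, fixing the bucket head if P was first in its chain.  */
  if (p->next)
    p->next->prev = p->prev;
  if (p->prev)
    p->prev->next = p->next;
  else
    table[from] = p->next;

  /* Push P on the front of the destination chain.  */
  p->prev = NULL;
  p->next = table[to];
  if (table[to])
    table[to]->prev = p;
  table[to] = p;
}

int
main (void)
{
  struct node a = { NULL, NULL, 42 };
  struct node *table[2] = { &a, NULL };

  move_node (table, &a, 0, 1);
  printf ("%d\n", table[1]->val);  /* 42; bucket 0 is now empty */
  return 0;
}
#endif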
2044 /* Remove from the hash table any expression that is a call-clobbered
register.  Also update the TICK values of such registers.  */
2048 invalidate_for_call (void)
2050 unsigned int regno, endregno;
2053 struct table_elt *p, *next;
2056 /* Go through all the hard registers. For each that is clobbered in
2057 a CALL_INSN, remove the register from quantity chains and update
reg_tick if defined.  Also see if any of these registers is currently
in the table.  */
2061 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2062 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2064 delete_reg_equiv (regno);
2065 if (REG_TICK (regno) >= 0)
{
  REG_TICK (regno)++;
  SUBREG_TICKED (regno) = -1;
}
2071 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2074 /* In the case where we have no call-clobbered hard registers in the
2075 table, we are done. Otherwise, scan the table and remove any
2076 entry that overlaps a call-clobbered register. */
if (in_table)
  for (hash = 0; hash < HASH_SIZE; hash++)
2080 for (p = table[hash]; p; p = next)
2082 next = p->next_same_hash;
2084 if (GET_CODE (p->exp) != REG
2085 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2088 regno = REGNO (p->exp);
2089 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2091 for (i = regno; i < endregno; i++)
2092 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2094 remove_from_table (p, hash);
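/* Editor's note: a standalone sketch of the bit-set operations used
   above, with a plain 64-bit mask standing in for HARD_REG_SET (the
   real macros handle sets wider than one word):  */
#if 0
#include <stdio.h>
#include <stdint.h>

#define TEST_BIT(set, n)  (((set) >> (n)) & 1)
#define CLEAR_BIT(set, n) ((set) &= ~((uint64_t) 1 << (n)))

int
main (void)
{
  uint64_t clobbered = ((uint64_t) 1 << 0) | ((uint64_t) 1 << 5);
  unsigned regno;

  for (regno = 0; regno < 64; regno++)
    if (TEST_BIT (clobbered, regno))
      {
        printf ("reg %u invalidated across the call\n", regno);
        CLEAR_BIT (clobbered, regno);
      }
  return 0;
}
#endif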
2100 /* Given an expression X of type CONST,
2101 and ELT which is its table entry (or 0 if it
2102 is not in the hash table),
2103 return an alternate expression for X as a register plus integer.
2104 If none can be found, return 0. */
2107 use_related_value (rtx x, struct table_elt *elt)
2109 struct table_elt *relt = 0;
2110 struct table_elt *p, *q;
2111 HOST_WIDE_INT offset;
2113 /* First, is there anything related known?
2114 If we have a table element, we can tell from that.
2115 Otherwise, must look it up. */
2117 if (elt != 0 && elt->related_value != 0)
2119 else if (elt == 0 && GET_CODE (x) == CONST)
2121 rtx subexp = get_related_value (x);
2123 relt = lookup (subexp,
2124 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2131 /* Search all related table entries for one that has an
2132 equivalent register. */
2137 /* This loop is strange in that it is executed in two different cases.
2138 The first is when X is already in the table. Then it is searching
2139 the RELATED_VALUE list of X's class (RELT). The second case is when
2140 X is not in the table. Then RELT points to a class for the related
Ensure that, whatever case we are in, we ignore classes that have
2144 the same value as X. */
2146 if (rtx_equal_p (x, p->exp))
2149 for (q = p->first_same_value; q; q = q->next_same_value)
if (GET_CODE (q->exp) == REG)
  break;

if (q)
  break;
2156 p = p->related_value;
2158 /* We went all the way around, so there is nothing to be found.
2159 Alternatively, perhaps RELT was in the table for some other reason
2160 and it has no related values recorded. */
if (p == relt || p == 0)
  return 0;
2168 offset = (get_integer_term (x) - get_integer_term (p->exp));
/* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2170 return plus_constant (q->exp, offset);
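/* Editor's note: the arithmetic behind use_related_value in miniature.
   If SYM+12 is wanted and a register is known to hold SYM+4, the wanted
   value is that register plus the difference of the integer terms.
   Plain integers stand in for rtx addresses (illustrative only):  */
#if 0
#include <stdio.h>

int
main (void)
{
  long sym = 0x1000;            /* address of SYM */
  long wanted = sym + 12;       /* X = (const (plus SYM 12)) */
  long in_reg = sym + 4;        /* a register holds SYM+4 */
  long offset = 12 - 4;         /* difference of the integer terms */

  printf ("%d\n", in_reg + offset == wanted);  /* 1 */
  return 0;
}
#endif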
/* Hash a string.  Just add its bytes up.  */
static inline unsigned
canon_hash_string (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
2187 /* Hash an rtx. We are careful to make sure the value is never negative.
2188 Equivalent registers hash identically.
2189 MODE is used in hashing for CONST_INTs only;
2190 otherwise the mode of X is used.
2192 Store 1 in do_not_record if any subexpression is volatile.
2194 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2195 which does not have the RTX_UNCHANGING_P bit set.
2197 Note that cse_insn knows that the hash code of a MEM expression
2198 is just (int) MEM plus the hash code of the address. */
2201 canon_hash (rtx x, enum machine_mode mode)
2208 /* repeat is used to turn tail-recursion into iteration. */
2213 code = GET_CODE (x);
2218 unsigned int regno = REGNO (x);
2221 /* On some machines, we can't record any non-fixed hard register,
2222 because extending its life will cause reload problems. We
2223 consider ap, fp, sp, gp to be fixed for this purpose.
2225 We also consider CCmode registers to be fixed for this purpose;
failure to do so leads to failure to simplify 0<100 type of
comparisons.
2229 On all machines, we can't record any global registers.
2230 Nor should we record any register that is in a small
2231 class, as defined by CLASS_LIKELY_SPILLED_P. */
2233 if (regno >= FIRST_PSEUDO_REGISTER)
2235 else if (x == frame_pointer_rtx
2236 || x == hard_frame_pointer_rtx
2237 || x == arg_pointer_rtx
2238 || x == stack_pointer_rtx
2239 || x == pic_offset_table_rtx)
2241 else if (global_regs[regno])
2243 else if (fixed_regs[regno])
2245 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2247 else if (SMALL_REGISTER_CLASSES)
2249 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2260 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2264 /* We handle SUBREG of a REG specially because the underlying
2265 reg changes its hash value with every value change; we don't
2266 want to have to forget unrelated subregs when one subreg changes. */
2269 if (GET_CODE (SUBREG_REG (x)) == REG)
2271 hash += (((unsigned) SUBREG << 7)
2272 + REGNO (SUBREG_REG (x))
2273 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2281 unsigned HOST_WIDE_INT tem = INTVAL (x);
2282 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2287 /* This is like the general case, except that it only counts
2288 the integers representing the constant. */
2289 hash += (unsigned) code + (unsigned) GET_MODE (x);
2290 if (GET_MODE (x) != VOIDmode)
2291 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
else
  hash += ((unsigned) CONST_DOUBLE_LOW (x)
           + (unsigned) CONST_DOUBLE_HIGH (x));
return hash;
2302 units = CONST_VECTOR_NUNITS (x);
2304 for (i = 0; i < units; ++i)
2306 elt = CONST_VECTOR_ELT (x, i);
2307 hash += canon_hash (elt, GET_MODE (elt));
2313 /* Assume there is only one rtx object for any given label. */
2315 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2319 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2323 /* We don't record if marked volatile or if BLKmode since we don't
2324 know the size of the move. */
2325 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2330 if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
2331 hash_arg_in_memory = 1;
2333 /* Now that we have already found this special case,
2334 might as well speed it up as much as possible. */
2335 hash += (unsigned) MEM;
2340 /* A USE that mentions non-volatile memory needs special
2341 handling since the MEM may be BLKmode which normally
2342 prevents an entry from being made. Pure calls are
2343 marked by a USE which mentions BLKmode memory. */
2344 if (GET_CODE (XEXP (x, 0)) == MEM
2345 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2347 hash += (unsigned) USE;
2350 if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
2351 hash_arg_in_memory = 1;
2353 /* Now that we have already found this special case,
2354 might as well speed it up as much as possible. */
2355 hash += (unsigned) MEM;
case UNSPEC_VOLATILE:
  do_not_record = 1;
  return 0;

case ASM_OPERANDS:
  if (MEM_VOLATILE_P (x))
    {
      do_not_record = 1;
      return 0;
    }
2382 /* We don't want to take the filename and line into account. */
2383 hash += (unsigned) code + (unsigned) GET_MODE (x)
2384 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2385 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2386 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2388 if (ASM_OPERANDS_INPUT_LENGTH (x))
2390 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2392 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2393 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2394 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2398 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2399 x = ASM_OPERANDS_INPUT (x, 0);
mode = GET_MODE (x);
goto repeat;
2412 i = GET_RTX_LENGTH (code) - 1;
2413 hash += (unsigned) code + (unsigned) GET_MODE (x);
2414 fmt = GET_RTX_FORMAT (code);
2419 rtx tem = XEXP (x, i);
2421 /* If we are about to do the last recursive call
2422 needed at this level, change it into iteration.
2423 This function is called enough to be worth it. */
if (i == 0)
  {
    x = tem;
    goto repeat;
  }

hash += canon_hash (tem, 0);
2431 else if (fmt[i] == 'E')
2432 for (j = 0; j < XVECLEN (x, i); j++)
2433 hash += canon_hash (XVECEXP (x, i, j), 0);
2434 else if (fmt[i] == 's')
2435 hash += canon_hash_string (XSTR (x, i));
2436 else if (fmt[i] == 'i')
unsigned tem = XINT (x, i);
hash += tem;
2441 else if (fmt[i] == '0' || fmt[i] == 't')
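/* Editor's note: a standalone sketch of the tail-recursion-to-iteration
   trick behind the "repeat" label -- the last operand is not hashed by a
   recursive call; the walk reassigns and loops instead.  The toy expr
   type is illustrative, not rtl:  */
#if 0
#include <stdio.h>
#include <stddef.h>

struct expr { int code; struct expr *op0, *op1; };

static unsigned
hash_expr (const struct expr *e)
{
  unsigned h = 0;

  while (e)                     /* plays the role of "goto repeat" */
    {
      h += (unsigned) e->code;
      if (e->op0)
        h += hash_expr (e->op0);  /* real recursion for earlier operands */
      e = e->op1;               /* tail call turned into iteration */
    }
  return h;
}

int
main (void)
{
  struct expr leaf = { 7, NULL, NULL };
  struct expr top = { 3, &leaf, &leaf };

  printf ("%u\n", hash_expr (&top));  /* 3 + 7 + 7 = 17 */
  return 0;
}
#endif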
2450 /* Like canon_hash but with no side effects. */
2453 safe_hash (rtx x, enum machine_mode mode)
2455 int save_do_not_record = do_not_record;
2456 int save_hash_arg_in_memory = hash_arg_in_memory;
2457 unsigned hash = canon_hash (x, mode);
2458 hash_arg_in_memory = save_hash_arg_in_memory;
do_not_record = save_do_not_record;
return hash;
2463 /* Return 1 iff X and Y would canonicalize into the same thing,
2464 without actually constructing the canonicalization of either one.
2465 If VALIDATE is nonzero,
2466 we assume X is an expression being processed from the rtl
2467 and Y was found in the hash table. We check register refs
2468 in Y for being marked as valid.
2470 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2471 that is known to be in the register. Ordinarily, we don't allow them
2472 to match, because letting them match would cause unpredictable results
2473 in all the places that search a hash table chain for an equivalent
2474 for a given value. A possible equivalent that has different structure
2475 has its hash code computed from different data. Whether the hash code
2476 is the same as that of the given value is pure luck. */
2479 exp_equiv_p (rtx x, rtx y, int validate, int equal_values)
2485 /* Note: it is incorrect to assume an expression is equivalent to itself
2486 if VALIDATE is nonzero. */
if (x == y && !validate)
  return 1;

if (x == 0 || y == 0)
  return x == y;
2492 code = GET_CODE (x);
2493 if (code != GET_CODE (y))
2498 /* If X is a constant and Y is a register or vice versa, they may be
2499 equivalent. We only have to validate if Y is a register. */
2500 if (CONSTANT_P (x) && GET_CODE (y) == REG
2501 && REGNO_QTY_VALID_P (REGNO (y)))
2503 int y_q = REG_QTY (REGNO (y));
2504 struct qty_table_elem *y_ent = &qty_table[y_q];
2506 if (GET_MODE (y) == y_ent->mode
2507 && rtx_equal_p (x, y_ent->const_rtx)
2508 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2512 if (CONSTANT_P (y) && code == REG
2513 && REGNO_QTY_VALID_P (REGNO (x)))
2515 int x_q = REG_QTY (REGNO (x));
2516 struct qty_table_elem *x_ent = &qty_table[x_q];
2518 if (GET_MODE (x) == x_ent->mode
2519 && rtx_equal_p (y, x_ent->const_rtx))
2526 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2527 if (GET_MODE (x) != GET_MODE (y))
case LABEL_REF:
  return XEXP (x, 0) == XEXP (y, 0);

case SYMBOL_REF:
  return XSTR (x, 0) == XSTR (y, 0);
2545 unsigned int regno = REGNO (y);
2546 unsigned int endregno
2547 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2548 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2551 /* If the quantities are not the same, the expressions are not
equivalent.  If they are and we are not to validate, they
2553 are equivalent. Otherwise, ensure all regs are up-to-date. */
if (REG_QTY (REGNO (x)) != REG_QTY (regno))
  return 0;

if (! validate)
  return 1;
2561 for (i = regno; i < endregno; i++)
if (REG_IN_TABLE (i) != REG_TICK (i))
  return 0;

return 1;
2568 /* For commutative operations, check both orders. */
2576 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2577 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2578 validate, equal_values))
2579 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2580 validate, equal_values)
2581 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2582 validate, equal_values)));
2585 /* We don't use the generic code below because we want to
2586 disregard filename and line numbers. */
2588 /* A volatile asm isn't equivalent to any other. */
2589 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2592 if (GET_MODE (x) != GET_MODE (y)
2593 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2594 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2595 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2596 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2597 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2600 if (ASM_OPERANDS_INPUT_LENGTH (x))
2602 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2603 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2604 ASM_OPERANDS_INPUT (y, i),
2605 validate, equal_values)
2606 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2607 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2617 /* Compare the elements. If any pair of corresponding elements
fails to match, return 0 for the whole thing.  */
2620 fmt = GET_RTX_FORMAT (code);
2621 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2626 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2631 if (XVECLEN (x, i) != XVECLEN (y, i))
2633 for (j = 0; j < XVECLEN (x, i); j++)
2634 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2635 validate, equal_values))
2640 if (strcmp (XSTR (x, i), XSTR (y, i)))
2645 if (XINT (x, i) != XINT (y, i))
2650 if (XWINT (x, i) != XWINT (y, i))
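/* Editor's note: a standalone sketch of structural equivalence with the
   commutative double check used above, on a toy expression type (not
   rtl; values are illustrative):  */
#if 0
#include <stdio.h>
#include <stddef.h>

struct expr { char op; const struct expr *l, *r; int leaf; };

static int
equiv (const struct expr *x, const struct expr *y)
{
  if (x == y)
    return 1;
  if (x == NULL || y == NULL || x->op != y->op)
    return 0;
  if (x->op == 'v')                     /* leaf: compare values */
    return x->leaf == y->leaf;
  if (equiv (x->l, y->l) && equiv (x->r, y->r))
    return 1;
  if (x->op == '+' || x->op == '*')     /* commutative: try swapped order */
    return equiv (x->l, y->r) && equiv (x->r, y->l);
  return 0;
}

int
main (void)
{
  struct expr a = { 'v', NULL, NULL, 1 }, b = { 'v', NULL, NULL, 2 };
  struct expr ab = { '+', &a, &b, 0 }, ba = { '+', &b, &a, 0 };

  printf ("%d\n", equiv (&ab, &ba));    /* 1: PLUS is commutative */
  return 0;
}
#endif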
2666 /* Return 1 if X has a value that can vary even between two
2667 executions of the program. 0 means X can be compared reliably
2668 against certain constants or near-constants. */
2671 cse_rtx_varies_p (rtx x, int from_alias)
2673 /* We need not check for X and the equivalence class being of the same
2674 mode because if X is equivalent to a constant in some mode, it
2675 doesn't vary in any mode. */
2677 if (GET_CODE (x) == REG
2678 && REGNO_QTY_VALID_P (REGNO (x)))
2680 int x_q = REG_QTY (REGNO (x));
2681 struct qty_table_elem *x_ent = &qty_table[x_q];
2683 if (GET_MODE (x) == x_ent->mode
2684 && x_ent->const_rtx != NULL_RTX)
2688 if (GET_CODE (x) == PLUS
2689 && GET_CODE (XEXP (x, 1)) == CONST_INT
2690 && GET_CODE (XEXP (x, 0)) == REG
2691 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2693 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2694 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2696 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2697 && x0_ent->const_rtx != NULL_RTX)
2701 /* This can happen as the result of virtual register instantiation, if
2702 the initial constant is too large to be a valid address. This gives
2703 us a three instruction sequence, load large offset into a register,
2704 load fp minus a constant into a register, then a MEM which is the
2705 sum of the two `constant' registers. */
2706 if (GET_CODE (x) == PLUS
2707 && GET_CODE (XEXP (x, 0)) == REG
2708 && GET_CODE (XEXP (x, 1)) == REG
2709 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2710 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2712 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2713 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2714 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2715 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2717 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2718 && x0_ent->const_rtx != NULL_RTX
2719 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2720 && x1_ent->const_rtx != NULL_RTX)
2724 return rtx_varies_p (x, from_alias);
2727 /* Canonicalize an expression:
2728 replace each register reference inside it
2729 with the "oldest" equivalent register.
2731 If INSN is nonzero and we are replacing a pseudo with a hard register
2732 or vice versa, validate_change is used to ensure that INSN remains valid
2733 after we make our substitution. The calls are made with IN_GROUP nonzero
2734 so apply_change_group must be called upon the outermost return from this
2735 function (unless INSN is zero). The result of apply_change_group can
2736 generally be discarded since the changes we are making are optional. */
2739 canon_reg (rtx x, rtx insn)
2748 code = GET_CODE (x);
2767 struct qty_table_elem *ent;
2769 /* Never replace a hard reg, because hard regs can appear
2770 in more than one machine mode, and we must preserve the mode
2771 of each occurrence. Also, some hard regs appear in
2772 MEMs that are shared and mustn't be altered. Don't try to
2773 replace any reg that maps to a reg of class NO_REGS. */
2774 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2775 || ! REGNO_QTY_VALID_P (REGNO (x)))
2778 q = REG_QTY (REGNO (x));
2779 ent = &qty_table[q];
2780 first = ent->first_reg;
2781 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2782 : REGNO_REG_CLASS (first) == NO_REGS ? x
2783 : gen_rtx_REG (ent->mode, first));
2790 fmt = GET_RTX_FORMAT (code);
2791 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2797 rtx new = canon_reg (XEXP (x, i), insn);
2800 /* If replacing pseudo with hard reg or vice versa, ensure the
2801 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2802 if (insn != 0 && new != 0
2803 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2804 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2805 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2806 || (insn_code = recog_memoized (insn)) < 0
2807 || insn_data[insn_code].n_dups > 0))
2808 validate_change (insn, &XEXP (x, i), new, 1);
2812 else if (fmt[i] == 'E')
2813 for (j = 0; j < XVECLEN (x, i); j++)
2814 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
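/* Editor's note: canonicalization by quantity number in miniature.
   qty[] maps a register to its value number, first_reg[] maps a value
   number back to the oldest register holding it; every use is rewritten
   to that oldest register.  All values here are made up:  */
#if 0
#include <stdio.h>

int
main (void)
{
  int qty[8] = { 0, 1, 1, 2, 1, 2, 3, 4 };      /* reg -> quantity */
  int first_reg[5] = { 0, 1, 3, 6, 7 };         /* quantity -> oldest reg */
  int uses[3] = { 4, 5, 2 };                    /* regs used by an insn */
  int i;

  for (i = 0; i < 3; i++)
    printf ("reg %d canonicalizes to reg %d\n",
            uses[i], first_reg[qty[uses[i]]]);
  /* regs 4 and 2 both become reg 1; reg 5 becomes reg 3.  */
  return 0;
}
#endif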
2820 /* LOC is a location within INSN that is an operand address (the contents of
a MEM).  Find the best equivalent address to use that is valid for this
insn.
2824 On most CISC machines, complicated address modes are costly, and rtx_cost
2825 is a good approximation for that cost. However, most RISC machines have
2826 only a few (usually only one) memory reference formats. If an address is
2827 valid at all, it is often just as cheap as any other address. Hence, for
2828 RISC machines, we use `address_cost' to compare the costs of various
2829 addresses. For two addresses of equal cost, choose the one with the
2830 highest `rtx_cost' value as that has the potential of eliminating the
2831 most insns. For equal costs, we choose the first in the equivalence
2832 class. Note that we ignore the fact that pseudo registers are cheaper than
2833 hard registers here because we would also prefer the pseudo registers. */
2836 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2838 struct table_elt *elt;
2840 struct table_elt *p;
2841 int found_better = 1;
2842 int save_do_not_record = do_not_record;
2843 int save_hash_arg_in_memory = hash_arg_in_memory;
2848 /* Do not try to replace constant addresses or addresses of local and
2849 argument slots. These MEM expressions are made only once and inserted
2850 in many instructions, as well as being used to control symbol table
2851 output. It is not safe to clobber them.
2853 There are some uncommon cases where the address is already in a register
2854 for some reason, but we cannot take advantage of that because we have
2855 no easy way to unshare the MEM. In addition, looking up all stack
2856 addresses is costly. */
2857 if ((GET_CODE (addr) == PLUS
2858 && GET_CODE (XEXP (addr, 0)) == REG
2859 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2860 && (regno = REGNO (XEXP (addr, 0)),
2861 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2862 || regno == ARG_POINTER_REGNUM))
2863 || (GET_CODE (addr) == REG
2864 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2865 || regno == HARD_FRAME_POINTER_REGNUM
2866 || regno == ARG_POINTER_REGNUM))
2867 || GET_CODE (addr) == ADDRESSOF
|| CONSTANT_ADDRESS_P (addr))
  return;
2871 /* If this address is not simply a register, try to fold it. This will
2872 sometimes simplify the expression. Many simplifications
2873 will not be valid, but some, usually applying the associative rule, will
2874 be valid and produce better code. */
2875 if (GET_CODE (addr) != REG)
2877 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2878 int addr_folded_cost = address_cost (folded, mode);
2879 int addr_cost = address_cost (addr, mode);
2881 if ((addr_folded_cost < addr_cost
2882 || (addr_folded_cost == addr_cost
2883 /* ??? The rtx_cost comparison is left over from an older
2884 version of this code. It is probably no longer helpful. */
2885 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2886 || approx_reg_cost (folded) < approx_reg_cost (addr))))
&& validate_change (insn, loc, folded, 0))
  addr = folded;
2891 /* If this address is not in the hash table, we can't look for equivalences
2892 of the whole address. Also, ignore if volatile. */
2895 hash = HASH (addr, Pmode);
2896 addr_volatile = do_not_record;
2897 do_not_record = save_do_not_record;
2898 hash_arg_in_memory = save_hash_arg_in_memory;
if (addr_volatile)
  return;

elt = lookup (addr, hash, Pmode);
2907 /* We need to find the best (under the criteria documented above) entry
2908 in the class that is valid. We use the `flag' field to indicate
2909 choices that were invalid and iterate until we can't find a better
2910 one that hasn't already been tried. */
for (p = elt->first_same_value; p; p = p->next_same_value)
  p->flag = 0;
2915 while (found_better)
2917 int best_addr_cost = address_cost (*loc, mode);
2918 int best_rtx_cost = (elt->cost + 1) >> 1;
struct table_elt *best_elt = elt;

found_better = 0;
2923 for (p = elt->first_same_value; p; p = p->next_same_value)
2926 if ((GET_CODE (p->exp) == REG
2927 || exp_equiv_p (p->exp, p->exp, 1, 0))
2928 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2929 || (exp_cost == best_addr_cost
2930 && ((p->cost + 1) >> 1) > best_rtx_cost)))
found_better = 1;
best_addr_cost = exp_cost;
best_rtx_cost = (p->cost + 1) >> 1;
best_elt = p;
2941 if (validate_change (insn, loc,
2942 canon_reg (copy_rtx (best_elt->exp),
2951 /* If the address is a binary operation with the first operand a register
2952 and the second a constant, do the same as above, but looking for
2953 equivalences of the register. Then try to simplify before checking for
2954 the best address to use. This catches a few cases: First is when we
2955 have REG+const and the register is another REG+const. We can often merge
2956 the constants and eliminate one insn and one register. It may also be
2957 that a machine has a cheap REG+REG+const. Finally, this improves the
2958 code on the Alpha for unaligned byte stores. */
2960 if (flag_expensive_optimizations
2961 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2962 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2963 && GET_CODE (XEXP (*loc, 0)) == REG)
2965 rtx op1 = XEXP (*loc, 1);
2968 hash = HASH (XEXP (*loc, 0), Pmode);
2969 do_not_record = save_do_not_record;
2970 hash_arg_in_memory = save_hash_arg_in_memory;
2972 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2976 /* We need to find the best (under the criteria documented above) entry
2977 in the class that is valid. We use the `flag' field to indicate
2978 choices that were invalid and iterate until we can't find a better
2979 one that hasn't already been tried. */
for (p = elt->first_same_value; p; p = p->next_same_value)
  p->flag = 0;
2984 while (found_better)
2986 int best_addr_cost = address_cost (*loc, mode);
2987 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2988 struct table_elt *best_elt = elt;
rtx best_rtx = *loc;
int count;
2992 /* This is at worst case an O(n^2) algorithm, so limit our search
2993 to the first 32 elements on the list. This avoids trouble
2994 compiling code with very long basic blocks that can easily
call simplify_gen_binary so many times that we run out of
memory.  */
found_better = 0;
for (p = elt->first_same_value, count = 0;
     p && count < 32;
     p = p->next_same_value, count++)
if (! p->flag
    && (GET_CODE (p->exp) == REG
3004 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3006 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3009 new_cost = address_cost (new, mode);
3011 if (new_cost < best_addr_cost
3012 || (new_cost == best_addr_cost
3013 && (COST (new) + 1) >> 1 > best_rtx_cost))
found_better = 1;
best_addr_cost = new_cost;
best_rtx_cost = (COST (new) + 1) >> 1;
best_elt = p;
best_rtx = new;
3025 if (validate_change (insn, loc,
3026 canon_reg (copy_rtx (best_rtx),
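/* Editor's note: the selection rule documented above, in miniature --
   lowest address cost wins, ties go to the higher rtx cost since the
   richer expression may fold further.  Costs are made-up numbers:  */
#if 0
#include <stdio.h>

int
main (void)
{
  int addr_cost[4] = { 3, 2, 2, 4 };    /* like address_cost () */
  int rtx_cost[4] = { 1, 5, 7, 2 };     /* like (p->cost + 1) >> 1 */
  int best = 0, i;

  for (i = 1; i < 4; i++)
    if (addr_cost[i] < addr_cost[best]
        || (addr_cost[i] == addr_cost[best]
            && rtx_cost[i] > rtx_cost[best]))
      best = i;

  printf ("best candidate: %d\n", best);  /* 2 */
  return 0;
}
#endif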
3036 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3037 operation (EQ, NE, GT, etc.), follow it back through the hash table and
find what values are being compared.
3040 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3041 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3042 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3043 compared to produce cc0.
3045 The return value is the comparison operator and is either the code of
3046 A or the code corresponding to the inverse of the comparison. */
3048 static enum rtx_code
3049 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3050 enum machine_mode *pmode1, enum machine_mode *pmode2)
3054 arg1 = *parg1, arg2 = *parg2;
3056 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3058 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3060 /* Set nonzero when we find something of interest. */
3062 int reverse_code = 0;
3063 struct table_elt *p = 0;
3065 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3066 On machines with CC0, this is the only case that can occur, since
3067 fold_rtx will return the COMPARE or item being compared with zero
3070 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3073 /* If ARG1 is a comparison operator and CODE is testing for
3074 STORE_FLAG_VALUE, get the inner arguments. */
3076 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3078 #ifdef FLOAT_STORE_FLAG_VALUE
3079 REAL_VALUE_TYPE fsfv;
3083 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3084 && code == LT && STORE_FLAG_VALUE == -1)
3085 #ifdef FLOAT_STORE_FLAG_VALUE
3086 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3087 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3088 REAL_VALUE_NEGATIVE (fsfv)))
3093 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3094 && code == GE && STORE_FLAG_VALUE == -1)
3095 #ifdef FLOAT_STORE_FLAG_VALUE
3096 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3097 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3098 REAL_VALUE_NEGATIVE (fsfv)))
3101 x = arg1, reverse_code = 1;
3104 /* ??? We could also check for
3106 (ne (and (eq (...) (const_int 1))) (const_int 0))
3108 and related forms, but let's wait until we see them occurring. */
3111 /* Look up ARG1 in the hash table and see if it has an equivalence
3112 that lets us see what is being compared. */
3113 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3117 p = p->first_same_value;
/* If what we compare is already known to be constant, that is as
   good as anything we can get.
3121 We need to break the loop in this case, because otherwise we
3122 can have an infinite loop when looking at a reg that is known
3123 to be a constant which is the same as a comparison of a reg
3124 against zero which appears later in the insn stream, which in
3125 turn is constant and the same as the comparison of the first reg
3131 for (; p; p = p->next_same_value)
3133 enum machine_mode inner_mode = GET_MODE (p->exp);
3134 #ifdef FLOAT_STORE_FLAG_VALUE
3135 REAL_VALUE_TYPE fsfv;
3138 /* If the entry isn't valid, skip it. */
3139 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3142 if (GET_CODE (p->exp) == COMPARE
3143 /* Another possibility is that this machine has a compare insn
3144 that includes the comparison code. In that case, ARG1 would
3145 be equivalent to a comparison operation that would set ARG1 to
3146 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3147 ORIG_CODE is the actual comparison being done; if it is an EQ,
3148 we must reverse ORIG_CODE. On machine with a negative value
3149 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3152 && GET_MODE_CLASS (inner_mode) == MODE_INT
3153 && (GET_MODE_BITSIZE (inner_mode)
3154 <= HOST_BITS_PER_WIDE_INT)
3155 && (STORE_FLAG_VALUE
3156 & ((HOST_WIDE_INT) 1
3157 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3158 #ifdef FLOAT_STORE_FLAG_VALUE
3160 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3161 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3162 REAL_VALUE_NEGATIVE (fsfv)))
3165 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3170 else if ((code == EQ
3172 && GET_MODE_CLASS (inner_mode) == MODE_INT
3173 && (GET_MODE_BITSIZE (inner_mode)
3174 <= HOST_BITS_PER_WIDE_INT)
3175 && (STORE_FLAG_VALUE
3176 & ((HOST_WIDE_INT) 1
3177 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3178 #ifdef FLOAT_STORE_FLAG_VALUE
3180 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3181 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3182 REAL_VALUE_NEGATIVE (fsfv)))
3185 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
/* If this is a non-trapping address, e.g. fp + constant, the
3193 equivalent is a better operand since it may let us predict
3194 the value of the comparison. */
3195 else if (!rtx_addr_can_trap_p (p->exp))
3202 /* If we didn't find a useful equivalence for ARG1, we are done.
Otherwise, set up for the next iteration.  */
if (x == 0)
  break;
3207 /* If we need to reverse the comparison, make sure that that is
3208 possible -- we can't necessarily infer the value of GE from LT
3209 with floating-point operands. */
3212 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
if (reversed == UNKNOWN)
  break;
else
  code = reversed;
3218 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3219 code = GET_CODE (x);
3220 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3223 /* Return our results. Return the modes from before fold_rtx
3224 because fold_rtx might produce const_int, and then it's too late. */
3225 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
*parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

return code;
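/* Editor's note: why the reversal above must be guarded for floating
   point.  With NaNs, !(a < b) does not imply a >= b, so GE cannot be
   inferred from the failure of LT.  A standalone demonstration:  */
#if 0
#include <stdio.h>
#include <math.h>

int
main (void)
{
  double a = NAN, b = 1.0;

  printf ("a <  b : %d\n", a < b);      /* 0 */
  printf ("a >= b : %d\n", a >= b);     /* also 0: GE is not LT's inverse */
  return 0;
}
#endif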
3231 /* If X is a nontrivial arithmetic operation on an argument
3232 for which a constant value can be determined, return
3233 the result of operating on that value, as a constant.
3234 Otherwise, return X, possibly with one or more operands
3235 modified by recursive calls to this function.
3237 If X is a register whose contents are known, we do NOT
3238 return those contents here. equiv_constant is called to
3241 INSN is the insn that we may be modifying. If it is 0, make a copy
3242 of X before modifying it. */
3245 fold_rtx (rtx x, rtx insn)
3248 enum machine_mode mode;
3255 /* Folded equivalents of first two operands of X. */
3259 /* Constant equivalents of first three operands of X;
3260 0 when no such equivalent is known. */
/* The mode of the first operand of X.  We need this for sign and zero
   extends.  */
3267 enum machine_mode mode_arg0;
3272 mode = GET_MODE (x);
3273 code = GET_CODE (x);
3283 /* No use simplifying an EXPR_LIST
3284 since they are used only for lists of args
3285 in a function call's REG_EQUAL note. */
3287 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3288 want to (e.g.,) make (addressof (const_int 0)) just because
3289 the location is known to be zero. */
3295 return prev_insn_cc0;
3299 /* If the next insn is a CODE_LABEL followed by a jump table,
3300 PC's value is a LABEL_REF pointing to that label. That
3301 lets us fold switch statements on the VAX. */
3304 if (insn && tablejump_p (insn, &next, NULL))
3305 return gen_rtx_LABEL_REF (Pmode, next);
3310 /* See if we previously assigned a constant value to this SUBREG. */
3311 if ((new = lookup_as_function (x, CONST_INT)) != 0
3312 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3315 /* If this is a paradoxical SUBREG, we have no idea what value the
3316 extra bits would have. However, if the operand is equivalent
3317 to a SUBREG whose operand is the same as our mode, and all the
3318 modes are within a word, we can just use the inner operand
3319 because these SUBREGs just say how to treat the register.
3321 Similarly if we find an integer constant. */
3323 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3325 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3326 struct table_elt *elt;
3328 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3329 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3330 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3332 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3334 if (CONSTANT_P (elt->exp)
3335 && GET_MODE (elt->exp) == VOIDmode)
3338 if (GET_CODE (elt->exp) == SUBREG
3339 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3340 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3341 return copy_rtx (SUBREG_REG (elt->exp));
3347 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3348 We might be able to if the SUBREG is extracting a single word in an
3349 integral mode or extracting the low part. */
3351 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3352 const_arg0 = equiv_constant (folded_arg0);
3354 folded_arg0 = const_arg0;
3356 if (folded_arg0 != SUBREG_REG (x))
3358 new = simplify_subreg (mode, folded_arg0,
3359 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3364 /* If this is a narrowing SUBREG and our operand is a REG, see if
3365 we can find an equivalence for REG that is an arithmetic operation
3366 in a wider mode where both operands are paradoxical SUBREGs
3367 from objects of our result mode. In that case, we couldn't report
3368 an equivalent value for that operation, since we don't know what the
3369 extra bits will be. But we can find an equivalence for this SUBREG
by folding that operation in the narrow mode.  This allows us to
3371 fold arithmetic in narrow modes when the machine only supports
3372 word-sized arithmetic.
3374 Also look for a case where we have a SUBREG whose operand is the
3375 same as our result. If both modes are smaller than a word, we
3376 are simply interpreting a register in different modes and we
3377 can use the inner value. */
3379 if (GET_CODE (folded_arg0) == REG
3380 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3381 && subreg_lowpart_p (x))
3383 struct table_elt *elt;
/* We can use HASH here since we know that canon_hash won't be
   called.  */
3387 elt = lookup (folded_arg0,
3388 HASH (folded_arg0, GET_MODE (folded_arg0)),
3389 GET_MODE (folded_arg0));
3392 elt = elt->first_same_value;
3394 for (; elt; elt = elt->next_same_value)
3396 enum rtx_code eltcode = GET_CODE (elt->exp);
3398 /* Just check for unary and binary operations. */
3399 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3400 && GET_CODE (elt->exp) != SIGN_EXTEND
3401 && GET_CODE (elt->exp) != ZERO_EXTEND
3402 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3403 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3404 && (GET_MODE_CLASS (mode)
3405 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3407 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3409 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3410 op0 = fold_rtx (op0, NULL_RTX);
3412 op0 = equiv_constant (op0);
3414 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3417 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3418 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3419 && eltcode != DIV && eltcode != MOD
3420 && eltcode != UDIV && eltcode != UMOD
3421 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3422 && eltcode != ROTATE && eltcode != ROTATERT
3423 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3424 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3426 || CONSTANT_P (XEXP (elt->exp, 0)))
3427 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3428 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3430 || CONSTANT_P (XEXP (elt->exp, 1))))
3432 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3433 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3435 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3436 op0 = fold_rtx (op0, NULL_RTX);
3439 op0 = equiv_constant (op0);
3441 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3442 op1 = fold_rtx (op1, NULL_RTX);
3445 op1 = equiv_constant (op1);
3447 /* If we are looking for the low SImode part of
3448 (ashift:DI c (const_int 32)), it doesn't work
3449 to compute that in SImode, because a 32-bit shift
3450 in SImode is unpredictable. We know the value is 0. */
3452 && GET_CODE (elt->exp) == ASHIFT
3453 && GET_CODE (op1) == CONST_INT
3454 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3456 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3458 /* If the count fits in the inner mode's width,
3459 but exceeds the outer mode's width,
the value will get truncated to 0
by the subreg.  */
new = CONST0_RTX (mode);
else
3464 /* If the count exceeds even the inner mode's width,
don't fold this expression.  */
new = 0;
3468 else if (op0 && op1)
3469 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3473 else if (GET_CODE (elt->exp) == SUBREG
3474 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3475 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3477 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3478 new = copy_rtx (SUBREG_REG (elt->exp));
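/* Editor's note: the shift hazard handled above, standalone.  The low
   32 bits of a 64-bit shift by 32 are exactly 0, but performing the
   same shift in the 32-bit mode would be undefined:  */
#if 0
#include <stdio.h>
#include <stdint.h>

int
main (void)
{
  uint64_t wide = (uint64_t) 5 << 32;

  printf ("%u\n", (uint32_t) wide);     /* 0: the DImode result, truncated */
  /* ((uint32_t) 5) << 32 would be undefined behavior in C; many
     machines truncate the count mod 32, giving 5 instead of 0.  */
  return 0;
}
#endif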
3489 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3490 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3491 new = lookup_as_function (XEXP (x, 0), code);
3493 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3497 /* If we are not actually processing an insn, don't try to find the
3498 best address. Not only don't we care, but we could modify the
3499 MEM in an invalid way since we have no insn to validate against. */
3501 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3504 /* Even if we don't fold in the insn itself,
3505 we can safely do so here, in hopes of getting a constant. */
3506 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3508 HOST_WIDE_INT offset = 0;
3510 if (GET_CODE (addr) == REG
3511 && REGNO_QTY_VALID_P (REGNO (addr)))
3513 int addr_q = REG_QTY (REGNO (addr));
3514 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3516 if (GET_MODE (addr) == addr_ent->mode
3517 && addr_ent->const_rtx != NULL_RTX)
3518 addr = addr_ent->const_rtx;
3521 /* If address is constant, split it into a base and integer offset. */
if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
  base = addr;
3524 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3525 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3527 base = XEXP (XEXP (addr, 0), 0);
3528 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3530 else if (GET_CODE (addr) == LO_SUM
3531 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3532 base = XEXP (addr, 1);
3533 else if (GET_CODE (addr) == ADDRESSOF)
3534 return change_address (x, VOIDmode, addr);
3536 /* If this is a constant pool reference, we can fold it into its
3537 constant to allow better value tracking. */
3538 if (base && GET_CODE (base) == SYMBOL_REF
3539 && CONSTANT_POOL_ADDRESS_P (base))
3541 rtx constant = get_pool_constant (base);
3542 enum machine_mode const_mode = get_pool_mode (base);
3545 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3547 constant_pool_entries_cost = COST (constant);
3548 constant_pool_entries_regcost = approx_reg_cost (constant);
3551 /* If we are loading the full constant, we have an equivalence. */
3552 if (offset == 0 && mode == const_mode)
3555 /* If this actually isn't a constant (weird!), we can't do
3556 anything. Otherwise, handle the two most common cases:
3557 extracting a word from a multi-word constant, and extracting
3558 the low-order bits. Other cases don't seem common enough to
3560 if (! CONSTANT_P (constant))
3563 if (GET_MODE_CLASS (mode) == MODE_INT
3564 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3565 && offset % UNITS_PER_WORD == 0
3566 && (new = operand_subword (constant,
3567 offset / UNITS_PER_WORD,
3568 0, const_mode)) != 0)
3571 if (((BYTES_BIG_ENDIAN
3572 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3573 || (! BYTES_BIG_ENDIAN && offset == 0))
3574 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3578 /* If this is a reference to a label at a known position in a jump
3579 table, we also know its value. */
3580 if (base && GET_CODE (base) == LABEL_REF)
3582 rtx label = XEXP (base, 0);
3583 rtx table_insn = NEXT_INSN (label);
3585 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3586 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3588 rtx table = PATTERN (table_insn);
3591 && (offset / GET_MODE_SIZE (GET_MODE (table))
3592 < XVECLEN (table, 0)))
3593 return XVECEXP (table, 0,
3594 offset / GET_MODE_SIZE (GET_MODE (table)));
3596 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3597 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3599 rtx table = PATTERN (table_insn);
3602 && (offset / GET_MODE_SIZE (GET_MODE (table))
3603 < XVECLEN (table, 1)))
3605 offset /= GET_MODE_SIZE (GET_MODE (table));
3606 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3609 if (GET_MODE (table) != Pmode)
3610 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3612 /* Indicate this is a constant. This isn't a
3613 valid form of CONST, but it will only be used
3614 to fold the next insns and then discarded, so
3617 Note this expression must be explicitly discarded,
3618 by cse_insn, else it may end up in a REG_EQUAL note
3619 and "escape" to cause problems elsewhere. */
3620 return gen_rtx_CONST (GET_MODE (new), new);
3628 #ifdef NO_FUNCTION_CSE
3630 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3636 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3637 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3638 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3648 mode_arg0 = VOIDmode;
3650 /* Try folding our operands.
3651 Then see which ones have constant values known. */
3653 fmt = GET_RTX_FORMAT (code);
3654 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3657 rtx arg = XEXP (x, i);
3658 rtx folded_arg = arg, const_arg = 0;
3659 enum machine_mode mode_arg = GET_MODE (arg);
3660 rtx cheap_arg, expensive_arg;
3661 rtx replacements[2];
3663 int old_cost = COST_IN (XEXP (x, i), code);
3665 /* Most arguments are cheap, so handle them specially. */
3666 switch (GET_CODE (arg))
3669 /* This is the same as calling equiv_constant; it is duplicated
3671 if (REGNO_QTY_VALID_P (REGNO (arg)))
3673 int arg_q = REG_QTY (REGNO (arg));
3674 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3676 if (arg_ent->const_rtx != NULL_RTX
3677 && GET_CODE (arg_ent->const_rtx) != REG
3678 && GET_CODE (arg_ent->const_rtx) != PLUS)
3680 = gen_lowpart_if_possible (GET_MODE (arg),
3681 arg_ent->const_rtx);
3696 folded_arg = prev_insn_cc0;
3697 mode_arg = prev_insn_cc0_mode;
3698 const_arg = equiv_constant (folded_arg);
3703 folded_arg = fold_rtx (arg, insn);
3704 const_arg = equiv_constant (folded_arg);
3707 /* For the first three operands, see if the operand
3708 is constant or equivalent to a constant. */
3712 folded_arg0 = folded_arg;
3713 const_arg0 = const_arg;
3714 mode_arg0 = mode_arg;
3717 folded_arg1 = folded_arg;
3718 const_arg1 = const_arg;
3721 const_arg2 = const_arg;
3725 /* Pick the least expensive of the folded argument and an
3726 equivalent constant argument. */
3727 if (const_arg == 0 || const_arg == folded_arg
3728 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3729 cheap_arg = folded_arg, expensive_arg = const_arg;
3731 cheap_arg = const_arg, expensive_arg = folded_arg;
3733 /* Try to replace the operand with the cheapest of the two
3734 possibilities. If it doesn't work and this is either of the first
3735 two operands of a commutative operation, try swapping them.
3736 If THAT fails, try the more expensive, provided it is cheaper
3737 than what is already there. */
if (cheap_arg == XEXP (x, i))
  continue;

if (insn == 0 && ! copied)
  {
    x = copy_rtx (x);
    copied = 1;
  }
3748 /* Order the replacements from cheapest to most expensive. */
3749 replacements[0] = cheap_arg;
3750 replacements[1] = expensive_arg;
3752 for (j = 0; j < 2 && replacements[j]; j++)
3754 int new_cost = COST_IN (replacements[j], code);
3756 /* Stop if what existed before was cheaper. Prefer constants
3757 in the case of a tie. */
3758 if (new_cost > old_cost
|| (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
  break;
3762 /* It's not safe to substitute the operand of a conversion
3763 operator with a constant, as the conversion's identity
depends upon the mode of its operand.  This optimization
3765 is handled by the call to simplify_unary_operation. */
3766 if (GET_RTX_CLASS (code) == '1'
3767 && GET_MODE (replacements[j]) != mode_arg0
3768 && (code == ZERO_EXTEND
3769 || code == SIGN_EXTEND
3771 || code == FLOAT_TRUNCATE
3772 || code == FLOAT_EXTEND
3775 || code == UNSIGNED_FLOAT
3776 || code == UNSIGNED_FIX))
3779 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3782 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3783 || code == LTGT || code == UNEQ || code == ORDERED
3784 || code == UNORDERED)
3786 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3787 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3789 if (apply_change_group ())
3791 /* Swap them back to be invalid so that this loop can
3792 continue and flag them to be swapped back later. */
3795 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3807 /* Don't try to fold inside of a vector of expressions.
3808 Doing nothing is harmless. */
3812 /* If a commutative operation, place a constant integer as the second
3813 operand unless the first operand is also a constant integer. Otherwise,
3814 place any constant second unless the first operand is also a constant. */
3816 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3817 || code == LTGT || code == UNEQ || code == ORDERED
3818 || code == UNORDERED)
3821 || swap_commutative_operands_p (const_arg0 ? const_arg0
3823 const_arg1 ? const_arg1
3826 rtx tem = XEXP (x, 0);
3828 if (insn == 0 && ! copied)
3834 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3835 validate_change (insn, &XEXP (x, 1), tem, 1);
3836 if (apply_change_group ())
3838 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3839 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3844 /* If X is an arithmetic operation, see if we can simplify it. */
3846 switch (GET_RTX_CLASS (code))
3852 /* We can't simplify extension ops unless we know the
3854 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3855 && mode_arg0 == VOIDmode)
/* If we had a CONST, strip it off and put it back later if we
   fold.  */
3860 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3861 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3863 new = simplify_unary_operation (code, mode,
3864 const_arg0 ? const_arg0 : folded_arg0,
3866 if (new != 0 && is_const)
3867 new = gen_rtx_CONST (mode, new);
3872 /* Don't perform any simplifications of vector mode comparisons. */
3873 if (VECTOR_MODE_P (mode))
3876 /* See what items are actually being compared and set FOLDED_ARG[01]
3877 to those values and CODE to the actual comparison code. If any are
3878 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3879 do anything if both operands are already known to be constant. */
3881 if (const_arg0 == 0 || const_arg1 == 0)
3883 struct table_elt *p0, *p1;
3884 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3885 enum machine_mode mode_arg1;
3887 #ifdef FLOAT_STORE_FLAG_VALUE
3888 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3890 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3891 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3892 false_rtx = CONST0_RTX (mode);
3896 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3897 &mode_arg0, &mode_arg1);
3898 const_arg0 = equiv_constant (folded_arg0);
3899 const_arg1 = equiv_constant (folded_arg1);
3901 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3902 what kinds of things are being compared, so we can't do
3903 anything with this comparison. */
if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
  break;
3908 /* If we do not now have two constants being compared, see
3909 if we can nevertheless deduce some things about the
3911 if (const_arg0 == 0 || const_arg1 == 0)
3913 /* Some addresses are known to be nonzero. We don't know
3914 their sign, but equality comparisons are known. */
3915 if (const_arg1 == const0_rtx
3916 && nonzero_address_p (folded_arg0))
3920 else if (code == NE)
3924 /* See if the two operands are the same. */
3926 if (folded_arg0 == folded_arg1
3927 || (GET_CODE (folded_arg0) == REG
3928 && GET_CODE (folded_arg1) == REG
3929 && (REG_QTY (REGNO (folded_arg0))
3930 == REG_QTY (REGNO (folded_arg1))))
3931 || ((p0 = lookup (folded_arg0,
3932 (safe_hash (folded_arg0, mode_arg0)
3933 & HASH_MASK), mode_arg0))
3934 && (p1 = lookup (folded_arg1,
3935 (safe_hash (folded_arg1, mode_arg0)
3936 & HASH_MASK), mode_arg0))
3937 && p0->first_same_value == p1->first_same_value))
3939 /* Sadly two equal NaNs are not equivalent. */
3940 if (!HONOR_NANS (mode_arg0))
3941 return ((code == EQ || code == LE || code == GE
3942 || code == LEU || code == GEU || code == UNEQ
3943 || code == UNLE || code == UNGE
|| code == ORDERED)
? true_rtx : false_rtx);
3946 /* Take care for the FP compares we can resolve. */
if (code == UNEQ || code == UNLE || code == UNGE)
  return true_rtx;
if (code == LTGT || code == LT || code == GT)
  return false_rtx;
3953 /* If FOLDED_ARG0 is a register, see if the comparison we are
3954 doing now is either the same as we did before or the reverse
3955 (we only check the reverse if not floating-point). */
3956 else if (GET_CODE (folded_arg0) == REG)
3958 int qty = REG_QTY (REGNO (folded_arg0));
3960 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3962 struct qty_table_elem *ent = &qty_table[qty];
3964 if ((comparison_dominates_p (ent->comparison_code, code)
3965 || (! FLOAT_MODE_P (mode_arg0)
3966 && comparison_dominates_p (ent->comparison_code,
3967 reverse_condition (code))))
3968 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3970 && rtx_equal_p (ent->comparison_const,
3972 || (GET_CODE (folded_arg1) == REG
3973 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3974 return (comparison_dominates_p (ent->comparison_code, code)
3975 ? true_rtx : false_rtx);
3981 /* If we are comparing against zero, see if the first operand is
3982 equivalent to an IOR with a constant. If so, we may be able to
3983 determine the result of this comparison. */
3985 if (const_arg1 == const0_rtx)
3987 rtx y = lookup_as_function (folded_arg0, IOR);
3991 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3992 && GET_CODE (inner_const) == CONST_INT
3993 && INTVAL (inner_const) != 0)
3995 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3996 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3997 && (INTVAL (inner_const)
3998 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3999 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4001 #ifdef FLOAT_STORE_FLAG_VALUE
4002 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4004 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4005 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4006 false_rtx = CONST0_RTX (mode);
new = simplify_relational_operation (code,
                                     (mode_arg0 != VOIDmode
                                      ? mode_arg0
                                      : (GET_MODE (const_arg0
                                                   ? const_arg0
                                                   : folded_arg0)
                                         != VOIDmode
                                         ? GET_MODE (const_arg0
                                                     ? const_arg0
                                                     : folded_arg0)
                                         : GET_MODE (const_arg1
                                                     ? const_arg1
                                                     : folded_arg1))),
                                     const_arg0 ? const_arg0 : folded_arg0,
                                     const_arg1 ? const_arg1 : folded_arg1);
4045 #ifdef FLOAT_STORE_FLAG_VALUE
4046 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4048 if (new == const0_rtx)
4049 new = CONST0_RTX (mode);
4051 new = (CONST_DOUBLE_FROM_REAL_VALUE
4052 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4062 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4063 with that LABEL_REF as its second operand. If so, the result is
4064 the first operand of that MINUS. This handles switches with an
4065 ADDR_DIFF_VEC table. */
4066 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4069 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4070 : lookup_as_function (folded_arg0, MINUS);
4072 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4073 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4076 /* Now try for a CONST of a MINUS like the above. */
4077 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4078 : lookup_as_function (folded_arg0, CONST))) != 0
4079 && GET_CODE (XEXP (y, 0)) == MINUS
4080 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4081 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4082 return XEXP (XEXP (y, 0), 0);
4085 /* Likewise if the operands are in the other order. */
4086 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4089 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4090 : lookup_as_function (folded_arg1, MINUS);
4092 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4093 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4096 /* Now try for a CONST of a MINUS like the above. */
4097 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4098 : lookup_as_function (folded_arg1, CONST))) != 0
4099 && GET_CODE (XEXP (y, 0)) == MINUS
4100 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4101 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4102 return XEXP (XEXP (y, 0), 0);
4105 /* If second operand is a register equivalent to a negative
4106 CONST_INT, see if we can find a register equivalent to the
4107 positive constant. Make a MINUS if so. Don't do this for
4108 a non-negative constant since we might then alternate between
4109 choosing positive and negative constants. Having the positive
4110 constant previously-used is the more common case. Be sure
4111 the resulting constant is non-negative; if const_arg1 were
4112 the smallest negative number this would overflow: depending
4113 on the mode, this would either just be the same value (and
4114 hence not save anything) or be incorrect. */
4115 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4116 && INTVAL (const_arg1) < 0
4117 /* This used to test
4119 -INTVAL (const_arg1) >= 0
But the Sun V5.0 compilers mis-compiled that test.  So
4122 instead we test for the problematic value in a more direct
4123 manner and hope the Sun compilers get it correct. */
4124 && INTVAL (const_arg1) !=
4125 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4126 && GET_CODE (folded_arg1) == REG)
4128 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4130 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4134 for (p = p->first_same_value; p; p = p->next_same_value)
4135 if (GET_CODE (p->exp) == REG)
4136 return simplify_gen_binary (MINUS, mode, folded_arg0,
4137 canon_reg (p->exp, NULL_RTX));
4142 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4143 If so, produce (PLUS Z C2-C). */
4144 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4146 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4147 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4148 return fold_rtx (plus_constant (copy_rtx (y),
-INTVAL (const_arg1)),
NULL_RTX);
4156 case SMIN: case SMAX: case UMIN: case UMAX:
4157 case IOR: case AND: case XOR:
4159 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4160 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4161 is known to be of similar form, we may be able to replace the
4162 operation with a combined operation. This may eliminate the
4163 intermediate operation if every use is simplified in this way.
4164 Note that the similar optimization done by combine.c only works
4165 if the intermediate operation's result has only one reference. */
4167 if (GET_CODE (folded_arg0) == REG
4168 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4171 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4172 rtx y = lookup_as_function (folded_arg0, code);
4174 enum rtx_code associate_code;
4178 || 0 == (inner_const
4179 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4180 || GET_CODE (inner_const) != CONST_INT
4181 /* If we have compiled a statement like
4182 "if (x == (x & mask1))", and now are looking at
4183 "x & mask2", we will have a case where the first operand
4184 of Y is the same as our first operand. Unless we detect
4185 this case, an infinite loop will result. */
4186 || XEXP (y, 0) == folded_arg0)
4189 /* Don't associate these operations if they are a PLUS with the
4190 same constant and it is a power of two. These might be doable
4191 with a pre- or post-increment. Similarly for two subtracts of
4192 identical powers of two with post decrement. */
4194 if (code == PLUS && const_arg1 == inner_const
4195 && ((HAVE_PRE_INCREMENT
4196 && exact_log2 (INTVAL (const_arg1)) >= 0)
4197 || (HAVE_POST_INCREMENT
4198 && exact_log2 (INTVAL (const_arg1)) >= 0)
4199 || (HAVE_PRE_DECREMENT
4200 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4201 || (HAVE_POST_DECREMENT
4202 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4205 /* Compute the code used to compose the constants. For example,
4206 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4208 associate_code = (is_shift || code == MINUS ? PLUS : code);
4210 new_const = simplify_binary_operation (associate_code, mode,
4211 const_arg1, inner_const);
4216 /* If we are associating shift operations, don't let this
4217 produce a shift of the size of the object or larger.
4218 This could occur when we follow a sign-extend by a right
4219 shift on a machine that does a sign-extend as a pair
4220 of shifts. */
4222 if (is_shift && GET_CODE (new_const) == CONST_INT
4223 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4225 /* As an exception, we can turn an ASHIFTRT of this
4226 form into a shift of the number of bits - 1. */
4227 if (code == ASHIFTRT)
4228 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4233 y = copy_rtx (XEXP (y, 0));
4235 /* If Y contains our first operand (the most common way this
4236 can happen is if Y is a MEM), we would go into an infinite
4237 loop if we tried to fold it. So don't in that case. */
4239 if (! reg_mentioned_p (folded_arg0, y))
4240 y = fold_rtx (y, insn);
4242 return simplify_gen_binary (code, mode, y, new_const);
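/* Two invented examples of the associative combination above: shifts
   compose by adding their counts, so with reg 72 known to be
   (ashift:SI (reg:SI 73) (const_int 2)),

	(ashift:SI (reg:SI 72) (const_int 3))

   becomes (ashift:SI (reg:SI 73) (const_int 5)); and because
   A-C1-C2 == A-(C1+C2), a MINUS of a known MINUS combines its two
   constants with PLUS, the associate_code chosen above. */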
4246 case DIV: case UDIV:
4247 /* ??? The associative optimization performed immediately above is
4248 also possible for DIV and UDIV using associate_code of MULT.
4249 However, we would need extra code to verify that the
4250 multiplication does not overflow, that is, there is no overflow
4251 in the calculation of new_const. */
4258 new = simplify_binary_operation (code, mode,
4259 const_arg0 ? const_arg0 : folded_arg0,
4260 const_arg1 ? const_arg1 : folded_arg1);
4264 /* (lo_sum (high X) X) is simply X. */
4265 if (code == LO_SUM && const_arg0 != 0
4266 && GET_CODE (const_arg0) == HIGH
4267 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4273 new = simplify_ternary_operation (code, mode, mode_arg0,
4274 const_arg0 ? const_arg0 : folded_arg0,
4275 const_arg1 ? const_arg1 : folded_arg1,
4276 const_arg2 ? const_arg2 : XEXP (x, 2));
4280 /* Eliminate CONSTANT_P_RTX if it is constant. */
4281 if (code == CONSTANT_P_RTX)
4285 if (optimize == 0 || !flag_gcse)
4291 return new ? new : x;
4294 /* Return a constant value currently equivalent to X.
4295 Return 0 if we don't know one. */
4298 equiv_constant (rtx x)
4300 if (GET_CODE (x) == REG
4301 && REGNO_QTY_VALID_P (REGNO (x)))
4303 int x_q = REG_QTY (REGNO (x));
4304 struct qty_table_elem *x_ent = &qty_table[x_q];
4306 if (x_ent->const_rtx)
4307 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4310 if (x == 0 || CONSTANT_P (x))
4313 /* If X is a MEM, try to fold it outside the context of any insn to see if
4314 it might be equivalent to a constant. That handles the case where it
4315 is a constant-pool reference. Then try to look it up in the hash table
4316 in case it is something whose value we have seen before. */
4318 if (GET_CODE (x) == MEM)
4320 struct table_elt *elt;
4322 x = fold_rtx (x, NULL_RTX);
4326 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4330 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4331 if (elt->is_const && CONSTANT_P (elt->exp))
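/* A minimal usage sketch (disabled, and the helper name is invented,
   not part of this pass): combining equiv_constant with
   simplify_binary_operation to fold an addition whose operands are
   only known to be constant via the table. */
#if 0
static rtx
fold_plus_via_equiv (enum machine_mode mode, rtx op0, rtx op1)
{
  rtx c0 = equiv_constant (op0);	/* e.g. (const_int 8) */
  rtx c1 = equiv_constant (op1);	/* e.g. (const_int 4) */

  if (c0 != 0 && c1 != 0)
    /* Gives (const_int 12) when both lookups succeed.  */
    return simplify_binary_operation (PLUS, mode, c0, c1);
  return 0;
}
#endif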
4338 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4339 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4340 least-significant part of X.
4341 MODE specifies how big a part of X to return.
4343 If the requested operation cannot be done, 0 is returned.
4345 This is similar to gen_lowpart in emit-rtl.c. */
4348 gen_lowpart_if_possible (enum machine_mode mode, rtx x)
4350 rtx result = gen_lowpart_common (mode, x);
4354 else if (GET_CODE (x) == MEM)
4356 /* This is the only other case we handle. */
4360 if (WORDS_BIG_ENDIAN)
4361 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4362 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4363 if (BYTES_BIG_ENDIAN)
4364 /* Adjust the address so that the address-after-the-data is
4365 unchanged. */
4366 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4367 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4369 new = adjust_address_nv (x, mode, offset);
4370 if (! memory_address_p (mode, XEXP (new, 0)))
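/* A worked example of the offset arithmetic above, with invented
   target parameters: taking the QImode lowpart of a DImode MEM on a
   machine with 4-byte words that is both WORDS_BIG_ENDIAN and
   BYTES_BIG_ENDIAN:

	word step: offset = MAX (8, 4) - MAX (1, 4) = 4
	byte step: offset -= MIN (4, 1) - MIN (4, 8), so offset = 7

   and byte 7 is indeed the least-significant byte of an 8-byte
   big-endian value. */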
4379 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4380 branch. It will be zero if not.
4382 In certain cases, this can cause us to add an equivalence. For example,
4383 if we are following the taken case of
4384 if (i == 2)
4385 we can add the fact that `i' and `2' are now equivalent.
4387 In any case, we can record that this comparison was passed. If the same
4388 comparison is seen later, we will know its value. */
4391 record_jump_equiv (rtx insn, int taken)
4393 int cond_known_true;
4396 enum machine_mode mode, mode0, mode1;
4397 int reversed_nonequality = 0;
4400 /* Ensure this is the right kind of insn. */
4401 if (! any_condjump_p (insn))
4403 set = pc_set (insn);
4405 /* See if this jump condition is known true or false. */
4407 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4409 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4411 /* Get the type of comparison being done and the operands being compared.
4412 If we had to reverse a non-equality condition, record that fact so we
4413 know that it isn't valid for floating-point. */
4414 code = GET_CODE (XEXP (SET_SRC (set), 0));
4415 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4416 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4418 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4419 if (! cond_known_true)
4421 code = reversed_comparison_code_parts (code, op0, op1, insn);
4423 /* Don't remember if we can't find the inverse. */
4424 if (code == UNKNOWN)
4428 /* The mode is the mode of the non-constant. */
4430 if (mode1 != VOIDmode)
4433 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
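/* Concretely: following the taken arm of `if (i == 2)' gives a known
   true EQ, so record_jump_cond below can put `i' and `(const_int 2)'
   in one equivalence class. On the fall-through arm the code is first
   reversed to NE (setting reversed_nonequality), and only the
   comparison itself is remembered. */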
4436 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4437 REVERSED_NONEQUALITY is nonzero if CODE had to be reversed.
4438 Make any useful entries we can with that information. Called from
4439 above function and called recursively. */
4442 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4443 rtx op1, int reversed_nonequality)
4445 unsigned op0_hash, op1_hash;
4446 int op0_in_memory, op1_in_memory;
4447 struct table_elt *op0_elt, *op1_elt;
4449 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4450 we know that they are also equal in the smaller mode (this is also
4451 true for all smaller modes whether or not there is a SUBREG, but
4452 is not worth testing for with no SUBREG). */
4454 /* Note that GET_MODE (op0) may not equal MODE. */
4455 if (code == EQ && GET_CODE (op0) == SUBREG
4456 && (GET_MODE_SIZE (GET_MODE (op0))
4457 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4459 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4460 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4462 record_jump_cond (code, mode, SUBREG_REG (op0),
4463 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4464 reversed_nonequality);
4467 if (code == EQ && GET_CODE (op1) == SUBREG
4468 && (GET_MODE_SIZE (GET_MODE (op1))
4469 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4471 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4472 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4474 record_jump_cond (code, mode, SUBREG_REG (op1),
4475 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4476 reversed_nonequality);
4479 /* Similarly, if this is an NE comparison, and either is a SUBREG
4480 making a smaller mode, we know the whole thing is also NE. */
4482 /* Note that GET_MODE (op0) may not equal MODE;
4483 if we test MODE instead, we can get an infinite recursion
4484 alternating between two modes each wider than MODE. */
4486 if (code == NE && GET_CODE (op0) == SUBREG
4487 && subreg_lowpart_p (op0)
4488 && (GET_MODE_SIZE (GET_MODE (op0))
4489 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4491 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4492 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4494 record_jump_cond (code, mode, SUBREG_REG (op0),
4495 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4496 reversed_nonequality);
4499 if (code == NE && GET_CODE (op1) == SUBREG
4500 && subreg_lowpart_p (op1)
4501 && (GET_MODE_SIZE (GET_MODE (op1))
4502 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4504 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4505 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4507 record_jump_cond (code, mode, SUBREG_REG (op1),
4508 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4509 reversed_nonequality);
4512 /* Hash both operands. */
4515 hash_arg_in_memory = 0;
4516 op0_hash = HASH (op0, mode);
4517 op0_in_memory = hash_arg_in_memory;
4523 hash_arg_in_memory = 0;
4524 op1_hash = HASH (op1, mode);
4525 op1_in_memory = hash_arg_in_memory;
4530 /* Look up both operands. */
4531 op0_elt = lookup (op0, op0_hash, mode);
4532 op1_elt = lookup (op1, op1_hash, mode);
4534 /* If both operands are already equivalent or if they are not in the
4535 table but are identical, do nothing. */
4536 if ((op0_elt != 0 && op1_elt != 0
4537 && op0_elt->first_same_value == op1_elt->first_same_value)
4538 || op0 == op1 || rtx_equal_p (op0, op1))
4541 /* If we aren't setting two things equal, all we can do is save this
4542 comparison. Similarly if this is floating-point. In the latter
4543 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4544 If we record the equality, we might inadvertently delete code
4545 whose intent was to change -0 to +0. */
4547 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4549 struct qty_table_elem *ent;
4552 /* If we reversed a floating-point comparison, if OP0 is not a
4553 register, or if OP1 is neither a register nor a constant, we can't
4554 do anything. */
4556 if (GET_CODE (op1) != REG)
4557 op1 = equiv_constant (op1);
4559 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4560 || GET_CODE (op0) != REG || op1 == 0)
4563 /* Put OP0 in the hash table if it isn't already. This gives it a
4564 new quantity number. */
4567 if (insert_regs (op0, NULL, 0))
4569 rehash_using_reg (op0);
4570 op0_hash = HASH (op0, mode);
4572 /* If OP0 is contained in OP1, this changes its hash code
4573 as well. Faster to rehash than to check, except
4574 for the simple case of a constant. */
4575 if (! CONSTANT_P (op1))
4576 op1_hash = HASH (op1, mode);
4579 op0_elt = insert (op0, NULL, op0_hash, mode);
4580 op0_elt->in_memory = op0_in_memory;
4583 qty = REG_QTY (REGNO (op0));
4584 ent = &qty_table[qty];
4586 ent->comparison_code = code;
4587 if (GET_CODE (op1) == REG)
4589 /* Look it up again--in case op0 and op1 are the same. */
4590 op1_elt = lookup (op1, op1_hash, mode);
4592 /* Put OP1 in the hash table so it gets a new quantity number. */
4595 if (insert_regs (op1, NULL, 0))
4597 rehash_using_reg (op1);
4598 op1_hash = HASH (op1, mode);
4601 op1_elt = insert (op1, NULL, op1_hash, mode);
4602 op1_elt->in_memory = op1_in_memory;
4605 ent->comparison_const = NULL_RTX;
4606 ent->comparison_qty = REG_QTY (REGNO (op1));
4610 ent->comparison_const = op1;
4611 ent->comparison_qty = -1;
4617 /* If either side is still missing an equivalence, make it now,
4618 then merge the equivalences. */
4622 if (insert_regs (op0, NULL, 0))
4624 rehash_using_reg (op0);
4625 op0_hash = HASH (op0, mode);
4628 op0_elt = insert (op0, NULL, op0_hash, mode);
4629 op0_elt->in_memory = op0_in_memory;
4634 if (insert_regs (op1, NULL, 0))
4636 rehash_using_reg (op1);
4637 op1_hash = HASH (op1, mode);
4640 op1_elt = insert (op1, NULL, op1_hash, mode);
4641 op1_elt->in_memory = op1_in_memory;
4644 merge_equiv_classes (op0_elt, op1_elt);
4645 last_jump_equiv_class = op0_elt;
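/* An invented example of the SUBREG handling at the top of this
   function: given EQ between the paradoxical (subreg:DI (reg:SI 80) 0)
   and (reg:DI 81), the recursion also records that (reg:SI 80) equals
   the SImode lowpart of (reg:DI 81), since equality of the wider
   values implies equality of the narrower ones. */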
4648 /* CSE processing for one instruction.
4649 First simplify sources and addresses of all assignments
4650 in the instruction, using previously-computed equivalent values.
4651 Then install the new sources and destinations in the table
4652 of available values.
4654 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4655 the insn. It means that INSN is inside a libcall block. In this
4656 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4658 /* Data on one SET contained in the instruction. */
4662 /* The SET rtx itself. */
4664 /* The SET_SRC of the rtx (the original value, if it is changing). */
4666 /* The hash-table element for the SET_SRC of the SET. */
4667 struct table_elt *src_elt;
4668 /* Hash value for the SET_SRC. */
4670 /* Hash value for the SET_DEST. */
4672 /* The SET_DEST, with SUBREG, etc., stripped. */
4674 /* Nonzero if the SET_SRC is in memory. */
4676 /* Nonzero if the SET_SRC contains something
4677 whose value cannot be predicted and understood. */
4679 /* Original machine mode, in case it becomes a CONST_INT.
4680 The size of this field should match the size of the mode
4681 field of struct rtx_def (see rtl.h). */
4682 ENUM_BITFIELD(machine_mode) mode : 8;
4683 /* A constant equivalent for SET_SRC, if any. */
4685 /* Original SET_SRC value used for libcall notes. */
4687 /* Hash value of constant equivalent for SET_SRC. */
4688 unsigned src_const_hash;
4689 /* Table entry for constant equivalent for SET_SRC, if any. */
4690 struct table_elt *src_const_elt;
4694 cse_insn (rtx insn, rtx libcall_insn)
4696 rtx x = PATTERN (insn);
4702 /* Records what this insn does to set CC0. */
4703 rtx this_insn_cc0 = 0;
4704 enum machine_mode this_insn_cc0_mode = VOIDmode;
4708 struct table_elt *src_eqv_elt = 0;
4709 int src_eqv_volatile = 0;
4710 int src_eqv_in_memory = 0;
4711 unsigned src_eqv_hash = 0;
4713 struct set *sets = (struct set *) 0;
4717 /* Find all the SETs and CLOBBERs in this instruction.
4718 Record all the SETs in the array `sets' and count them.
4719 Also determine whether there is a CLOBBER that invalidates
4720 all memory references, or all references at varying addresses. */
4722 if (GET_CODE (insn) == CALL_INSN)
4724 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4726 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4727 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4728 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4732 if (GET_CODE (x) == SET)
4734 sets = alloca (sizeof (struct set));
4737 /* Ignore SETs that are unconditional jumps.
4738 They never need cse processing, so this does not hurt.
4739 The reason is not efficiency but rather
4740 so that we can test at the end for instructions
4741 that have been simplified to unconditional jumps
4742 and not be misled by unchanged instructions
4743 that were unconditional jumps to begin with. */
4744 if (SET_DEST (x) == pc_rtx
4745 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4748 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4749 The hard function value register is used only once, to copy to
4750 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4751 Ensure we invalidate the destination register. On the 80386 no
4752 other code would invalidate it since it is a fixed_reg.
4753 We need not check the return of apply_change_group; see canon_reg. */
4755 else if (GET_CODE (SET_SRC (x)) == CALL)
4757 canon_reg (SET_SRC (x), insn);
4758 apply_change_group ();
4759 fold_rtx (SET_SRC (x), insn);
4760 invalidate (SET_DEST (x), VOIDmode);
4765 else if (GET_CODE (x) == PARALLEL)
4767 int lim = XVECLEN (x, 0);
4769 sets = alloca (lim * sizeof (struct set));
4771 /* Find all regs explicitly clobbered in this insn,
4772 and ensure they are not replaced with any other regs
4773 elsewhere in this insn.
4774 When a reg that is clobbered is also used for input,
4775 we should presume that that is for a reason,
4776 and we should not substitute some other register
4777 which is not supposed to be clobbered.
4778 Therefore, this loop cannot be merged into the one below
4779 because a CALL may precede a CLOBBER and refer to the
4780 value clobbered. We must not let a canonicalization do
4781 anything in that case. */
4782 for (i = 0; i < lim; i++)
4784 rtx y = XVECEXP (x, 0, i);
4785 if (GET_CODE (y) == CLOBBER)
4787 rtx clobbered = XEXP (y, 0);
4789 if (GET_CODE (clobbered) == REG
4790 || GET_CODE (clobbered) == SUBREG)
4791 invalidate (clobbered, VOIDmode);
4792 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4793 || GET_CODE (clobbered) == ZERO_EXTRACT)
4794 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4798 for (i = 0; i < lim; i++)
4800 rtx y = XVECEXP (x, 0, i);
4801 if (GET_CODE (y) == SET)
4803 /* As above, we ignore unconditional jumps and call-insns and
4804 ignore the result of apply_change_group. */
4805 if (GET_CODE (SET_SRC (y)) == CALL)
4807 canon_reg (SET_SRC (y), insn);
4808 apply_change_group ();
4809 fold_rtx (SET_SRC (y), insn);
4810 invalidate (SET_DEST (y), VOIDmode);
4812 else if (SET_DEST (y) == pc_rtx
4813 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4816 sets[n_sets++].rtl = y;
4818 else if (GET_CODE (y) == CLOBBER)
4820 /* If we clobber memory, canon the address.
4821 This does nothing when a register is clobbered
4822 because we have already invalidated the reg. */
4823 if (GET_CODE (XEXP (y, 0)) == MEM)
4824 canon_reg (XEXP (y, 0), NULL_RTX);
4826 else if (GET_CODE (y) == USE
4827 && ! (GET_CODE (XEXP (y, 0)) == REG
4828 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4829 canon_reg (y, NULL_RTX);
4830 else if (GET_CODE (y) == CALL)
4832 /* The result of apply_change_group can be ignored; see
4834 canon_reg (y, insn);
4835 apply_change_group ();
4840 else if (GET_CODE (x) == CLOBBER)
4842 if (GET_CODE (XEXP (x, 0)) == MEM)
4843 canon_reg (XEXP (x, 0), NULL_RTX);
4846 /* Canonicalize a USE of a pseudo register or memory location. */
4847 else if (GET_CODE (x) == USE
4848 && ! (GET_CODE (XEXP (x, 0)) == REG
4849 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4850 canon_reg (XEXP (x, 0), NULL_RTX);
4851 else if (GET_CODE (x) == CALL)
4853 /* The result of apply_change_group can be ignored; see canon_reg. */
4854 canon_reg (x, insn);
4855 apply_change_group ();
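/* Throughout the above, canon_reg replaces each register with the
   first register of its quantity chain. An invented example: if
   reg 99 was copied from reg 98 earlier in this extended basic block,
   a USE of (reg:SI 99) is canonicalized to (reg:SI 98), so equal
   expressions hash and compare equal. */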
4859 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4860 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4861 is handled specially for this case, and if it isn't set, then there will
4862 be no equivalence for the destination. */
4863 if (n_sets == 1 && REG_NOTES (insn) != 0
4864 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4865 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4866 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4868 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4869 XEXP (tem, 0) = src_eqv;
4872 /* Canonicalize sources and addresses of destinations.
4873 We do this in a separate pass to avoid problems when a MATCH_DUP is
4874 present in the insn pattern. In that case, we want to ensure that
4875 we don't break the duplicate nature of the pattern. So we will replace
4876 both operands at the same time. Otherwise, we would fail to find an
4877 equivalent substitution in the loop calling validate_change below.
4879 We used to suppress canonicalization of DEST if it appears in SRC,
4880 but we don't do this any more. */
4882 for (i = 0; i < n_sets; i++)
4884 rtx dest = SET_DEST (sets[i].rtl);
4885 rtx src = SET_SRC (sets[i].rtl);
4886 rtx new = canon_reg (src, insn);
4889 sets[i].orig_src = src;
4890 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4891 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4892 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4893 || (insn_code = recog_memoized (insn)) < 0
4894 || insn_data[insn_code].n_dups > 0)
4895 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4897 SET_SRC (sets[i].rtl) = new;
4899 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4901 validate_change (insn, &XEXP (dest, 1),
4902 canon_reg (XEXP (dest, 1), insn), 1);
4903 validate_change (insn, &XEXP (dest, 2),
4904 canon_reg (XEXP (dest, 2), insn), 1);
4907 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4908 || GET_CODE (dest) == ZERO_EXTRACT
4909 || GET_CODE (dest) == SIGN_EXTRACT)
4910 dest = XEXP (dest, 0);
4912 if (GET_CODE (dest) == MEM)
4913 canon_reg (dest, insn);
4916 /* Now that we have done all the replacements, we can apply the change
4917 group and see if they all work. Note that this will cause some
4918 canonicalizations that would have worked individually not to be applied
4919 because some other canonicalization didn't work, but this should not
4920 occur often.
4922 The result of apply_change_group can be ignored; see canon_reg. */
4924 apply_change_group ();
4926 /* Set sets[i].src_elt to the class each source belongs to.
4927 Detect assignments from or to volatile things
4928 and set sets[i] to zero so they will be ignored
4929 in the rest of this function.
4931 Nothing in this loop changes the hash table or the register chains. */
4933 for (i = 0; i < n_sets; i++)
4937 struct table_elt *elt = 0, *p;
4938 enum machine_mode mode;
4941 rtx src_related = 0;
4942 struct table_elt *src_const_elt = 0;
4943 int src_cost = MAX_COST;
4944 int src_eqv_cost = MAX_COST;
4945 int src_folded_cost = MAX_COST;
4946 int src_related_cost = MAX_COST;
4947 int src_elt_cost = MAX_COST;
4948 int src_regcost = MAX_COST;
4949 int src_eqv_regcost = MAX_COST;
4950 int src_folded_regcost = MAX_COST;
4951 int src_related_regcost = MAX_COST;
4952 int src_elt_regcost = MAX_COST;
4953 /* Set nonzero if we need to call force_const_mem on the
4954 contents of src_folded before using it. */
4955 int src_folded_force_flag = 0;
4957 dest = SET_DEST (sets[i].rtl);
4958 src = SET_SRC (sets[i].rtl);
4960 /* If SRC is a constant that has no machine mode,
4961 hash it with the destination's machine mode.
4962 This way we can keep different modes separate. */
4964 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4965 sets[i].mode = mode;
4969 enum machine_mode eqvmode = mode;
4970 if (GET_CODE (dest) == STRICT_LOW_PART)
4971 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4973 hash_arg_in_memory = 0;
4974 src_eqv_hash = HASH (src_eqv, eqvmode);
4976 /* Find the equivalence class for the equivalent expression. */
4979 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4981 src_eqv_volatile = do_not_record;
4982 src_eqv_in_memory = hash_arg_in_memory;
4985 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4986 value of the INNER register, not the destination. So it is not
4987 a valid substitution for the source. But save it for later. */
4988 if (GET_CODE (dest) == STRICT_LOW_PART)
4991 src_eqv_here = src_eqv;
4993 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4994 simplified result, which may not necessarily be valid. */
4995 src_folded = fold_rtx (src, insn);
4998 /* ??? This caused bad code to be generated for the m68k port with -O2.
4999 Suppose src is (CONST_INT -1), and that after truncation src_folded
5000 is (CONST_INT 3). Suppose src_folded is then used for src_const.
5001 At the end we will add src and src_const to the same equivalence
5002 class. We now have 3 and -1 on the same equivalence class. This
5003 causes later instructions to be mis-optimized. */
5004 /* If storing a constant in a bitfield, pre-truncate the constant
5005 so we will be able to record it later. */
5006 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5007 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5009 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5011 if (GET_CODE (src) == CONST_INT
5012 && GET_CODE (width) == CONST_INT
5013 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5014 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5016 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5017 << INTVAL (width)) - 1));
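/* A worked case of the pre-truncation above (values invented):
   storing (const_int 0x1f3) into a 4-bit ZERO_EXTRACT keeps only
   0x1f3 & ((1 << 4) - 1), i.e. (const_int 3), which is what the field
   will actually hold and therefore the only value worth recording. */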
5021 /* Compute SRC's hash code, and also notice if it
5022 should not be recorded at all. In that case,
5023 prevent any further processing of this assignment. */
5025 hash_arg_in_memory = 0;
5028 sets[i].src_hash = HASH (src, mode);
5029 sets[i].src_volatile = do_not_record;
5030 sets[i].src_in_memory = hash_arg_in_memory;
5032 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5033 a pseudo, do not record SRC. Using SRC as a replacement for
5034 anything else will be incorrect in that situation. Note that
5035 this usually occurs only for stack slots, in which case all the
5036 RTL would be referring to SRC, so we don't lose any optimization
5037 opportunities by not having SRC in the hash table. */
5039 if (GET_CODE (src) == MEM
5040 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5041 && GET_CODE (dest) == REG
5042 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5043 sets[i].src_volatile = 1;
5045 #if 0
5046 /* It is no longer clear why we used to do this, but it doesn't
5047 appear to still be needed. So let's try without it since this
5048 code hurts cse'ing widened ops. */
5049 /* If source is a perverse subreg (such as QI treated as an SI),
5050 treat it as volatile. It may do the work of an SI in one context
5051 where the extra bits are not being used, but cannot replace an SI
5052 in general. */
5053 if (GET_CODE (src) == SUBREG
5054 && (GET_MODE_SIZE (GET_MODE (src))
5055 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5056 sets[i].src_volatile = 1;
5057 #endif
5059 /* Locate all possible equivalent forms for SRC. Try to replace
5060 SRC in the insn with each cheaper equivalent.
5062 We have the following types of equivalents: SRC itself, a folded
5063 version, a value given in a REG_EQUAL note, or a value related
5064 to a constant.
5066 Each of these equivalents may be part of an additional class
5067 of equivalents (if more than one is in the table, they must be in
5068 the same class; we check for this).
5070 If the source is volatile, we don't do any table lookups.
5072 We note any constant equivalent for possible later use in a
5073 REG_NOTE. */
5075 if (!sets[i].src_volatile)
5076 elt = lookup (src, sets[i].src_hash, mode);
5078 sets[i].src_elt = elt;
5080 if (elt && src_eqv_here && src_eqv_elt)
5082 if (elt->first_same_value != src_eqv_elt->first_same_value)
5084 /* The REG_EQUAL is indicating that two formerly distinct
5085 classes are now equivalent. So merge them. */
5086 merge_equiv_classes (elt, src_eqv_elt);
5087 src_eqv_hash = HASH (src_eqv, elt->mode);
5088 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5094 else if (src_eqv_elt)
5097 /* Try to find a constant somewhere and record it in `src_const'.
5098 Record its table element, if any, in `src_const_elt'. Look in
5099 any known equivalences first. (If the constant is not in the
5100 table, also set `sets[i].src_const_hash'). */
5102 for (p = elt->first_same_value; p; p = p->next_same_value)
5106 src_const_elt = elt;
5111 && (CONSTANT_P (src_folded)
5112 /* Consider (minus (label_ref L1) (label_ref L2)) as
5113 "constant" here so we will record it. This allows us
5114 to fold switch statements when an ADDR_DIFF_VEC is used. */
5115 || (GET_CODE (src_folded) == MINUS
5116 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5117 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5118 src_const = src_folded, src_const_elt = elt;
5119 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5120 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5122 /* If we don't know if the constant is in the table, get its
5123 hash code and look it up. */
5124 if (src_const && src_const_elt == 0)
5126 sets[i].src_const_hash = HASH (src_const, mode);
5127 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5130 sets[i].src_const = src_const;
5131 sets[i].src_const_elt = src_const_elt;
5133 /* If the constant and our source are both in the table, mark them as
5134 equivalent. Otherwise, if a constant is in the table but the source
5135 isn't, set ELT to it. */
5136 if (src_const_elt && elt
5137 && src_const_elt->first_same_value != elt->first_same_value)
5138 merge_equiv_classes (elt, src_const_elt);
5139 else if (src_const_elt && elt == 0)
5140 elt = src_const_elt;
5142 /* See if there is a register linearly related to a constant
5143 equivalent of SRC. */
5145 && (GET_CODE (src_const) == CONST
5146 || (src_const_elt && src_const_elt->related_value != 0)))
5148 src_related = use_related_value (src_const, src_const_elt);
5151 struct table_elt *src_related_elt
5152 = lookup (src_related, HASH (src_related, mode), mode);
5153 if (src_related_elt && elt)
5155 if (elt->first_same_value
5156 != src_related_elt->first_same_value)
5157 /* This can occur when we previously saw a CONST
5158 involving a SYMBOL_REF and then see the SYMBOL_REF
5159 twice. Merge the involved classes. */
5160 merge_equiv_classes (elt, src_related_elt);
5163 src_related_elt = 0;
5165 else if (src_related_elt && elt == 0)
5166 elt = src_related_elt;
5170 /* See if we have a CONST_INT that is already in a register in a
5171 wider mode. */
5173 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5174 && GET_MODE_CLASS (mode) == MODE_INT
5175 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5177 enum machine_mode wider_mode;
5179 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5180 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5181 && src_related == 0;
5182 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5184 struct table_elt *const_elt
5185 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5190 for (const_elt = const_elt->first_same_value;
5191 const_elt; const_elt = const_elt->next_same_value)
5192 if (GET_CODE (const_elt->exp) == REG)
5194 src_related = gen_lowpart_if_possible (mode,
5201 /* Another possibility is that we have an AND with a constant in
5202 a mode narrower than a word. If so, it might have been generated
5203 as part of an "if" which would narrow the AND. If we already
5204 have done the AND in a wider mode, we can use a SUBREG of that
5205 value. */
5207 if (flag_expensive_optimizations && ! src_related
5208 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5209 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5211 enum machine_mode tmode;
5212 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5214 for (tmode = GET_MODE_WIDER_MODE (mode);
5215 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5216 tmode = GET_MODE_WIDER_MODE (tmode))
5218 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5219 struct table_elt *larger_elt;
5223 PUT_MODE (new_and, tmode);
5224 XEXP (new_and, 0) = inner;
5225 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5226 if (larger_elt == 0)
5229 for (larger_elt = larger_elt->first_same_value;
5230 larger_elt; larger_elt = larger_elt->next_same_value)
5231 if (GET_CODE (larger_elt->exp) == REG)
5234 = gen_lowpart_if_possible (mode, larger_elt->exp);
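/* For instance (register numbers invented): after

	(set (reg:SI 85) (and:SI (reg:SI 86) (const_int 15)))

   a later (and:HI (subreg:HI (reg:SI 86) 0) (const_int 15)) can be
   satisfied by the HImode lowpart of (reg:SI 85), because the wider
   AND already computed the bits we need. */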
5244 #ifdef LOAD_EXTEND_OP
5245 /* See if a MEM has already been loaded with a widening operation;
5246 if it has, we can use a subreg of that. Many CISC machines
5247 also have such operations, but this is only likely to be
5248 beneficial on these machines. */
5250 if (flag_expensive_optimizations && src_related == 0
5251 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5252 && GET_MODE_CLASS (mode) == MODE_INT
5253 && GET_CODE (src) == MEM && ! do_not_record
5254 && LOAD_EXTEND_OP (mode) != NIL)
5256 enum machine_mode tmode;
5258 /* Set what we are trying to extend and the operation it might
5259 have been extended with. */
5260 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5261 XEXP (memory_extend_rtx, 0) = src;
5263 for (tmode = GET_MODE_WIDER_MODE (mode);
5264 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5265 tmode = GET_MODE_WIDER_MODE (tmode))
5267 struct table_elt *larger_elt;
5269 PUT_MODE (memory_extend_rtx, tmode);
5270 larger_elt = lookup (memory_extend_rtx,
5271 HASH (memory_extend_rtx, tmode), tmode);
5272 if (larger_elt == 0)
5275 for (larger_elt = larger_elt->first_same_value;
5276 larger_elt; larger_elt = larger_elt->next_same_value)
5277 if (GET_CODE (larger_elt->exp) == REG)
5279 src_related = gen_lowpart_if_possible (mode,
5288 #endif /* LOAD_EXTEND_OP */
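/* An invented example: on a target where LOAD_EXTEND_OP (QImode) is
   ZERO_EXTEND, once

	(set (reg:SI 90) (zero_extend:SI (mem:QI addr)))

   has been seen, a later QImode load of the same address can use the
   QImode lowpart of (reg:SI 90) rather than reading memory again. */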
5290 if (src == src_folded)
5293 /* At this point, ELT, if nonzero, points to a class of expressions
5294 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5295 and SRC_RELATED, if nonzero, each contain additional equivalent
5296 expressions. Prune these latter expressions by deleting expressions
5297 already in the equivalence class.
5299 Check for an equivalent identical to the destination. If found,
5300 this is the preferred equivalent since it will likely lead to
5301 elimination of the insn. Indicate this by placing it in
5302 `src_related'. */
5305 elt = elt->first_same_value;
5306 for (p = elt; p; p = p->next_same_value)
5308 enum rtx_code code = GET_CODE (p->exp);
5310 /* If the expression is not valid, ignore it. Then we do not
5311 have to check for validity below. In most cases, we can use
5312 `rtx_equal_p', since canonicalization has already been done. */
5313 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5316 /* Also skip paradoxical subregs, unless that's what we're
5317 looking for. */
5319 && (GET_MODE_SIZE (GET_MODE (p->exp))
5320 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5322 && GET_CODE (src) == SUBREG
5323 && GET_MODE (src) == GET_MODE (p->exp)
5324 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5325 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5328 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5330 else if (src_folded && GET_CODE (src_folded) == code
5331 && rtx_equal_p (src_folded, p->exp))
5333 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5334 && rtx_equal_p (src_eqv_here, p->exp))
5336 else if (src_related && GET_CODE (src_related) == code
5337 && rtx_equal_p (src_related, p->exp))
5340 /* If this is the same as the destination of the insn, we want
5341 to prefer it. Copy it to src_related. The code below will
5342 then give it a negative cost. */
5343 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5347 /* Find the cheapest valid equivalent, trying all the available
5348 possibilities. Prefer items not in the hash table to ones
5349 that are when they are equal cost. Note that we can never
5350 worsen an insn as the current contents will also succeed.
5351 If we find an equivalent identical to the destination, use it as best,
5352 since this insn will probably be eliminated in that case. */
5355 if (rtx_equal_p (src, dest))
5356 src_cost = src_regcost = -1;
5359 src_cost = COST (src);
5360 src_regcost = approx_reg_cost (src);
5366 if (rtx_equal_p (src_eqv_here, dest))
5367 src_eqv_cost = src_eqv_regcost = -1;
5370 src_eqv_cost = COST (src_eqv_here);
5371 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5377 if (rtx_equal_p (src_folded, dest))
5378 src_folded_cost = src_folded_regcost = -1;
5381 src_folded_cost = COST (src_folded);
5382 src_folded_regcost = approx_reg_cost (src_folded);
5388 if (rtx_equal_p (src_related, dest))
5389 src_related_cost = src_related_regcost = -1;
5392 src_related_cost = COST (src_related);
5393 src_related_regcost = approx_reg_cost (src_related);
5397 /* If this was an indirect jump insn, a known label will really be
5398 cheaper even though it looks more expensive. */
5399 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5400 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5402 /* Terminate loop when replacement made. This must terminate since
5403 the current contents will be tested and will always be valid. */
5408 /* Skip invalid entries. */
5409 while (elt && GET_CODE (elt->exp) != REG
5410 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5411 elt = elt->next_same_value;
5413 /* A paradoxical subreg would be bad here: it'll be the right
5414 size, but later may be adjusted so that the upper bits aren't
5415 what we want. So reject it. */
5417 && GET_CODE (elt->exp) == SUBREG
5418 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5419 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5420 /* It is okay, though, if the rtx we're trying to match
5421 will ignore any of the bits we can't predict. */
5423 && GET_CODE (src) == SUBREG
5424 && GET_MODE (src) == GET_MODE (elt->exp)
5425 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5426 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5428 elt = elt->next_same_value;
5434 src_elt_cost = elt->cost;
5435 src_elt_regcost = elt->regcost;
5438 /* Find cheapest and skip it for the next time. For items
5439 of equal cost, use this order:
5440 src_folded, src, src_eqv, src_related and hash table entry. */
5442 && preferrable (src_folded_cost, src_folded_regcost,
5443 src_cost, src_regcost) <= 0
5444 && preferrable (src_folded_cost, src_folded_regcost,
5445 src_eqv_cost, src_eqv_regcost) <= 0
5446 && preferrable (src_folded_cost, src_folded_regcost,
5447 src_related_cost, src_related_regcost) <= 0
5448 && preferrable (src_folded_cost, src_folded_regcost,
5449 src_elt_cost, src_elt_regcost) <= 0)
5451 trial = src_folded, src_folded_cost = MAX_COST;
5452 if (src_folded_force_flag)
5454 rtx forced = force_const_mem (mode, trial);
5460 && preferrable (src_cost, src_regcost,
5461 src_eqv_cost, src_eqv_regcost) <= 0
5462 && preferrable (src_cost, src_regcost,
5463 src_related_cost, src_related_regcost) <= 0
5464 && preferrable (src_cost, src_regcost,
5465 src_elt_cost, src_elt_regcost) <= 0)
5466 trial = src, src_cost = MAX_COST;
5467 else if (src_eqv_here
5468 && preferrable (src_eqv_cost, src_eqv_regcost,
5469 src_related_cost, src_related_regcost) <= 0
5470 && preferrable (src_eqv_cost, src_eqv_regcost,
5471 src_elt_cost, src_elt_regcost) <= 0)
5472 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5473 else if (src_related
5474 && preferrable (src_related_cost, src_related_regcost,
5475 src_elt_cost, src_elt_regcost) <= 0)
5476 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5479 trial = copy_rtx (elt->exp);
5480 elt = elt->next_same_value;
5481 src_elt_cost = MAX_COST;
5484 /* We don't normally have an insn matching (set (pc) (pc)), so
5485 check for this separately here. We will delete such an
5486 insn below.
5488 For other cases such as a table jump or conditional jump
5489 where we know the ultimate target, go ahead and replace the
5490 operand. While that may not make a valid insn, we will
5491 reemit the jump below (and also insert any necessary
5492 barriers). */
5493 if (n_sets == 1 && dest == pc_rtx
5495 || (GET_CODE (trial) == LABEL_REF
5496 && ! condjump_p (insn))))
5498 SET_SRC (sets[i].rtl) = trial;
5499 cse_jumps_altered = 1;
5503 /* Look for a substitution that makes a valid insn. */
5504 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5506 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5508 /* If we just made a substitution inside a libcall, then we
5509 need to make the same substitution in any notes attached
5510 to the RETVAL insn. */
5512 && (GET_CODE (sets[i].orig_src) == REG
5513 || GET_CODE (sets[i].orig_src) == SUBREG
5514 || GET_CODE (sets[i].orig_src) == MEM))
5515 simplify_replace_rtx (REG_NOTES (libcall_insn),
5516 sets[i].orig_src, copy_rtx (new));
5518 /* The result of apply_change_group can be ignored; see
5521 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5522 apply_change_group ();
5526 /* If we previously found constant pool entries for
5527 constants and this is a constant, try making a
5528 pool entry. Put it in src_folded unless we already have done
5529 this since that is where it likely came from. */
5531 else if (constant_pool_entries_cost
5532 && CONSTANT_P (trial)
5533 /* Reject cases that will abort in decode_rtx_const.
5534 On the alpha when simplifying a switch, we get
5535 (const (truncate (minus (label_ref) (label_ref)))). */
5536 && ! (GET_CODE (trial) == CONST
5537 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5538 /* Likewise on IA-64, except without the truncate. */
5539 && ! (GET_CODE (trial) == CONST
5540 && GET_CODE (XEXP (trial, 0)) == MINUS
5541 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5542 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5544 || (GET_CODE (src_folded) != MEM
5545 && ! src_folded_force_flag))
5546 && GET_MODE_CLASS (mode) != MODE_CC
5547 && mode != VOIDmode)
5549 src_folded_force_flag = 1;
5551 src_folded_cost = constant_pool_entries_cost;
5552 src_folded_regcost = constant_pool_entries_regcost;
5556 src = SET_SRC (sets[i].rtl);
5558 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5559 However, there is an important exception: If both are registers
5560 that are not the head of their equivalence class, replace SET_SRC
5561 with the head of the class. If we do not do this, we will have
5562 both registers live over a portion of the basic block. This way,
5563 their lifetimes will likely abut instead of overlapping. */
5564 if (GET_CODE (dest) == REG
5565 && REGNO_QTY_VALID_P (REGNO (dest)))
5567 int dest_q = REG_QTY (REGNO (dest));
5568 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5570 if (dest_ent->mode == GET_MODE (dest)
5571 && dest_ent->first_reg != REGNO (dest)
5572 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5573 /* Don't do this if the original insn had a hard reg as
5574 SET_SRC or SET_DEST. */
5575 && (GET_CODE (sets[i].src) != REG
5576 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5577 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5578 /* We can't call canon_reg here because it won't do anything if
5579 SRC is a hard register. */
5581 int src_q = REG_QTY (REGNO (src));
5582 struct qty_table_elem *src_ent = &qty_table[src_q];
5583 int first = src_ent->first_reg;
5585 = (first >= FIRST_PSEUDO_REGISTER
5586 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5588 /* We must use validate-change even for this, because this
5589 might be a special no-op instruction, suitable only to
5590 tag notes onto. */
5591 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5594 /* If we had a constant that is cheaper than what we are now
5595 setting SRC to, use that constant. We ignored it when we
5596 thought we could make this into a no-op. */
5597 if (src_const && COST (src_const) < COST (src)
5598 && validate_change (insn, &SET_SRC (sets[i].rtl),
5605 /* If we made a change, recompute SRC values. */
5606 if (src != sets[i].src)
5610 hash_arg_in_memory = 0;
5612 sets[i].src_hash = HASH (src, mode);
5613 sets[i].src_volatile = do_not_record;
5614 sets[i].src_in_memory = hash_arg_in_memory;
5615 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5618 /* If this is a single SET, we are setting a register, and we have an
5619 equivalent constant, we want to add a REG_NOTE. We don't want
5620 to write a REG_EQUAL note for a constant pseudo since verifying that
5621 that pseudo hasn't been eliminated is a pain. Such a note also
5622 won't help anything.
5624 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5625 which can be created for a reference to a compile time computable
5626 entry in a jump table. */
5628 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5629 && GET_CODE (src_const) != REG
5630 && ! (GET_CODE (src_const) == CONST
5631 && GET_CODE (XEXP (src_const, 0)) == MINUS
5632 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5633 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5635 /* We only want a REG_EQUAL note if src_const != src. */
5636 if (! rtx_equal_p (src, src_const))
5638 /* Make sure that the rtx is not shared. */
5639 src_const = copy_rtx (src_const);
5641 /* Record the actual constant value in a REG_EQUAL note,
5642 making a new one if one does not already exist. */
5643 set_unique_reg_note (insn, REG_EQUAL, src_const);
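/* The note produced here looks like the following (register number
   and values invented): for

	(set (reg:SI 95) (plus:SI (reg:SI 96) (const_int 4)))

   with reg 96 known to hold (const_int 8), the insn acquires
   (expr_list:REG_EQUAL (const_int 12) ...), preserving the constant
   for later passes after the hash table is discarded. */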
5647 /* Now deal with the destination. */
5650 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5651 to the MEM or REG within it. */
5652 while (GET_CODE (dest) == SIGN_EXTRACT
5653 || GET_CODE (dest) == ZERO_EXTRACT
5654 || GET_CODE (dest) == SUBREG
5655 || GET_CODE (dest) == STRICT_LOW_PART)
5656 dest = XEXP (dest, 0);
5658 sets[i].inner_dest = dest;
5660 if (GET_CODE (dest) == MEM)
5662 #ifdef PUSH_ROUNDING
5663 /* Stack pushes invalidate the stack pointer. */
5664 rtx addr = XEXP (dest, 0);
5665 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5666 && XEXP (addr, 0) == stack_pointer_rtx)
5667 invalidate (stack_pointer_rtx, Pmode);
5669 dest = fold_rtx (dest, insn);
5672 /* Compute the hash code of the destination now,
5673 before the effects of this instruction are recorded,
5674 since the register values used in the address computation
5675 are those before this instruction. */
5676 sets[i].dest_hash = HASH (dest, mode);
5678 /* Don't enter a bit-field in the hash table
5679 because the value in it after the store
5680 may not equal what was stored, due to truncation. */
5682 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5683 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5685 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5687 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5688 && GET_CODE (width) == CONST_INT
5689 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5690 && ! (INTVAL (src_const)
5691 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5692 /* Exception: if the value is constant,
5693 and it won't be truncated, record it. */
5697 /* This is chosen so that the destination will be invalidated
5698 but no new value will be recorded.
5699 We must invalidate because sometimes constant
5700 values can be recorded for bitfields. */
5701 sets[i].src_elt = 0;
5702 sets[i].src_volatile = 1;
5708 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5709 the insn. */
5710 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5712 /* One less use of the label this insn used to jump to. */
5714 cse_jumps_altered = 1;
5715 /* No more processing for this set. */
5719 /* If this SET is now setting PC to a label, we know it used to
5720 be a conditional or computed branch. */
5721 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5723 /* Now emit a BARRIER after the unconditional jump. */
5724 if (NEXT_INSN (insn) == 0
5725 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5726 emit_barrier_after (insn);
5728 /* We reemit the jump in as many cases as possible just in
5729 case the form of an unconditional jump is significantly
5730 different from a computed jump or conditional jump.
5732 If this insn has multiple sets, then reemitting the
5733 jump is nontrivial. So instead we just force rerecognition
5734 and hope for the best. */
5737 rtx new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5739 JUMP_LABEL (new) = XEXP (src, 0);
5740 LABEL_NUSES (XEXP (src, 0))++;
5744 /* Now emit a BARRIER after the unconditional jump. */
5745 if (NEXT_INSN (insn) == 0
5746 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5747 emit_barrier_after (insn);
5750 INSN_CODE (insn) = -1;
5752 never_reached_warning (insn, NULL);
5754 /* Do not bother deleting any unreachable code,
5755 let jump/flow do that. */
5757 cse_jumps_altered = 1;
5761 /* If destination is volatile, invalidate it and then do no further
5762 processing for this assignment. */
5764 else if (do_not_record)
5766 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5767 invalidate (dest, VOIDmode);
5768 else if (GET_CODE (dest) == MEM)
5770 /* Outgoing arguments for a libcall don't
5771 affect any recorded expressions. */
5772 if (! libcall_insn || insn == libcall_insn)
5773 invalidate (dest, VOIDmode);
5775 else if (GET_CODE (dest) == STRICT_LOW_PART
5776 || GET_CODE (dest) == ZERO_EXTRACT)
5777 invalidate (XEXP (dest, 0), GET_MODE (dest));
5781 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5782 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5785 /* If setting CC0, record what it was set to, or a constant, if it
5786 is equivalent to a constant. If it is being set to a floating-point
5787 value, make a COMPARE with the appropriate constant of 0. If we
5788 don't do this, later code can interpret this as a test against
5789 const0_rtx, which can cause problems if we try to put it into an
5790 insn as a floating-point operand. */
5791 if (dest == cc0_rtx)
5793 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5794 this_insn_cc0_mode = mode;
5795 if (FLOAT_MODE_P (mode))
5796 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5802 /* Now enter all non-volatile source expressions in the hash table
5803 if they are not already present.
5804 Record their equivalence classes in src_elt.
5805 This way we can insert the corresponding destinations into
5806 the same classes even if the actual sources are no longer in them
5807 (having been invalidated). */
5809 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5810 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5812 struct table_elt *elt;
5813 struct table_elt *classp = sets[0].src_elt;
5814 rtx dest = SET_DEST (sets[0].rtl);
5815 enum machine_mode eqvmode = GET_MODE (dest);
5817 if (GET_CODE (dest) == STRICT_LOW_PART)
5819 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5822 if (insert_regs (src_eqv, classp, 0))
5824 rehash_using_reg (src_eqv);
5825 src_eqv_hash = HASH (src_eqv, eqvmode);
5827 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5828 elt->in_memory = src_eqv_in_memory;
5831 /* Check to see if src_eqv_elt is the same as a set source which
5832 does not yet have an elt, and if so set the elt of the set source
5833 to src_eqv_elt. */
5834 for (i = 0; i < n_sets; i++)
5835 if (sets[i].rtl && sets[i].src_elt == 0
5836 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5837 sets[i].src_elt = src_eqv_elt;
5840 for (i = 0; i < n_sets; i++)
5841 if (sets[i].rtl && ! sets[i].src_volatile
5842 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5844 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5846 /* REG_EQUAL in setting a STRICT_LOW_PART
5847 gives an equivalent for the entire destination register,
5848 not just for the subreg being stored in now.
5849 This is a more interesting equivalence, so we arrange later
5850 to treat the entire reg as the destination. */
5851 sets[i].src_elt = src_eqv_elt;
5852 sets[i].src_hash = src_eqv_hash;
5856 /* Insert source and constant equivalent into hash table, if not
5857 already present. */
5858 struct table_elt *classp = src_eqv_elt;
5859 rtx src = sets[i].src;
5860 rtx dest = SET_DEST (sets[i].rtl);
5861 enum machine_mode mode
5862 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5864 /* It's possible that we have a source value known to be
5865 constant but don't have a REG_EQUAL note on the insn.
5866 Lack of a note will mean src_eqv_elt will be NULL. This
5867 can happen where we've generated a SUBREG to access a
5868 CONST_INT that is already in a register in a wider mode.
5869 Ensure that the source expression is put in the proper
5870 constant class. */
5872 classp = sets[i].src_const_elt;
5874 if (sets[i].src_elt == 0)
5876 /* Don't put a hard register source into the table if this is
5877 the last insn of a libcall. In this case, we only need
5878 to put src_eqv_elt in src_elt. */
5879 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5881 struct table_elt *elt;
5883 /* Note that these insert_regs calls cannot remove
5884 any of the src_elt's, because they would have failed to
5885 match if not still valid. */
5886 if (insert_regs (src, classp, 0))
5888 rehash_using_reg (src);
5889 sets[i].src_hash = HASH (src, mode);
5891 elt = insert (src, classp, sets[i].src_hash, mode);
5892 elt->in_memory = sets[i].src_in_memory;
5893 sets[i].src_elt = classp = elt;
5896 sets[i].src_elt = classp;
5898 if (sets[i].src_const && sets[i].src_const_elt == 0
5899 && src != sets[i].src_const
5900 && ! rtx_equal_p (sets[i].src_const, src))
5901 sets[i].src_elt = insert (sets[i].src_const, classp,
5902 sets[i].src_const_hash, mode);
5905 else if (sets[i].src_elt == 0)
5906 /* If we did not insert the source into the hash table (e.g., it was
5907 volatile), note the equivalence class for the REG_EQUAL value, if any,
5908 so that the destination goes into that class. */
5909 sets[i].src_elt = src_eqv_elt;
5911 invalidate_from_clobbers (x);
5913 /* Some registers are invalidated by subroutine calls. Memory is
5914 invalidated by non-constant calls. */
5916 if (GET_CODE (insn) == CALL_INSN)
5918 if (! CONST_OR_PURE_CALL_P (insn))
5919 invalidate_memory ();
5920 invalidate_for_call ();
5923 /* Now invalidate everything set by this instruction.
5924 If a SUBREG or other funny destination is being set,
5925 sets[i].rtl is still nonzero, so here we invalidate the reg
5926 a part of which is being set. */
5928 for (i = 0; i < n_sets; i++)
5931 /* We can't use the inner dest, because the mode associated with
5932 a ZERO_EXTRACT is significant. */
5933 rtx dest = SET_DEST (sets[i].rtl);
5935 /* Needed for registers to remove the register from its
5936 previous quantity's chain.
5937 Needed for memory if this is a nonvarying address, unless
5938 we have just done an invalidate_memory that covers even those. */
5939 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5940 invalidate (dest, VOIDmode);
5941 else if (GET_CODE (dest) == MEM)
5943 /* Outgoing arguments for a libcall don't
5944 affect any recorded expressions. */
5945 if (! libcall_insn || insn == libcall_insn)
5946 invalidate (dest, VOIDmode);
5948 else if (GET_CODE (dest) == STRICT_LOW_PART
5949 || GET_CODE (dest) == ZERO_EXTRACT)
5950 invalidate (XEXP (dest, 0), GET_MODE (dest));
5953 /* A volatile ASM invalidates everything. */
5954 if (GET_CODE (insn) == INSN
5955 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5956 && MEM_VOLATILE_P (PATTERN (insn)))
5957 flush_hash_table ();
5959 /* Make sure registers mentioned in destinations
5960 are safe for use in an expression to be inserted.
5961 This removes from the hash table
5962 any invalid entry that refers to one of these registers.
5964 We don't care about the return value from mention_regs because
5965 we are going to hash the SET_DEST values unconditionally. */
5967 for (i = 0; i < n_sets; i++)
5971 rtx x = SET_DEST (sets[i].rtl);
5973 if (GET_CODE (x) != REG)
5977 /* We used to rely on all references to a register becoming
5978 inaccessible when a register changes to a new quantity,
5979 since that changes the hash code. However, that is not
5980 safe, since after HASH_SIZE new quantities we get a
5981 hash 'collision' of a register with its own invalid
5982 entries. And since SUBREGs have been changed not to
5983 change their hash code with the hash code of the register,
5984 it wouldn't work any longer at all. So we have to check
5985 for any invalid references lying around now.
5986 This code is similar to the REG case in mention_regs,
5987 but it knows that reg_tick has been incremented, and
5988 it leaves reg_in_table as -1. */
5989 unsigned int regno = REGNO (x);
5990 unsigned int endregno
5991 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5992 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
5995 for (i = regno; i < endregno; i++)
5997 if (REG_IN_TABLE (i) >= 0)
5999 remove_invalid_refs (i);
6000 REG_IN_TABLE (i) = -1;
6007 /* We may have just removed some of the src_elt's from the hash table.
6008 So replace each one with the current head of the same class. */
6010 for (i = 0; i < n_sets; i++)
6013 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6014 /* If elt was removed, find current head of same class,
6015 or 0 if nothing remains of that class. */
6017 struct table_elt *elt = sets[i].src_elt;
6019 while (elt && elt->prev_same_value)
6020 elt = elt->prev_same_value;
6022 while (elt && elt->first_same_value == 0)
6023 elt = elt->next_same_value;
6024 sets[i].src_elt = elt ? elt->first_same_value : 0;
6028 /* Now insert the destinations into their equivalence classes. */
6030 for (i = 0; i < n_sets; i++)
6033 rtx dest = SET_DEST (sets[i].rtl);
6034 rtx inner_dest = sets[i].inner_dest;
6035 struct table_elt *elt;
6037 /* Don't record value if we are not supposed to risk allocating
6038 floating-point values in registers that might be wider than
6039 memory. */
6040 if ((flag_float_store
6041 && GET_CODE (dest) == MEM
6042 && FLOAT_MODE_P (GET_MODE (dest)))
6043 /* Don't record BLKmode values, because we don't know the
6044 size of it, and can't be sure that other BLKmode values
6045 have the same or smaller size. */
6046 || GET_MODE (dest) == BLKmode
6047 /* Don't record values of destinations set inside a libcall block
6048 since we might delete the libcall. Things should have been set
6049 up so we won't want to reuse such a value, but we play it safe
6050 here. */
6051 || libcall_insn
6052 /* If we didn't put a REG_EQUAL value or a source into the hash
6053 table, there is no point in recording DEST. */
6054 || sets[i].src_elt == 0
6055 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6056 or SIGN_EXTEND, don't record DEST since it can cause
6057 some tracking to be wrong.
6059 ??? Think about this more later. */
6060 || (GET_CODE (dest) == SUBREG
6061 && (GET_MODE_SIZE (GET_MODE (dest))
6062 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6063 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6064 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6067 /* STRICT_LOW_PART isn't part of the value BEING set,
6068 and neither is the SUBREG inside it.
6069 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6070 if (GET_CODE (dest) == STRICT_LOW_PART)
6071 dest = SUBREG_REG (XEXP (dest, 0));
6073 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6074 /* Registers must also be inserted into chains for quantities. */
6075 if (insert_regs (dest, sets[i].src_elt, 1))
6077 /* If `insert_regs' changes something, the hash code must be
6078 recalculated. */
6079 rehash_using_reg (dest);
6080 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6083 if (GET_CODE (inner_dest) == MEM
6084 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6085 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6086 that (MEM (ADDRESSOF (X))) is equivalent to Y.
6087 Consider the case in which the address of the MEM is
6088 passed to a function, which alters the MEM. Then, if we
6089 later use Y instead of the MEM we'll miss the update. */
6090 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6091 else
6092 elt = insert (dest, sets[i].src_elt,
6093 sets[i].dest_hash, GET_MODE (dest));
6095 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6096 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6097 || fixed_base_plus_p (XEXP (sets[i].inner_dest,
6098 0))));
6100 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6101 narrower than M2, and both M1 and M2 are the same number of words,
6102 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6103 make that equivalence as well.
6105 However, BAR may have equivalences for which gen_lowpart_if_possible
6106 will produce a simpler value than gen_lowpart_if_possible applied to
6107 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6108 BAR's equivalences. If we don't get a simplified form, make
6109 the SUBREG. It will not be used in an equivalence, but will
6110 cause two similar assignments to be detected.
6112 Note the loop below will find SUBREG_REG (DEST) since we have
6113 already entered SRC and DEST of the SET in the table. */
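/* A concrete illustration (hypothetical modes and register numbers, not
   from the original sources): on a target with 32-bit words, where M1 is
   SImode and M2 is HImode, the store

       (set (subreg:SI (reg:HI 100) 0) (reg:SI 101))

   covers one word on both sides, so we may also record

       (set (reg:HI 100) (subreg:HI (reg:SI 101) 0))

   letting a later occurrence of (subreg:HI (reg:SI 101) 0) be replaced
   by (reg:HI 100).  */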
6115 if (GET_CODE (dest) == SUBREG
6116 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6117 / UNITS_PER_WORD)
6118 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6119 && (GET_MODE_SIZE (GET_MODE (dest))
6120 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6121 && sets[i].src_elt != 0)
6123 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6124 struct table_elt *elt, *classp = 0;
6126 for (elt = sets[i].src_elt->first_same_value; elt;
6127 elt = elt->next_same_value)
6129 rtx new_src = 0;
6130 unsigned src_hash;
6131 struct table_elt *src_elt;
6132 int byte = 0;
6134 /* Ignore invalid entries. */
6135 if (GET_CODE (elt->exp) != REG
6136 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6137 continue;
6139 /* We may have already been playing subreg games. If the
6140 mode is already correct for the destination, use it. */
6141 if (GET_MODE (elt->exp) == new_mode)
6142 new_src = elt->exp;
6143 else
6145 /* Calculate big endian correction for the SUBREG_BYTE.
6146 We have already checked that M1 (GET_MODE (dest))
6147 is not narrower than M2 (new_mode). */
6148 if (BYTES_BIG_ENDIAN)
6149 byte = (GET_MODE_SIZE (GET_MODE (dest))
6150 - GET_MODE_SIZE (new_mode));
6152 new_src = simplify_gen_subreg (new_mode, elt->exp,
6153 GET_MODE (dest), byte);
6156 /* The call to simplify_gen_subreg fails if the value
6157 is VOIDmode, yet we can't do any simplification, e.g.
6158 for EXPR_LISTs denoting function call results.
6159 It is invalid to construct a SUBREG with a VOIDmode
6160 SUBREG_REG, hence a zero new_src means we can't do
6161 this substitution. */
6162 if (new_src == 0)
6163 continue;
6165 src_hash = HASH (new_src, new_mode);
6166 src_elt = lookup (new_src, src_hash, new_mode);
6168 /* Put the new source in the hash table if it isn't
6169 already there. */
6170 if (src_elt == 0)
6172 if (insert_regs (new_src, classp, 0))
6174 rehash_using_reg (new_src);
6175 src_hash = HASH (new_src, new_mode);
6177 src_elt = insert (new_src, classp, src_hash, new_mode);
6178 src_elt->in_memory = elt->in_memory;
6180 else if (classp && classp != src_elt->first_same_value)
6181 /* Show that two things that we've seen before are
6182 actually the same. */
6183 merge_equiv_classes (src_elt, classp);
6185 classp = src_elt->first_same_value;
6186 /* Ignore invalid entries. */
6187 while (classp
6188 && GET_CODE (classp->exp) != REG
6189 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6190 classp = classp->next_same_value;
6195 /* Special handling for (set REG0 REG1) where REG0 is the
6196 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6197 be used in the sequel, so (if easily done) change this insn to
6198 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6199 that computed their value. Then REG1 will become a dead store
6200 and won't cloud the situation for later optimizations.
6202 Do not make this change if REG1 is a hard register, because it will
6203 then be used in the sequel and we may be changing a two-operand insn
6204 into a three-operand insn.
6206 Also do not do this if we are operating on a copy of INSN.
6208 Also don't do this if INSN ends a libcall; this would cause an unrelated
6209 register to be set in the middle of a libcall, and we then get bad code
6210 if the libcall is deleted. */
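/* Illustrative example only (pseudo-RTL, hypothetical register numbers):

       (set (reg 101) (plus (reg 102) (reg 103)))   ; previous insn
       (set (reg 66) (reg 101))                     ; this insn; REG0 = reg 66

   becomes

       (set (reg 66) (plus (reg 102) (reg 103)))
       (set (reg 101) (reg 66))

   leaving the copy into REG1 (reg 101) as a probably-dead store.  */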
6212 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6213 && NEXT_INSN (PREV_INSN (insn)) == insn
6214 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6215 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6216 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6218 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6219 struct qty_table_elem *src_ent = &qty_table[src_q];
6221 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6222 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6224 rtx prev = insn;
6225 /* Scan for the previous nonnote insn, but stop at a basic
6226 block boundary. */
6227 do
6229 prev = PREV_INSN (prev);
6231 while (prev && GET_CODE (prev) == NOTE
6232 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6234 /* Do not swap the registers around if the previous instruction
6235 attaches a REG_EQUIV note to REG1.
6237 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6238 from the pseudo that originally shadowed an incoming argument
6239 to another register. Some uses of REG_EQUIV might rely on it
6240 being attached to REG1 rather than REG2.
6242 This section previously turned the REG_EQUIV into a REG_EQUAL
6243 note. We cannot do that because REG_EQUIV may provide an
6244 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6246 if (prev != 0 && GET_CODE (prev) == INSN
6247 && GET_CODE (PATTERN (prev)) == SET
6248 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6249 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6251 rtx dest = SET_DEST (sets[0].rtl);
6252 rtx src = SET_SRC (sets[0].rtl);
6253 rtx note;
6255 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6256 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6257 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6258 apply_change_group ();
6260 /* If INSN has a REG_EQUAL note, and this note mentions
6261 REG0, then we must delete it, because the value in
6262 REG0 has changed. If the note's value is REG1, we must
6263 also delete it because that is now this insn's dest. */
6264 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6265 if (note != 0
6266 && (reg_mentioned_p (dest, XEXP (note, 0))
6267 || rtx_equal_p (src, XEXP (note, 0))))
6268 remove_note (insn, note);
6273 /* If this is a conditional jump insn, record any known equivalences due to
6274 the condition being tested. */
6276 last_jump_equiv_class = 0;
6277 if (GET_CODE (insn) == JUMP_INSN
6278 && n_sets == 1 && GET_CODE (x) == SET
6279 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6280 record_jump_equiv (insn, 0);
6283 /* If the previous insn set CC0 and this insn no longer references CC0,
6284 delete the previous insn. Here we use the fact that nothing expects CC0
6285 to be valid over an insn, which is true until the final pass. */
6286 if (prev_insn && GET_CODE (prev_insn) == INSN
6287 && (tem = single_set (prev_insn)) != 0
6288 && SET_DEST (tem) == cc0_rtx
6289 && ! reg_mentioned_p (cc0_rtx, x))
6290 delete_insn (prev_insn);
6292 prev_insn_cc0 = this_insn_cc0;
6293 prev_insn_cc0_mode = this_insn_cc0_mode;
6298 /* Remove from the hash table all expressions that reference memory. */
6300 static void
6301 invalidate_memory (void)
6303 int i;
6304 struct table_elt *p, *next;
6306 for (i = 0; i < HASH_SIZE; i++)
6307 for (p = table[i]; p; p = next)
6309 next = p->next_same_hash;
6310 if (p->in_memory)
6311 remove_from_table (p, i);
6315 /* If ADDR is an address that implicitly affects the stack pointer, return
6316 1 and update the register tables to show the effect. Else, return 0. */
6318 static int
6319 addr_affects_sp_p (rtx addr)
6321 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6322 && GET_CODE (XEXP (addr, 0)) == REG
6323 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6325 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6327 REG_TICK (STACK_POINTER_REGNUM)++;
6328 /* Is it possible to use a subreg of SP? */
6329 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6332 /* This should be *very* rare. */
6333 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6334 invalidate (stack_pointer_rtx, VOIDmode);
6336 return 1;
6339 return 0;
6342 /* Perform invalidation on the basis of everything about an insn
6343 except for invalidating the actual places that are SET in it.
6344 This includes the places CLOBBERed, and anything that might
6345 alias with something that is SET or CLOBBERed.
6347 X is the pattern of the insn. */
6349 static void
6350 invalidate_from_clobbers (rtx x)
6352 if (GET_CODE (x) == CLOBBER)
6354 rtx ref = XEXP (x, 0);
6357 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6358 || GET_CODE (ref) == MEM)
6359 invalidate (ref, VOIDmode);
6360 else if (GET_CODE (ref) == STRICT_LOW_PART
6361 || GET_CODE (ref) == ZERO_EXTRACT)
6362 invalidate (XEXP (ref, 0), GET_MODE (ref));
6365 else if (GET_CODE (x) == PARALLEL)
6368 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6370 rtx y = XVECEXP (x, 0, i);
6371 if (GET_CODE (y) == CLOBBER)
6373 rtx ref = XEXP (y, 0);
6374 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6375 || GET_CODE (ref) == MEM)
6376 invalidate (ref, VOIDmode);
6377 else if (GET_CODE (ref) == STRICT_LOW_PART
6378 || GET_CODE (ref) == ZERO_EXTRACT)
6379 invalidate (XEXP (ref, 0), GET_MODE (ref));
6385 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6386 and replace any registers in them with either an equivalent constant
6387 or the canonical form of the register. If we are inside an address,
6388 only do this if the address remains valid.
6390 OBJECT is 0 except when within a MEM in which case it is the MEM.
6392 Return the replacement for X. */
6394 static rtx
6395 cse_process_notes (rtx x, rtx object)
6397 enum rtx_code code = GET_CODE (x);
6398 const char *fmt = GET_RTX_FORMAT (code);
6399 int i;
6401 switch (code)
6403 case CONST_INT:
6404 case CONST:
6405 case SYMBOL_REF:
6406 case LABEL_REF:
6407 case CONST_DOUBLE:
6408 case CONST_VECTOR:
6409 case PC:
6410 case CC0:
6411 case LO_SUM:
6412 return x;
6414 case MEM:
6415 validate_change (x, &XEXP (x, 0),
6416 cse_process_notes (XEXP (x, 0), x), 0);
6417 return x;
6419 case EXPR_LIST:
6420 case INSN_LIST:
6421 if (REG_NOTE_KIND (x) == REG_EQUAL)
6422 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6423 if (XEXP (x, 1))
6424 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6425 return x;
6427 case SIGN_EXTEND:
6428 case ZERO_EXTEND:
6429 case SUBREG:
6431 rtx new = cse_process_notes (XEXP (x, 0), object);
6432 /* We don't substitute VOIDmode constants into these rtx,
6433 since they would impede folding. */
6434 if (GET_MODE (new) != VOIDmode)
6435 validate_change (object, &XEXP (x, 0), new, 0);
6436 return x;
6439 case REG:
6440 i = REG_QTY (REGNO (x));
6442 /* Return a constant or a constant register. */
6443 if (REGNO_QTY_VALID_P (REGNO (x)))
6445 struct qty_table_elem *ent = &qty_table[i];
6447 if (ent->const_rtx != NULL_RTX
6448 && (CONSTANT_P (ent->const_rtx)
6449 || GET_CODE (ent->const_rtx) == REG))
6451 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6452 if (new)
6453 return new;
6457 /* Otherwise, canonicalize this register. */
6458 return canon_reg (x, NULL_RTX);
6459 default:
6460 break;
6464 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6465 if (fmt[i] == 'e')
6466 validate_change (object, &XEXP (x, i),
6467 cse_process_notes (XEXP (x, i), object), 0);
6469 return x;
6472 /* Find common subexpressions between the end test of a loop and the beginning
6473 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6475 Often we have a loop where an expression in the exit test is used
6476 in the body of the loop. For example "while (*p) *q++ = *p++;".
6477 Because of the way we duplicate the loop exit test in front of the loop,
6478 however, we don't detect that common subexpression. This will be caught
6479 when global cse is implemented, but this is a quite common case.
6481 This function handles the most common cases of these common expressions.
6482 It is called after we have processed the basic block ending with the
6483 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6484 jumps to a label used only once. */
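/* As an illustration (not taken from the original sources), the loop
   "while (*p) *q++ = *p++;" is laid out with the exit test duplicated
   in front of the loop, roughly:

       if (*p == 0) goto done;      ; duplicated exit test
     top:
       *q++ = *p++;
       if (*p != 0) goto top;       ; exit test at the bottom
     done:

   The load of *p in the bottom test and the load of *p at the start of
   the next iteration are the common subexpressions that this function
   tries to recover across the backward branch.  */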
6486 static void
6487 cse_around_loop (rtx loop_start)
6489 rtx insn;
6490 int i;
6491 struct table_elt *p;
6493 /* If the jump at the end of the loop doesn't go to the start, we don't
6494 do anything. */
6495 for (insn = PREV_INSN (loop_start);
6496 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6497 insn = PREV_INSN (insn))
6498 ;
6500 if (insn == 0
6501 || GET_CODE (insn) != NOTE
6502 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6503 return;
6505 /* If the last insn of the loop (the end test) was an NE comparison,
6506 we will interpret it as an EQ comparison, since we fell through
6507 the loop. Any equivalences resulting from that comparison are
6508 therefore not valid and must be invalidated. */
6509 if (last_jump_equiv_class)
6510 for (p = last_jump_equiv_class->first_same_value; p;
6511 p = p->next_same_value)
6513 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6514 || (GET_CODE (p->exp) == SUBREG
6515 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6516 invalidate (p->exp, VOIDmode);
6517 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6518 || GET_CODE (p->exp) == ZERO_EXTRACT)
6519 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6522 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6523 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6525 The only thing we do with SET_DEST is invalidate entries, so we
6526 can safely process each SET in order. It is slightly less efficient
6527 to do so, but we only want to handle the most common cases.
6529 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6530 These pseudos won't have valid entries in any of the tables indexed
6531 by register number, such as reg_qty. We avoid out-of-range array
6532 accesses by not processing any instructions created after cse started. */
6534 for (insn = NEXT_INSN (loop_start);
6535 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6536 && INSN_UID (insn) < max_insn_uid
6537 && ! (GET_CODE (insn) == NOTE
6538 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6539 insn = NEXT_INSN (insn))
6541 if (INSN_P (insn)
6542 && (GET_CODE (PATTERN (insn)) == SET
6543 || GET_CODE (PATTERN (insn)) == CLOBBER))
6544 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6545 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6546 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6547 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6548 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6549 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6550 loop_start);
6554 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6555 since they are done elsewhere. This function is called via note_stores. */
6557 static void
6558 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6560 enum rtx_code code = GET_CODE (dest);
6562 if (code == MEM
6563 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6564 /* There are times when an address can appear varying and be a PLUS
6565 during this scan when it would be a fixed address were we to know
6566 the proper equivalences. So invalidate all memory if there is
6567 a BLKmode or nonscalar memory reference or a reference to a
6568 variable address. */
6569 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6570 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6572 invalidate_memory ();
6573 return;
6576 if (GET_CODE (set) == CLOBBER
6577 || CC0_P (dest)
6578 || dest == pc_rtx)
6579 return;
6581 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6582 invalidate (XEXP (dest, 0), GET_MODE (dest));
6583 else if (code == REG || code == SUBREG || code == MEM)
6584 invalidate (dest, VOIDmode);
6587 /* Invalidate all insns from START up to the end of the function or the
6588 next label. This is called when we wish to CSE around a block that is
6589 conditionally executed. */
6591 static void
6592 invalidate_skipped_block (rtx start)
6594 rtx insn;
6596 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6597 insn = NEXT_INSN (insn))
6599 if (! INSN_P (insn))
6600 continue;
6602 if (GET_CODE (insn) == CALL_INSN)
6604 if (! CONST_OR_PURE_CALL_P (insn))
6605 invalidate_memory ();
6606 invalidate_for_call ();
6609 invalidate_from_clobbers (PATTERN (insn));
6610 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6614 /* If modifying X will modify the value in *DATA (which is really an
6615 `rtx *'), indicate that fact by setting the pointed to value to
6616 NULL_RTX. */
6618 static void
6619 cse_check_loop_start (rtx x, rtx set ATTRIBUTE_UNUSED, void *data)
6621 rtx *cse_check_loop_start_value = (rtx *) data;
6623 if (*cse_check_loop_start_value == NULL_RTX
6624 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6625 return;
6627 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6628 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6629 *cse_check_loop_start_value = NULL_RTX;
6632 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6633 a loop that starts with the label at LOOP_START.
6635 If X is a SET, we see if its SET_SRC is currently in our hash table.
6636 If so, we see if it has a value equal to some register used only in the
6637 loop exit code (as marked by jump.c).
6639 If those two conditions are true, we search backwards from the start of
6640 the loop to see if that same value was loaded into a register that still
6641 retains its value at the start of the loop.
6643 If so, we insert an insn after the load to copy the destination of that
6644 load into the equivalent register and (try to) replace our SET_SRC with that
6645 register.
6647 In any event, we invalidate whatever this SET or CLOBBER modifies. */
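/* Sketch of the transformation (hypothetical register numbers,
   illustrative only): suppose the loop exit code uses (reg 70), marked
   REG_LOOP_TEST_P, whose value equals our SET_SRC, and before LOOP_START
   we find

       (set (reg 65) EXPR)          ; insn P, EXPR matches SET_SRC (x)

   Then we emit after P

       (set (reg 70) (reg 65))

   and replace our SET_SRC with (reg 70), provided nothing between P and
   LOOP_START modifies anything EXPR references.  */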
6649 static void
6650 cse_set_around_loop (rtx x, rtx insn, rtx loop_start)
6652 struct table_elt *src_elt;
6654 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6655 are setting PC or CC0 or whose SET_SRC is already a register. */
6656 if (GET_CODE (x) == SET
6657 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6658 && GET_CODE (SET_SRC (x)) != REG)
6660 src_elt = lookup (SET_SRC (x),
6661 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6662 GET_MODE (SET_DEST (x)));
6664 if (src_elt)
6665 for (src_elt = src_elt->first_same_value; src_elt;
6666 src_elt = src_elt->next_same_value)
6667 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6668 && COST (src_elt->exp) < COST (SET_SRC (x)))
6670 rtx p, set;
6672 /* Look for an insn in front of LOOP_START that sets
6673 something in the desired mode to SET_SRC (x) before we hit
6674 a label or CALL_INSN. */
6676 for (p = prev_nonnote_insn (loop_start);
6677 p && GET_CODE (p) != CALL_INSN
6678 && GET_CODE (p) != CODE_LABEL;
6679 p = prev_nonnote_insn (p))
6680 if ((set = single_set (p)) != 0
6681 && GET_CODE (SET_DEST (set)) == REG
6682 && GET_MODE (SET_DEST (set)) == src_elt->mode
6683 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6685 /* We now have to ensure that nothing between P
6686 and LOOP_START modified anything referenced in
6687 SET_SRC (x). We know that nothing within the loop
6688 can modify it, or we would have invalidated it in
6689 the hash table and stopped this scan. */
6690 rtx q;
6691 rtx cse_check_loop_start_value = SET_SRC (x);
6692 for (q = p; q != loop_start; q = NEXT_INSN (q))
6694 note_stores (PATTERN (q),
6695 cse_check_loop_start,
6696 &cse_check_loop_start_value);
6698 /* If nothing was changed and we can replace our
6699 SET_SRC, add an insn after P to copy its destination
6700 to what we will be replacing SET_SRC with. */
6701 if (cse_check_loop_start_value
6703 && !can_throw_internal (insn)
6704 && validate_change (insn, &SET_SRC (x),
6705 src_elt->exp, 0))
6707 /* If this creates new pseudos, this is unsafe,
6708 because the regno of new pseudo is unsuitable
6709 to index into reg_qty when cse_insn processes
6710 the new insn. Therefore, if a new pseudo was
6711 created, discard this optimization. */
6712 int nregs = max_reg_num ();
6713 rtx move
6714 = gen_move_insn (src_elt->exp, SET_DEST (set));
6715 if (nregs != max_reg_num ())
6717 if (! validate_change (insn, &SET_SRC (x),
6718 SET_SRC (set), 0))
6719 break;
6721 else
6723 if (CONSTANT_P (SET_SRC (set))
6724 && ! find_reg_equal_equiv_note (insn))
6725 set_unique_reg_note (insn, REG_EQUAL,
6726 SET_SRC (set));
6727 if (control_flow_insn_p (p))
6728 /* p can cause a control flow transfer so it
6729 is the last insn of a basic block. We can't
6730 therefore use emit_insn_after. */
6731 emit_insn_before (move, next_nonnote_insn (p));
6732 else
6733 emit_insn_after (move, p);
6741 /* Deal with the destination of X affecting the stack pointer. */
6742 addr_affects_sp_p (SET_DEST (x));
6744 /* See comment on similar code in cse_insn for explanation of these
6745 tests. */
6746 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6747 || GET_CODE (SET_DEST (x)) == MEM)
6748 invalidate (SET_DEST (x), VOIDmode);
6749 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6750 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6751 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6754 /* Find the end of INSN's basic block and return its range,
6755 the total number of SETs in all the insns of the block, the last insn of the
6756 block, and the branch path.
6758 The branch path indicates which branches should be followed. If a nonzero
6759 path size is specified, the block should be rescanned and a different set
6760 of branches will be taken. The branch path is only used if
6761 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6763 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6764 used to describe the block. It is filled in with the information about
6765 the current block. The incoming structure's branch path, if any, is used
6766 to construct the output branch path. */
6768 static void
6769 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6770 int follow_jumps, int after_loop, int skip_blocks)
6772 rtx p = insn, q;
6773 int nsets = 0;
6774 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6775 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6776 int path_size = data->path_size;
6777 int path_entry = 0;
6778 int i;
6780 /* Update the previous branch path, if any. If the last branch was
6781 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6782 shorten the path by one and look at the previous branch. We know that
6783 at least one branch must have been taken if PATH_SIZE is nonzero. */
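/* For instance (illustrative): a previous path TAKEN, TAKEN is revisited
   as TAKEN, NOT_TAKEN; once every trailing entry is NOT_TAKEN the path
   is shortened and the preceding branch is flipped, so successive calls
   walk the paths like a binary countdown until all are NOT_TAKEN.  */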
6784 while (path_size > 0)
6786 if (data->path[path_size - 1].status != NOT_TAKEN)
6788 data->path[path_size - 1].status = NOT_TAKEN;
6789 break;
6791 else
6792 path_size--;
6795 /* If the first instruction is marked with QImode, that means we've
6796 already processed this block. Our caller will look at DATA->LAST
6797 to figure out where to go next. We want to return the next block
6798 in the instruction stream, not some branched-to block somewhere
6799 else. We accomplish this by pretending our caller forbade us to
6800 follow jumps, or skip blocks. */
6801 if (GET_MODE (insn) == QImode)
6802 follow_jumps = skip_blocks = 0;
6804 /* Scan to end of this basic block. */
6805 while (p && GET_CODE (p) != CODE_LABEL)
6807 /* Don't cse out the end of a loop. This makes a difference
6808 only for the unusual loops that always execute at least once;
6809 all other loops have labels there so we will stop in any case.
6810 Cse'ing out the end of the loop is dangerous because it
6811 might cause an invariant expression inside the loop
6812 to be reused after the end of the loop. This would make it
6813 hard to move the expression out of the loop in loop.c,
6814 especially if it is one of several equivalent expressions
6815 and loop.c would like to eliminate it.
6817 If we are running after loop.c has finished, we can ignore
6818 the NOTE_INSN_LOOP_END. */
6820 if (! after_loop && GET_CODE (p) == NOTE
6821 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6822 break;
6824 /* Don't cse over a call to setjmp; on some machines (eg VAX)
6825 the regs restored by the longjmp come from
6826 a later time than the setjmp. */
6827 if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6828 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6829 break;
6831 /* A PARALLEL can have lots of SETs in it,
6832 especially if it is really an ASM_OPERANDS. */
6833 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6834 nsets += XVECLEN (PATTERN (p), 0);
6835 else if (GET_CODE (p) != NOTE)
6836 nsets += 1;
6838 /* Ignore insns made by CSE; they cannot affect the boundaries of
6839 the basic block. */
6841 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6842 high_cuid = INSN_CUID (p);
6843 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6844 low_cuid = INSN_CUID (p);
6846 /* See if this insn is in our branch path. If it is and we are to
6847 take it, do so. */
6848 if (path_entry < path_size && data->path[path_entry].branch == p)
6850 if (data->path[path_entry].status != NOT_TAKEN)
6851 p = JUMP_LABEL (p);
6853 /* Point to next entry in path, if any. */
6854 path_entry++;
6857 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6858 was specified, we haven't reached our maximum path length, there are
6859 insns following the target of the jump, this is the only use of the
6860 jump label, and the target label is preceded by a BARRIER.
6862 Alternatively, we can follow the jump if it branches around a
6863 block of code and there are no other branches into the block.
6864 In this case invalidate_skipped_block will be called to invalidate any
6865 registers set in the block when following the jump. */
6867 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6868 && GET_CODE (p) == JUMP_INSN
6869 && GET_CODE (PATTERN (p)) == SET
6870 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6871 && JUMP_LABEL (p) != 0
6872 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6873 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6875 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6876 if ((GET_CODE (q) != NOTE
6877 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6878 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6879 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6880 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6881 break;
6883 /* If we ran into a BARRIER, this code is an extension of the
6884 basic block when the branch is taken. */
6885 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6887 /* Don't allow ourselves to keep walking around an
6888 always-executed loop. */
6889 if (next_real_insn (q) == next)
6891 p = NEXT_INSN (p);
6892 continue;
6895 /* Similarly, don't put a branch in our path more than once. */
6896 for (i = 0; i < path_entry; i++)
6897 if (data->path[i].branch == p)
6898 break;
6900 if (i != path_entry)
6901 break;
6903 data->path[path_entry].branch = p;
6904 data->path[path_entry++].status = TAKEN;
6906 /* This branch now ends our path. It was possible that we
6907 didn't see this branch the last time around (when the
6908 insn in front of the target was a JUMP_INSN that was
6909 turned into a no-op). */
6910 path_size = path_entry;
6912 p = JUMP_LABEL (p);
6913 /* Mark block so we won't scan it again later. */
6914 PUT_MODE (NEXT_INSN (p), QImode);
6916 /* Detect a branch around a block of code. */
6917 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6919 rtx tmp;
6921 if (next_real_insn (q) == next)
6923 p = NEXT_INSN (p);
6924 continue;
6927 for (i = 0; i < path_entry; i++)
6928 if (data->path[i].branch == p)
6929 break;
6931 if (i != path_entry)
6932 break;
6934 /* This is no_labels_between_p (p, q) with an added check for
6935 reaching the end of a function (in case Q precedes P). */
6936 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6937 if (GET_CODE (tmp) == CODE_LABEL)
6938 break;
6940 if (tmp == q)
6942 data->path[path_entry].branch = p;
6943 data->path[path_entry++].status = AROUND;
6945 path_size = path_entry;
6947 p = JUMP_LABEL (p);
6948 /* Mark block so we won't scan it again later. */
6949 PUT_MODE (NEXT_INSN (p), QImode);
6953 p = NEXT_INSN (p);
6956 data->low_cuid = low_cuid;
6957 data->high_cuid = high_cuid;
6958 data->nsets = nsets;
6959 data->last = p;
6961 /* If all jumps in the path are not taken, set our path length to zero
6962 so a rescan won't be done. */
6963 for (i = path_size - 1; i >= 0; i--)
6964 if (data->path[i].status != NOT_TAKEN)
6965 break;
6967 if (i == -1)
6968 data->path_size = 0;
6969 else
6970 data->path_size = path_size;
6972 /* End the current branch path. */
6973 data->path[path_size].branch = 0;
6976 /* Perform cse on the instructions of a function.
6977 F is the first instruction.
6978 NREGS is one plus the highest pseudo-reg number used in the instruction.
6980 AFTER_LOOP is 1 if this is the cse call done after loop optimization
6981 (only if -frerun-cse-after-loop).
6983 Returns 1 if jump_optimize should be redone due to simplifications
6984 in conditional jump instructions. */
6986 int
6987 cse_main (rtx f, int nregs, int after_loop, FILE *file)
6989 struct cse_basic_block_data val;
6990 rtx insn = f;
6991 int i;
6993 val.path = xmalloc (sizeof (struct branch_path)
6994 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6996 cse_jumps_altered = 0;
6997 recorded_label_ref = 0;
6998 constant_pool_entries_cost = 0;
6999 constant_pool_entries_regcost = 0;
7000 val.path_size = 0;
7002 init_recog ();
7003 init_alias_analysis ();
7005 max_reg = nregs;
7007 max_insn_uid = get_max_uid ();
7009 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
7011 #ifdef LOAD_EXTEND_OP
7013 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
7014 and change the code and mode as appropriate. */
7015 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7016 #endif
7018 /* Reset the counter indicating how many elements have been made
7019 this run. */
7020 n_elements_made = 0;
7022 /* Find the largest uid. */
7024 max_uid = get_max_uid ();
7025 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
7027 /* Compute the mapping from uids to cuids.
7028 CUIDs are numbers assigned to insns, like uids,
7029 except that cuids increase monotonically through the code.
7030 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7031 between two insns is not affected by -g. */
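/* For example (illustrative): given the stream I1, N, I2 where N is a
   line-number note, the cuids assigned are 1, 1, 2, so the distance
   INSN_CUID (I2) - INSN_CUID (I1) is the same whether or not -g emitted
   the note.  */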
7033 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7035 if (GET_CODE (insn) != NOTE
7036 || NOTE_LINE_NUMBER (insn) < 0)
7037 INSN_CUID (insn) = ++i;
7038 else
7039 /* Give a line number note the same cuid as preceding insn. */
7040 INSN_CUID (insn) = i;
7043 ggc_push_context ();
7045 /* Loop over basic blocks.
7046 Compute the maximum number of qty's needed for each basic block
7047 (which is 2 for each SET). */
7049 insn = f;
7050 while (insn)
7052 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7053 flag_cse_skip_blocks);
7055 /* If this basic block was already processed or has no sets, skip it. */
7056 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7058 PUT_MODE (insn, VOIDmode);
7059 insn = (val.last ? NEXT_INSN (val.last) : 0);
7060 continue;
7064 cse_basic_block_start = val.low_cuid;
7065 cse_basic_block_end = val.high_cuid;
7066 max_qty = val.nsets * 2;
7069 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7070 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7073 /* Make MAX_QTY bigger to give us room to optimize
7074 past the end of this basic block, if that should prove useful. */
7075 if (max_qty < 500)
7076 max_qty = 500;
7078 /* If this basic block is being extended by following certain jumps,
7079 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7080 Otherwise, we start after this basic block. */
7081 if (val.path_size > 0)
7082 cse_basic_block (insn, val.last, val.path, 0);
7083 else
7085 int old_cse_jumps_altered = cse_jumps_altered;
7086 rtx temp;
7088 /* When cse changes a conditional jump to an unconditional
7089 jump, we want to reprocess the block, since it will give
7090 us a new branch path to investigate. */
7091 cse_jumps_altered = 0;
7092 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7093 if (cse_jumps_altered == 0
7094 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7095 insn = temp;
7097 cse_jumps_altered |= old_cse_jumps_altered;
7110 if (max_elements_made < n_elements_made)
7111 max_elements_made = n_elements_made;
7114 end_alias_analysis ();
7115 free (uid_cuid);
7116 free (reg_eqv_table);
7117 free (val.path);
7119 return cse_jumps_altered || recorded_label_ref;
7122 /* Process a single basic block. FROM and TO are the limits of the basic
7123 block. NEXT_BRANCH points to the branch path when following jumps or
7124 a null path when not following jumps.
7126 AROUND_LOOP is nonzero if we are to try to cse around to the start of a
7127 loop. This is true when we are being called for the last time on a
7128 block and this CSE pass is before loop.c. */
7130 static rtx
7131 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch,
7132 int around_loop)
7134 rtx insn;
7135 int to_usage = 0;
7136 rtx libcall_insn = NULL_RTX;
7137 int num_insns = 0;
7138 int no_conflict = 0;
7140 /* Allocate the space needed by qty_table. */
7141 qty_table = xmalloc (max_qty * sizeof (struct qty_table_elem));
7143 new_basic_block ();
7145 /* TO might be a label. If so, protect it from being deleted. */
7146 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7147 ++LABEL_NUSES (to);
7149 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7151 enum rtx_code code = GET_CODE (insn);
7153 /* If we have processed 1,000 insns, flush the hash table to
7154 avoid extreme quadratic behavior. We must not include NOTEs
7155 in the count since there may be more of them when generating
7156 debugging information. If we clear the table at different
7157 times, code generated with -g -O might be different than code
7158 generated with -O but not -g.
7160 ??? This is a real kludge and needs to be done some other way.
7161 Perhaps for 2.9. */
7162 if (code != NOTE && num_insns++ > 1000)
7164 flush_hash_table ();
7165 num_insns = 0;
7168 /* See if this is a branch that is part of the path. If so, and it is
7169 to be taken, do so. */
7170 if (next_branch->branch == insn)
7172 enum taken status = next_branch++->status;
7173 if (status != NOT_TAKEN)
7175 if (status == TAKEN)
7176 record_jump_equiv (insn, 1);
7177 else
7178 invalidate_skipped_block (NEXT_INSN (insn));
7180 /* Set the last insn as the jump insn; it doesn't affect cc0.
7181 Then follow this branch. */
7182 #ifdef HAVE_cc0
7183 prev_insn_cc0 = 0;
7184 prev_insn = insn;
7185 #endif
7186 insn = JUMP_LABEL (insn);
7187 continue;
7191 if (GET_MODE (insn) == QImode)
7192 PUT_MODE (insn, VOIDmode);
7194 if (GET_RTX_CLASS (code) == 'i')
7196 rtx p;
7198 /* Process notes first so we have all notes in canonical forms when
7199 looking for duplicate operations. */
7201 if (REG_NOTES (insn))
7202 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7204 /* Track when we are inside a LIBCALL block. Inside such a block,
7205 we do not want to record destinations. The last insn of a
7206 LIBCALL block is not considered to be part of the block, since
7207 its destination is the result of the block and hence should be
7208 recorded. */
7210 if (REG_NOTES (insn) != 0)
7212 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7213 libcall_insn = XEXP (p, 0);
7214 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7216 /* Keep libcall_insn for the last SET insn of a no-conflict
7217 block to prevent changing the destination. */
7218 if (! no_conflict)
7219 libcall_insn = 0;
7220 else
7221 no_conflict = -1;
7223 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
7224 no_conflict = 1;
7227 cse_insn (insn, libcall_insn);
7229 if (no_conflict == -1)
7231 libcall_insn = 0;
7232 no_conflict = 0;
7235 /* If we haven't already found an insn where we added a LABEL_REF,
7236 see if this insn is one. */
7237 if (GET_CODE (insn) == INSN && ! recorded_label_ref
7238 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7239 (void *) insn))
7240 recorded_label_ref = 1;
7243 /* If INSN is now an unconditional jump, skip to the end of our
7244 basic block by pretending that we just did the last insn in the
7245 basic block. If we are jumping to the end of our block, show
7246 that we can have one usage of TO. */
7248 if (any_uncondjump_p (insn))
7250 if (to == 0)
7252 free (qty_table);
7253 return 0;
7256 if (JUMP_LABEL (insn) == to)
7257 to_usage = 1;
7259 /* Maybe TO was deleted because the jump is unconditional.
7260 If so, there is nothing left in this basic block. */
7261 /* ??? Perhaps it would be smarter to set TO
7262 to whatever follows this insn,
7263 and pretend the basic block had always ended here. */
7264 if (INSN_DELETED_P (to))
7265 break;
7267 insn = PREV_INSN (to);
7270 /* See if it is ok to keep on going past the label
7271 which used to end our basic block. Remember that we incremented
7272 the count of that label, so we decrement it here. If we made
7273 a jump unconditional, TO_USAGE will be one; in that case, we don't
7274 want to count the use in that jump. */
7276 if (to != 0 && NEXT_INSN (insn) == to
7277 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7279 struct cse_basic_block_data val;
7280 rtx prev;
7282 insn = NEXT_INSN (to);
7284 /* If TO was the last insn in the function, we are done. */
7285 if (insn == 0)
7287 free (qty_table);
7288 return 0;
7291 /* If TO was preceded by a BARRIER we are done with this block
7292 because it has no continuation. */
7293 prev = prev_nonnote_insn (to);
7294 if (prev && GET_CODE (prev) == BARRIER)
7296 free (qty_table);
7297 return insn;
7300 /* Find the end of the following block. Note that we won't be
7301 following branches in this case. */
7302 to_usage = 0;
7303 val.path_size = 0;
7304 val.path = xmalloc (sizeof (struct branch_path)
7305 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7306 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7307 free (val.path);
7309 /* If the tables we allocated have enough space left
7310 to handle all the SETs in the next basic block,
7311 continue through it. Otherwise, return,
7312 and that block will be scanned individually. */
7313 if (val.nsets * 2 + next_qty > max_qty)
7314 break;
7316 cse_basic_block_start = val.low_cuid;
7317 cse_basic_block_end = val.high_cuid;
7318 to = val.last;
7320 /* Prevent TO from being deleted if it is a label. */
7321 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7322 ++LABEL_NUSES (to);
7324 /* Back up so we process the first insn in the extension. */
7325 insn = PREV_INSN (insn);
7329 if (next_qty > max_qty)
7330 abort ();
7332 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7333 the previous insn is the only insn that branches to the head of a loop,
7334 we can cse into the loop. Don't do this if we changed the jump
7335 structure of a loop unless we aren't going to be following jumps. */
7337 insn = prev_nonnote_insn (to);
7338 if ((cse_jumps_altered == 0
7339 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7340 && around_loop && to != 0
7341 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7342 && GET_CODE (insn) == JUMP_INSN
7343 && JUMP_LABEL (insn) != 0
7344 && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7345 cse_around_loop (JUMP_LABEL (insn));
7347 free (qty_table);
7349 return to ? NEXT_INSN (to) : 0;
7352 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7353 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7355 static int
7356 check_for_label_ref (rtx *rtl, void *data)
7358 rtx insn = (rtx) data;
7360 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7361 we must rerun jump since it needs to place the note. If this is a
7362 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7363 since no REG_LABEL will be added. */
7364 return (GET_CODE (*rtl) == LABEL_REF
7365 && ! LABEL_REF_NONLOCAL_P (*rtl)
7366 && LABEL_P (XEXP (*rtl, 0))
7367 && INSN_UID (XEXP (*rtl, 0)) != 0
7368 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7371 /* Count the number of times registers are used (not set) in X.
7372 COUNTS is an array in which we accumulate the count, INCR is how much
7373 we count each register usage. */
7375 static void
7376 count_reg_usage (rtx x, int *counts, int incr)
7378 enum rtx_code code;
7379 rtx note;
7380 const char *fmt;
7381 int i, j;
7383 if (x == 0)
7384 return;
7386 switch (code = GET_CODE (x))
7388 case REG:
7389 counts[REGNO (x)] += incr;
7390 return;
7392 case PC:
7393 case CC0:
7394 case CONST:
7395 case CONST_INT:
7396 case CONST_DOUBLE:
7397 case CONST_VECTOR:
7398 case SYMBOL_REF:
7399 case LABEL_REF:
7400 return;
7402 case CLOBBER:
7403 /* If we are clobbering a MEM, mark any registers inside the address
7404 as being used. */
7405 if (GET_CODE (XEXP (x, 0)) == MEM)
7406 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
7407 return;
7409 case SET:
7410 /* Unless we are setting a REG, count everything in SET_DEST. */
7411 if (GET_CODE (SET_DEST (x)) != REG)
7412 count_reg_usage (SET_DEST (x), counts, incr);
7413 count_reg_usage (SET_SRC (x), counts, incr);
7414 return;
7416 case CALL_INSN:
7417 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
7418 /* Fall through. */
7420 case INSN:
7421 case JUMP_INSN:
7422 count_reg_usage (PATTERN (x), counts, incr);
7424 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7425 use them. */
7427 note = find_reg_equal_equiv_note (x);
7428 if (note)
7430 rtx eqv = XEXP (note, 0);
7432 if (GET_CODE (eqv) == EXPR_LIST)
7433 /* This REG_EQUAL note describes the result of a function call.
7434 Process all the arguments. */
7435 do
7437 count_reg_usage (XEXP (eqv, 0), counts, incr);
7438 eqv = XEXP (eqv, 1);
7440 while (eqv && GET_CODE (eqv) == EXPR_LIST);
7441 else
7442 count_reg_usage (eqv, counts, incr);
7444 return;
7446 case EXPR_LIST:
7447 if (REG_NOTE_KIND (x) == REG_EQUAL
7448 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
7449 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7450 involving registers in the address. */
7451 || GET_CODE (XEXP (x, 0)) == CLOBBER)
7452 count_reg_usage (XEXP (x, 0), counts, incr);
7454 count_reg_usage (XEXP (x, 1), counts, incr);
7455 return;
7457 case ASM_OPERANDS:
7458 /* Iterate over just the inputs, not the constraints as well. */
7459 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7460 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
7461 return;
7463 case INSN_LIST:
7464 abort ();
7466 default:
7467 break;
7470 fmt = GET_RTX_FORMAT (code);
7471 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7473 if (fmt[i] == 'e')
7474 count_reg_usage (XEXP (x, i), counts, incr);
7475 else if (fmt[i] == 'E')
7476 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7477 count_reg_usage (XVECEXP (x, i, j), counts, incr);
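/* The counting discipline above can be shown in isolation.  The sketch
   below is illustrative only -- a toy model, not compiler code; all
   names are hypothetical -- and is fenced off from compilation.  As a
   standalone file it is a complete C program.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

/* One counter per register, like the COUNTS array used above.  */
static int *counts;

static void
count_use (int regno, int incr)
{
  counts[regno] += incr;
}

int
main (void)
{
  counts = calloc (100, sizeof (int));

  count_use (42, 1);		/* scanning: a use of reg 42 */
  count_use (42, 1);		/* another use */
  count_use (42, -1);		/* an insn using reg 42 is deleted */

  printf ("reg 42 has %d remaining use(s)\n", counts[42]);	/* 1 */
  free (counts);
  return 0;
}
#endif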
7481 /* Return true if set is live. */
7482 static bool
7483 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
7484 int *counts)
7486 #ifdef HAVE_cc0
7487 rtx tem;
7488 #endif
7490 if (set_noop_p (set))
7491 ;
7493 #ifdef HAVE_cc0
7494 else if (GET_CODE (SET_DEST (set)) == CC0
7495 && !side_effects_p (SET_SRC (set))
7496 && ((tem = next_nonnote_insn (insn)) == 0
7497 || !INSN_P (tem)
7498 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7499 return false;
7500 #endif
7501 else if (GET_CODE (SET_DEST (set)) != REG
7502 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7503 || counts[REGNO (SET_DEST (set))] != 0
7504 || side_effects_p (SET_SRC (set))
7505 /* An ADDRESSOF expression can turn into a use of the
7506 internal arg pointer, so always consider the
7507 internal arg pointer live. If it is truly dead,
7508 flow will delete the initializing insn. */
7509 || (SET_DEST (set) == current_function_internal_arg_pointer))
7510 return true;
7512 return false;
7514 /* Return true if insn is live. */
7516 static bool
7517 insn_live_p (rtx insn, int *counts)
7519 int i;
7520 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7521 return true;
7522 else if (GET_CODE (PATTERN (insn)) == SET)
7523 return set_live_p (PATTERN (insn), insn, counts);
7524 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7526 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7528 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7530 if (GET_CODE (elt) == SET)
7532 if (set_live_p (elt, insn, counts))
7533 return true;
7535 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7536 return true;
7539 return false;
7541 else
7542 return true;
7544 /* Return true if libcall is dead as a whole. */
7546 static bool
7547 dead_libcall_p (rtx insn, int *counts)
7549 rtx note, set, new;
7551 /* See if there's a REG_EQUAL note on this insn and try to
7552 replace the source with the REG_EQUAL expression.
7554 We assume that insns with REG_RETVALs can only be reg->reg
7555 copies at this point. */
7556 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7557 if (!note)
7558 return false;
7560 set = single_set (insn);
7561 if (!set)
7562 return false;
7564 new = simplify_rtx (XEXP (note, 0));
7565 if (!new)
7566 new = XEXP (note, 0);
7568 /* While changing insn, we must update the counts accordingly. */
7569 count_reg_usage (insn, counts, -1);
7571 if (validate_change (insn, &SET_SRC (set), new, 0))
7573 count_reg_usage (insn, counts, 1);
7574 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7575 remove_note (insn, note);
7576 return true;
7579 if (CONSTANT_P (new))
7581 new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7582 if (new && validate_change (insn, &SET_SRC (set), new, 0))
7584 count_reg_usage (insn, counts, 1);
7585 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7586 remove_note (insn, note);
7587 return true;
7591 count_reg_usage (insn, counts, 1);
7592 return false;
7595 /* Scan all the insns and delete any that are dead; i.e., they store a register
7596 that is never used or they copy a register to itself.
7598 This is used to remove insns made obviously dead by cse, loop or other
7599 optimizations. It improves the heuristics in loop since it won't try to
7600 move dead invariants out of loops or make givs for dead quantities. The
7601 remaining passes of the compilation are also sped up. */
7603 int
7604 delete_trivially_dead_insns (rtx insns, int nreg)
7606 int *counts;
7607 rtx insn, prev;
7608 int in_libcall = 0, dead_libcall = 0;
7609 int ndead = 0, nlastdead, niterations = 0;
7611 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7612 /* First count the number of times each register is used. */
7613 counts = xcalloc (nreg, sizeof (int));
7614 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7615 count_reg_usage (insn, counts, 1);
7617 do
7619 nlastdead = ndead;
7620 niterations++;
7621 /* Go from the last insn to the first and delete insns that only set unused
7622 registers or copy a register to itself. As we delete an insn, remove
7623 usage counts for registers it uses.
7625 The first jump optimization pass may leave a real insn as the last
7626 insn in the function. We must not skip that insn or we may end
7627 up deleting code that is not really dead. */
7628 insn = get_last_insn ();
7629 if (! INSN_P (insn))
7630 insn = prev_real_insn (insn);
7632 for (; insn; insn = prev)
7634 int live_insn = 0;
7636 prev = prev_real_insn (insn);
7638 /* Don't delete any insns that are part of a libcall block unless
7639 we can delete the whole libcall block.
7641 Flow or loop might get confused if we did that. Remember
7642 that we are scanning backwards. */
7643 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7645 in_libcall = 1;
7646 live_insn = 1;
7647 dead_libcall = dead_libcall_p (insn, counts);
7649 else if (in_libcall)
7650 live_insn = ! dead_libcall;
7651 else
7652 live_insn = insn_live_p (insn, counts);
7654 /* If this is a dead insn, delete it and show registers in it aren't
7655 being used. */
7657 if (! live_insn)
7659 count_reg_usage (insn, counts, -1);
7660 delete_insn_and_edges (insn);
7661 ndead++;
7664 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7666 in_libcall = 0;
7667 dead_libcall = 0;
7671 while (ndead != nlastdead);
7673 if (rtl_dump_file && ndead)
7674 fprintf (rtl_dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7675 ndead, niterations);
7677 free (counts);
7678 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7679 return ndead;
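/* An illustrative, self-contained model of the backward scan above
   (toy three-address "insns", hypothetical names, not compiler code;
   fenced off from compilation).  An insn is dead when its destination
   has no remaining uses; deleting it retracts its source counts, which
   can expose more dead insns on the next iteration.  */
#if 0
#include <stdio.h>

struct toy_insn { int dest, src1, src2, deleted; };

int
main (void)
{
  /* r3 = r1 + r2; r4 = r3 + r1; r5 = r1 + r2; only r4 is live out.  */
  struct toy_insn insn[3] = {
    { 3, 1, 2, 0 },
    { 4, 3, 1, 0 },
    { 5, 1, 2, 0 }
  };
  int counts[8] = { 0 };
  int i, ndead = 0, nlastdead;

  /* First count the number of times each register is used.  */
  for (i = 0; i < 3; i++)
    {
      counts[insn[i].src1]++;
      counts[insn[i].src2]++;
    }
  counts[4]++;			/* r4 is used after the block.  */

  do
    {
      nlastdead = ndead;
      /* Go from the last insn to the first, deleting dead stores and
	 retracting the use counts of their sources.  */
      for (i = 2; i >= 0; i--)
	if (! insn[i].deleted && counts[insn[i].dest] == 0)
	  {
	    insn[i].deleted = 1;
	    counts[insn[i].src1]--;
	    counts[insn[i].src2]--;
	    ndead++;
	  }
    }
  while (ndead != nlastdead);

  printf ("deleted %d trivially dead insn(s)\n", ndead);	/* 1 */
  return 0;
}
#endif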
7682 /* This function is called via for_each_rtx. The argument, NEWREG, is
7683 a condition code register with the desired mode. If we are looking
7684 at the same register in a different mode, replace it with
7685 NEWREG. */
7687 static int
7688 cse_change_cc_mode (rtx *loc, void *data)
7690 rtx newreg = (rtx) data;
7692 if (*loc
7693 && GET_CODE (*loc) == REG
7694 && REGNO (*loc) == REGNO (newreg)
7695 && GET_MODE (*loc) != GET_MODE (newreg))
7697 *loc = newreg;
7698 return -1;
7700 return 0;
7703 /* Change the mode of any reference to the register REGNO (NEWREG) to
7704 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
7705 any instruction which modifies NEWREG. */
7707 static void
7708 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7710 rtx insn;
7712 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7714 if (! INSN_P (insn))
7715 continue;
7717 if (reg_set_p (newreg, insn))
7718 return;
7720 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, newreg);
7721 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, newreg);
7725 /* BB is a basic block which finishes with CC_REG as a condition code
7726 register which is set to CC_SRC. Look through the successors of BB
7727 to find blocks which have a single predecessor (i.e., this one),
7728 and look through those blocks for an assignment to CC_REG which is
7729 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7730 permitted to change the mode of CC_SRC to a compatible mode. This
7731 returns VOIDmode if no equivalent assignments were found.
7732 Otherwise it returns the mode which CC_SRC should wind up with.
7734 The main complexity in this function is handling the mode issues.
7735 We may have more than one duplicate which we can eliminate, and we
7736 try to find a mode which will work for multiple duplicates. */
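/* Sketch of the shape handled here (illustrative only):

       bb:     (set (reg:CC flags) (compare:CC (reg 60) (reg 61)))
               (jump conditional on flags)
              /                       \
       succ1:                          succ2:
	(set (reg:CC flags)             (set (reg:CCZ flags)
	     (compare:CC ...))               (compare:CCZ ...))

   If bb is the only predecessor of each successor and the compares have
   the same operands, the recomputations can be deleted -- possibly after
   settling on one compatible mode for every user of the register.  */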
7738 static enum machine_mode
7739 cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7741 bool found_equiv;
7742 enum machine_mode mode;
7743 unsigned int insn_count;
7744 edge e;
7745 rtx insns[2];
7746 enum machine_mode modes[2];
7747 rtx last_insns[2];
7748 unsigned int i;
7749 rtx newreg;
7751 /* We expect to have two successors. Look at both before picking
7752 the final mode for the comparison. If we have more successors
7753 (i.e., some sort of table jump, although that seems unlikely),
7754 then we require all beyond the first two to use the same
7755 mode. */
7757 found_equiv = false;
7758 mode = GET_MODE (cc_src);
7759 insn_count = 0;
7760 for (e = bb->succ; e; e = e->succ_next)
7762 rtx insn;
7763 rtx end;
7765 if (e->flags & EDGE_COMPLEX)
7766 continue;
7768 if (! e->dest->pred
7769 || e->dest->pred->pred_next
7770 || e->dest == EXIT_BLOCK_PTR)
7771 continue;
7773 end = NEXT_INSN (BB_END (e->dest));
7774 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7776 rtx set;
7778 if (! INSN_P (insn))
7779 continue;
7781 /* If CC_SRC is modified, we have to stop looking for
7782 something which uses it. */
7783 if (modified_in_p (cc_src, insn))
7784 break;
7786 /* Check whether INSN sets CC_REG to CC_SRC. */
7787 set = single_set (insn);
7788 if (set
7789 && GET_CODE (SET_DEST (set)) == REG
7790 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7792 bool found;
7793 enum machine_mode set_mode;
7794 enum machine_mode comp_mode;
7796 found = false;
7797 set_mode = GET_MODE (SET_SRC (set));
7798 comp_mode = set_mode;
7799 if (rtx_equal_p (cc_src, SET_SRC (set)))
7800 found = true;
7801 else if (GET_CODE (cc_src) == COMPARE
7802 && GET_CODE (SET_SRC (set)) == COMPARE
7803 && mode != set_mode
7804 && rtx_equal_p (XEXP (cc_src, 0),
7805 XEXP (SET_SRC (set), 0))
7806 && rtx_equal_p (XEXP (cc_src, 1),
7807 XEXP (SET_SRC (set), 1)))
7810 comp_mode = (*targetm.cc_modes_compatible) (mode, set_mode);
7811 if (comp_mode != VOIDmode
7812 && (can_change_mode || comp_mode == mode))
7813 found = true;
7816 if (found)
7818 found_equiv = true;
7819 if (insn_count < ARRAY_SIZE (insns))
7821 insns[insn_count] = insn;
7822 modes[insn_count] = set_mode;
7823 last_insns[insn_count] = end;
7824 ++insn_count;
7826 if (mode != comp_mode)
7828 if (! can_change_mode)
7829 abort ();
7830 mode = comp_mode;
7831 PUT_MODE (cc_src, mode);
7834 else
7836 if (set_mode != mode)
7838 /* We found a matching expression in the
7839 wrong mode, but we don't have room to
7840 store it in the array. Punt. This case
7841 should be rare. */
7842 break;
7844 /* INSN sets CC_REG to a value equal to CC_SRC
7845 with the right mode. We can simply delete
7846 it. */
7847 delete_insn (insn);
7850 /* We found an instruction to delete. Keep looking,
7851 in the hopes of finding a three-way jump. */
7852 continue;
7855 /* We found an instruction which sets the condition
7856 code, so don't look any farther. */
7857 break;
7860 /* If INSN sets CC_REG in some other way, don't look any
7861 farther. */
7862 if (reg_set_p (cc_reg, insn))
7863 break;
7866 /* If we fell off the bottom of the block, we can keep looking
7867 through successors. We pass CAN_CHANGE_MODE as false because
7868 we aren't prepared to handle compatibility between the
7869 further blocks and this block. */
7870 if (insn == end)
7872 enum machine_mode submode;
7874 submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7875 if (submode != VOIDmode)
7877 if (submode != mode)
7878 abort ();
7879 found_equiv = true;
7880 can_change_mode = false;
7885 if (! found_equiv)
7886 return VOIDmode;
7888 /* Now INSN_COUNT is the number of instructions we found which set
7889 CC_REG to a value equivalent to CC_SRC. The instructions are in
7890 INSNS. The modes used by those instructions are in MODES. */
7892 newreg = NULL_RTX;
7893 for (i = 0; i < insn_count; ++i)
7895 if (modes[i] != mode)
7897 /* We need to change the mode of CC_REG in INSNS[i] and
7898 subsequent instructions. */
7899 if (! newreg)
7901 if (GET_MODE (cc_reg) == mode)
7902 newreg = cc_reg;
7903 else
7904 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7906 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7907 newreg);
7910 delete_insn (insns[i]);
7913 return mode;
7916 /* If we have a fixed condition code register (or two), walk through
7917 the instructions and try to eliminate duplicate assignments. */
7919 void
7920 cse_condition_code_reg (void)
7922 unsigned int cc_regno_1;
7923 unsigned int cc_regno_2;
7924 rtx cc_reg_1;
7925 rtx cc_reg_2;
7926 basic_block bb;
7928 if (! (*targetm.fixed_condition_code_regs) (&cc_regno_1, &cc_regno_2))
7929 return;
7931 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7932 if (cc_regno_2 != INVALID_REGNUM)
7933 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7934 else
7935 cc_reg_2 = NULL_RTX;
7937 FOR_EACH_BB (bb)
7939 rtx last_insn;
7940 rtx cc_reg;
7941 rtx insn;
7942 rtx cc_src_insn;
7943 rtx cc_src;
7944 enum machine_mode mode;
7945 enum machine_mode orig_mode;
7947 /* Look for blocks which end with a conditional jump based on a
7948 condition code register. Then look for the instruction which
7949 sets the condition code register. Then look through the
7950 successor blocks for instructions which set the condition
7951 code register to the same value. There are other possible
7952 uses of the condition code register, but these are by far the
7953 most common and the ones which we are most likely to be able
7954 to optimize. */
7956 last_insn = BB_END (bb);
7957 if (GET_CODE (last_insn) != JUMP_INSN)
7958 continue;
7960 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7961 cc_reg = cc_reg_1;
7962 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7963 cc_reg = cc_reg_2;
7964 else
7965 continue;
7967 cc_src_insn = NULL_RTX;
7968 cc_src = NULL_RTX;
7969 for (insn = PREV_INSN (last_insn);
7970 insn && insn != PREV_INSN (BB_HEAD (bb));
7971 insn = PREV_INSN (insn))
7973 rtx set;
7975 if (! INSN_P (insn))
7976 continue;
7977 set = single_set (insn);
7978 if (set
7979 && GET_CODE (SET_DEST (set)) == REG
7980 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7982 cc_src_insn = insn;
7983 cc_src = SET_SRC (set);
7984 break;
7986 else if (reg_set_p (cc_reg, insn))
7987 break;
7990 if (! cc_src_insn)
7991 continue;
7993 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7994 continue;
7996 /* Now CC_REG is a condition code register used for a
7997 conditional jump at the end of the block, and CC_SRC, in
7998 CC_SRC_INSN, is the value to which that condition code
7999 register is set, and CC_SRC is still meaningful at the end of
8000 the block. */
8002 orig_mode = GET_MODE (cc_src);
8003 mode = cse_cc_succs (bb, cc_reg, cc_src, true);
8004 if (mode != VOIDmode)
8006 if (mode != GET_MODE (cc_src))
8007 abort ();
8008 if (mode != orig_mode)
8010 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
8012 /* Change the mode of CC_REG in CC_SRC_INSN to
8013 GET_MODE (NEWREG). */
8014 for_each_rtx (&PATTERN (cc_src_insn), cse_change_cc_mode,
8015 newreg);
8016 for_each_rtx (&REG_NOTES (cc_src_insn), cse_change_cc_mode,
8017 newreg);
8019 /* Do the same in the following insns that use the
8020 current value of CC_REG within BB. */
8021 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
8022 NEXT_INSN (last_insn),
8023 newreg);