1 /* IRA hard register and memory cost calculation for allocnos.
2 Copyright (C) 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
version.

13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
26 #include "hard-reg-set.h"
31 #include "basic-block.h"
33 #include "addresses.h"
34 #include "insn-config.h"
41 /* The file contains code similar to one in regclass but the code
42 works on the allocno basis. */
/* NOTE(review): the numeric prefixes on each line are original file line
   numbers embedded by an extraction tool; they skip values, so some
   physical lines (e.g. the #endif matching the #ifdef below) are missing
   from this extract.  Surviving tokens are preserved verbatim.  */
44 #ifdef FORBIDDEN_INC_DEC_CLASSES
45 /* Indexed by n, is TRUE if allocno with number N is used in an
46 auto-inc or auto-dec context. */
47 static bool *in_inc_dec;
50 /* The `costs' struct records the cost of using hard registers of each
51 class considered for the calculation and of using memory for each
56 /* Costs for register classes start here. We process only some
57 register classes (cover classes on the 1st cost calculation
58 iteration and important classes on the 2nd iteration). */
62 /* Initialized once. It is a maximal possible size of the allocated
64 static int max_struct_costs_size;
66 /* Allocated and initialized once, and used to initialize cost values
68 static struct costs *init_cost;
70 /* Allocated once, and used for temporary purposes. */
71 static struct costs *temp_costs;
73 /* Allocated once, and used for the cost calculation. */
74 static struct costs *op_costs[MAX_RECOG_OPERANDS];
75 static struct costs *this_op_costs[MAX_RECOG_OPERANDS];
77 /* Original and accumulated costs of each class for each allocno. */
78 static struct costs *allocno_costs, *total_costs;
80 /* Classes used for cost calculation. They may be different on
81 different iterations of the cost calculations or in different
82 optimization modes. */
83 static enum reg_class *cost_classes;
85 /* The size of the previous array. */
86 static int cost_classes_num;
88 /* Map: cost class -> order number (they start with 0) of the cost
89 class. The order number is negative for non-cost classes. */
90 static int cost_class_nums[N_REG_CLASSES];
92 /* It is the current size of struct costs. */
93 static int struct_costs_size;
95 /* Return pointer to structure containing costs of allocno with given
/* The struct costs objects are stored in one flat byte buffer with a
   run-time element size (struct_costs_size), hence the char* arithmetic.  */
97 #define COSTS_OF_ALLOCNO(arr, num) \
98 ((struct costs *) ((char *) (arr) + (num) * struct_costs_size))
100 /* Record register class preferences of each allocno. Null value
101 means no preferences. It happens on the 1st iteration of the cost
103 static enum reg_class *allocno_pref;
105 /* Allocated buffers for allocno_pref. */
106 static enum reg_class *allocno_pref_buffer;
108 /* Record register class of each allocno with the same regno. */
109 static enum reg_class *common_classes;
111 /* Execution frequency of the current insn. */
112 static int frequency;
116 /* Compute the cost of loading X into (if TO_P is TRUE) or from (if
117 TO_P is FALSE) a register of class RCLASS in mode MODE. X must not
118 be a pseudo register. */
/* NOTE(review): the embedded line numbers skip values here; the return
   type line and braces of this definition appear to have been elided by
   extraction.  Surviving lines are kept verbatim.  */
120 copy_cost (rtx x, enum machine_mode mode, enum reg_class rclass, bool to_p,
121 secondary_reload_info *prev_sri)
123 secondary_reload_info sri;
124 enum reg_class secondary_class = NO_REGS;
126 /* If X is a SCRATCH, there is actually nothing to move since we are
127 assuming optimal allocation. */
128 if (GET_CODE (x) == SCRATCH)
131 /* Get the class we will actually use for a reload. */
132 rclass = PREFERRED_RELOAD_CLASS (x, rclass);
134 /* If we need a secondary reload for an intermediate, the cost is
135 that to load the input into the intermediate register, then to
137 sri.prev_sri = prev_sri;
/* Ask the target whether this copy needs an intermediate class; SRI
   accumulates any extra cost the target reports.  */
139 secondary_class = targetm.secondary_reload (to_p, x, rclass, mode, &sri);
141 if (secondary_class != NO_REGS)
/* move_cost tables are built lazily, one mode at a time.  */
143 if (!move_cost[mode])
144 init_move_cost (mode);
/* Recurse to add the cost of moving X through the intermediate class.  */
145 return (move_cost[mode][secondary_class][rclass] + sri.extra_cost
146 + copy_cost (x, mode, secondary_class, to_p, &sri));
149 /* For memory, use the memory move cost, for (hard) registers, use
150 the cost to move between the register classes, and use 2 for
151 everything else (constants). */
152 if (MEM_P (x) || rclass == NO_REGS)
153 return sri.extra_cost + ira_memory_move_cost[mode][rclass][to_p != 0];
156 if (!move_cost[mode])
157 init_move_cost (mode);
158 return (sri.extra_cost + move_cost[mode][REGNO_REG_CLASS (REGNO (x))][rclass]);
161 /* If this is a constant, we may eventually want to call rtx_cost
163 return sri.extra_cost + COSTS_N_INSNS (1);
168 /* Record the cost of using memory or hard registers of various
169 classes for the operands in INSN.
171 N_ALTS is the number of alternatives.
172 N_OPS is the number of operands.
173 OPS is an array of the operands.
174 MODES are the modes of the operands, in case any are VOIDmode.
175 CONSTRAINTS are the constraints to use for the operands. This array
176 is modified by this procedure.
178 This procedure works alternative by alternative. For each
179 alternative we assume that we will be able to allocate all allocnos
180 to their ideal register class and calculate the cost of using that
181 alternative. Then we compute, for each operand that is a
182 pseudo-register, the cost of having the allocno allocated to each
183 register class and using it in that alternative. To this cost is
184 added the cost of the alternative.
186 The cost of each class for this insn is its lowest cost among all
/* NOTE(review): many physical lines of this function are missing from
   this extract (the embedded line numbers are not contiguous); the
   surviving lines are preserved verbatim below.  */
189 record_reg_classes (int n_alts, int n_ops, rtx *ops,
190 enum machine_mode *modes, const char **constraints,
191 rtx insn, struct costs **op_costs,
192 enum reg_class *allocno_pref)
197 int insn_allows_mem[MAX_RECOG_OPERANDS];
199 for (i = 0; i < n_ops; i++)
200 insn_allows_mem[i] = 0;
202 /* Process each alternative, each time minimizing an operand's cost
203 with the cost for each operand in that alternative. */
204 for (alt = 0; alt < n_alts; alt++)
206 enum reg_class classes[MAX_RECOG_OPERANDS];
207 int allows_mem[MAX_RECOG_OPERANDS];
210 int alt_cost = 0, op_cost_add;
212 for (i = 0; i < n_ops; i++)
215 const char *p = constraints[i];
217 enum machine_mode mode = modes[i];
221 /* Initially show we know nothing about the register class. */
222 classes[i] = NO_REGS;
225 /* If this operand has no constraints at all, we can
226 conclude nothing about it since anything is valid. */
229 if (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER)
230 memset (this_op_costs[i], 0, struct_costs_size);
234 /* If this alternative is only relevant when this operand
235 matches a previous operand, we do different things
236 depending on whether this operand is an allocno-reg or not.
237 We must process any modifiers for the operand before we
238 can make this test. */
239 while (*p == '%' || *p == '=' || *p == '+' || *p == '&')
/* Matching-constraint case: digit '0'..'9' referring to an earlier
   operand, alone in its alternative.  */
242 if (p[0] >= '0' && p[0] <= '0' + i && (p[1] == ',' || p[1] == 0))
244 /* Copy class and whether memory is allowed from the
245 matching alternative. Then perform any needed cost
246 computations and/or adjustments. */
248 classes[i] = classes[j];
249 allows_mem[i] = allows_mem[j];
251 insn_allows_mem[i] = 1;
253 if (! REG_P (op) || REGNO (op) < FIRST_PSEUDO_REGISTER)
255 /* If this matches the other operand, we have no
256 added cost and we win. */
257 if (rtx_equal_p (ops[j], op))
259 /* If we can put the other operand into a register,
260 add to the cost of this alternative the cost to
261 copy this operand to the register used for the
263 else if (classes[j] != NO_REGS)
265 alt_cost += copy_cost (op, mode, classes[j], 1, NULL);
269 else if (! REG_P (ops[j])
270 || REGNO (ops[j]) < FIRST_PSEUDO_REGISTER)
272 /* This op is an allocno but the one it matches is
275 /* If we can't put the other operand into a
276 register, this alternative can't be used. */
278 if (classes[j] == NO_REGS)
280 /* Otherwise, add to the cost of this alternative
281 the cost to copy the other operand to the hard
282 register used for this operand. */
284 alt_cost += copy_cost (ops[j], mode, classes[j], 1, NULL);
288 /* The costs of this operand are not the same as the
289 other operand since move costs are not symmetric.
290 Moreover, if we cannot tie them, this alternative
291 needs to do a copy, which is one insn. */
292 struct costs *pp = this_op_costs[i];
294 for (k = 0; k < cost_classes_num; k++)
296 rclass = cost_classes[k];
298 = (((recog_data.operand_type[i] != OP_OUT
299 ? ira_get_may_move_cost (mode, rclass,
300 classes[i], true) : 0)
301 + (recog_data.operand_type[i] != OP_IN
302 ? ira_get_may_move_cost (mode, classes[i],
307 /* If the alternative actually allows memory, make
308 things a bit cheaper since we won't need an extra
311 = ((recog_data.operand_type[i] != OP_IN
312 ? ira_memory_move_cost[mode][classes[i]][0] : 0)
313 + (recog_data.operand_type[i] != OP_OUT
314 ? ira_memory_move_cost[mode][classes[i]][1] : 0)
315 - allows_mem[i]) * frequency;
317 /* If we have assigned a class to this allocno in our
318 first pass, add a cost to this alternative
319 corresponding to what we would add if this allocno
320 were not in the appropriate class. We could use
321 cover class here but it is less accurate
325 enum reg_class pref_class
326 = allocno_pref[ALLOCNO_NUM
327 (ira_curr_regno_allocno_map
330 if (pref_class == NO_REGS)
332 += ((recog_data.operand_type[i] != OP_IN
333 ? ira_memory_move_cost[mode][classes[i]][0]
335 + (recog_data.operand_type[i] != OP_OUT
336 ? ira_memory_move_cost[mode][classes[i]][1]
338 else if (ira_reg_class_intersect
339 [pref_class][classes[i]] == NO_REGS)
340 alt_cost += ira_get_register_move_cost (mode,
344 if (REGNO (ops[i]) != REGNO (ops[j])
345 && ! find_reg_note (insn, REG_DEAD, op))
348 /* This is in place of ordinary cost computation for
349 this operand, so skip to the end of the
350 alternative (should be just one character). */
351 while (*p && *p++ != ',')
359 /* Scan all the constraint letters. See if the operand
360 matches any of the constraints. Collect the valid
361 register classes and see if this operand accepts
370 /* Ignore the next letter for this pass. */
376 case '!': case '#': case '&':
377 case '0': case '1': case '2': case '3': case '4':
378 case '5': case '6': case '7': case '8': case '9':
383 win = address_operand (op, GET_MODE (op));
384 /* We know this operand is an address, so we want it
385 to be allocated to a register that can be the
386 base of an address, i.e. BASE_REG_CLASS. */
388 = ira_reg_class_union[classes[i]]
389 [base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
392 case 'm': case 'o': case 'V':
393 /* It doesn't seem worth distinguishing between
394 offsettable and non-offsettable addresses
396 insn_allows_mem[i] = allows_mem[i] = 1;
/* '<' / '>' constraints: memory with pre/post dec or inc addressing.  */
403 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
404 || GET_CODE (XEXP (op, 0)) == POST_DEC))
410 && (GET_CODE (XEXP (op, 0)) == PRE_INC
411 || GET_CODE (XEXP (op, 0)) == POST_INC))
417 if (GET_CODE (op) == CONST_DOUBLE
418 || (GET_CODE (op) == CONST_VECTOR
419 && (GET_MODE_CLASS (GET_MODE (op))
420 == MODE_VECTOR_FLOAT)))
426 if (GET_CODE (op) == CONST_DOUBLE
427 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
432 if (GET_CODE (op) == CONST_INT
433 || (GET_CODE (op) == CONST_DOUBLE
434 && GET_MODE (op) == VOIDmode))
439 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
444 if (GET_CODE (op) == CONST_INT
445 || (GET_CODE (op) == CONST_DOUBLE
446 && GET_MODE (op) == VOIDmode))
458 if (GET_CODE (op) == CONST_INT
459 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
470 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))))
472 insn_allows_mem[i] = allows_mem[i] = 1;
474 classes[i] = ira_reg_class_union[classes[i]][GENERAL_REGS];
478 if (REG_CLASS_FROM_CONSTRAINT (c, p) != NO_REGS)
479 classes[i] = ira_reg_class_union[classes[i]]
480 [REG_CLASS_FROM_CONSTRAINT (c, p)];
481 #ifdef EXTRA_CONSTRAINT_STR
482 else if (EXTRA_CONSTRAINT_STR (op, c, p))
485 if (EXTRA_MEMORY_CONSTRAINT (c, p))
487 /* Every MEM can be reloaded to fit. */
488 insn_allows_mem[i] = allows_mem[i] = 1;
492 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
494 /* Every address can be reloaded to fit. */
496 if (address_operand (op, GET_MODE (op)))
498 /* We know this operand is an address, so we
499 want it to be allocated to a hard register
500 that can be the base of an address,
501 i.e. BASE_REG_CLASS. */
503 = ira_reg_class_union[classes[i]]
504 [base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
/* Multi-character constraints: advance by the constraint's length.  */
509 p += CONSTRAINT_LEN (c, p);
516 /* How we account for this operand now depends on whether it
517 is a pseudo register or not. If it is, we first check if
518 any register classes are valid. If not, we ignore this
519 alternative, since we want to assume that all allocnos get
520 allocated for register preferencing. If some register
521 class is valid, compute the costs of moving the allocno
523 if (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER)
525 if (classes[i] == NO_REGS)
527 /* We must always fail if the operand is a REG, but
528 we did not find a suitable class.
530 Otherwise we may perform an uninitialized read
531 from this_op_costs after the `continue' statement
537 struct costs *pp = this_op_costs[i];
539 for (k = 0; k < cost_classes_num; k++)
541 rclass = cost_classes[k];
543 = (((recog_data.operand_type[i] != OP_OUT
544 ? ira_get_may_move_cost (mode, rclass,
545 classes[i], true) : 0)
546 + (recog_data.operand_type[i] != OP_IN
547 ? ira_get_may_move_cost (mode, classes[i],
552 /* If the alternative actually allows memory, make
553 things a bit cheaper since we won't need an extra
556 = ((recog_data.operand_type[i] != OP_IN
557 ? ira_memory_move_cost[mode][classes[i]][0] : 0)
558 + (recog_data.operand_type[i] != OP_OUT
559 ? ira_memory_move_cost[mode][classes[i]][1] : 0)
560 - allows_mem[i]) * frequency;
561 /* If we have assigned a class to this allocno in our
562 first pass, add a cost to this alternative
563 corresponding to what we would add if this allocno
564 were not in the appropriate class. We could use
565 cover class here but it is less accurate
569 enum reg_class pref_class
570 = allocno_pref[ALLOCNO_NUM
571 (ira_curr_regno_allocno_map
574 if (pref_class == NO_REGS)
576 += ((recog_data.operand_type[i] != OP_IN
577 ? ira_memory_move_cost[mode][classes[i]][0]
579 + (recog_data.operand_type[i] != OP_OUT
580 ? ira_memory_move_cost[mode][classes[i]][1]
582 else if (ira_reg_class_intersect[pref_class][classes[i]]
584 alt_cost += ira_get_register_move_cost (mode,
591 /* Otherwise, if this alternative wins, either because we
592 have already determined that or if we have a hard
593 register of the proper class, there is no cost for this
595 else if (win || (REG_P (op)
596 && reg_fits_class_p (op, classes[i],
600 /* If registers are valid, the cost of this alternative
601 includes copying the object to and/or from a
603 else if (classes[i] != NO_REGS)
605 if (recog_data.operand_type[i] != OP_OUT)
606 alt_cost += copy_cost (op, mode, classes[i], 1, NULL);
608 if (recog_data.operand_type[i] != OP_IN)
609 alt_cost += copy_cost (op, mode, classes[i], 0, NULL);
611 /* The only other way this alternative can be used is if
612 this is a constant that could be placed into memory. */
613 else if (CONSTANT_P (op) && (allows_addr || allows_mem[i]))
614 alt_cost += ira_memory_move_cost[mode][classes[i]][1];
622 op_cost_add = alt_cost * frequency;
623 /* Finally, update the costs with the information we've
624 calculated about this alternative. */
625 for (i = 0; i < n_ops; i++)
626 if (REG_P (ops[i]) && REGNO (ops[i]) >= FIRST_PSEUDO_REGISTER)
628 struct costs *pp = op_costs[i], *qq = this_op_costs[i];
/* INOUT operands are both read and written, so count them twice.  */
629 int scale = 1 + (recog_data.operand_type[i] == OP_INOUT);
631 pp->mem_cost = MIN (pp->mem_cost,
632 (qq->mem_cost + op_cost_add) * scale);
634 for (k = 0; k < cost_classes_num; k++)
636 = MIN (pp->cost[k], (qq->cost[k] + op_cost_add) * scale);
/* Mark allocnos appearing in operands of this insn that allow no
   memory alternative: spilling them would be bad.  */
640 for (i = 0; i < n_ops; i++)
645 if (! REG_P (op) || REGNO (op) < FIRST_PSEUDO_REGISTER)
647 a = ira_curr_regno_allocno_map [REGNO (op)];
648 if (! ALLOCNO_BAD_SPILL_P (a) && insn_allows_mem[i] == 0)
649 ALLOCNO_BAD_SPILL_P (a) = true;
652 /* If this insn is a single set copying operand 1 to operand 0 and
653 one operand is an allocno with the other a hard reg or an allocno
654 that prefers a hard register that is in its own register class
655 then we may want to adjust the cost of that register class to -1.
657 Avoid the adjustment if the source does not die to avoid
658 stressing of register allocator by preferencing two colliding
659 registers into single class.
661 Also avoid the adjustment if a copy between hard registers of the
662 class is expensive (ten times the cost of a default copy is
663 considered arbitrarily expensive). This avoids losing when the
664 preferred class is very expensive as the source of a copy
666 if ((set = single_set (insn)) != 0
667 && ops[0] == SET_DEST (set) && ops[1] == SET_SRC (set)
668 && REG_P (ops[0]) && REG_P (ops[1])
669 && find_regno_note (insn, REG_DEAD, REGNO (ops[1])))
670 for (i = 0; i <= 1; i++)
671 if (REGNO (ops[i]) >= FIRST_PSEUDO_REGISTER)
673 unsigned int regno = REGNO (ops[!i]);
674 enum machine_mode mode = GET_MODE (ops[!i]);
678 if (regno < FIRST_PSEUDO_REGISTER)
679 for (k = 0; k < cost_classes_num; k++)
681 rclass = cost_classes[k];
682 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
683 && (reg_class_size[rclass]
684 == (unsigned) CLASS_MAX_NREGS (rclass, mode)))
686 if (reg_class_size[rclass] == 1)
687 op_costs[i]->cost[k] = -frequency;
/* Multi-register value: the class qualifies only if every hard
   register of the value is contained in it.  */
691 nr < (unsigned) hard_regno_nregs[regno][mode];
693 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass],
697 if (nr == (unsigned) hard_regno_nregs[regno][mode])
698 op_costs[i]->cost[k] = -frequency;
707 /* Wrapper around REGNO_OK_FOR_INDEX_P, to allow pseudo registers. */
709 ok_for_index_p_nonstrict (rtx reg)
711 unsigned regno = REGNO (reg);
713 return regno >= FIRST_PSEUDO_REGISTER || REGNO_OK_FOR_INDEX_P (regno);
716 /* A version of regno_ok_for_base_p for use here, when all
717 pseudo-registers should count as OK. Arguments as for
718 regno_ok_for_base_p. */
720 ok_for_base_p_nonstrict (rtx reg, enum machine_mode mode,
721 enum rtx_code outer_code, enum rtx_code index_code)
723 unsigned regno = REGNO (reg);
725 if (regno >= FIRST_PSEUDO_REGISTER)
727 return ok_for_base_p_1 (regno, mode, outer_code, index_code);
730 /* Record the pseudo registers we must reload into hard registers in a
731 subexpression of a memory address, X.
733 If CONTEXT is 0, we are looking at the base part of an address,
734 otherwise we are looking at the index part.
736 MODE is the mode of the memory reference; OUTER_CODE and INDEX_CODE
737 give the context that the rtx appears in. These three arguments
738 are passed down to base_reg_class.
740 SCALE is twice the amount to multiply the cost by (it is twice so
741 we can represent half-cost adjustments). */
/* NOTE(review): physical lines are missing from this extract (embedded
   line numbers skip), including braces, a switch statement, and some
   case labels; surviving lines are preserved verbatim.  */
743 record_address_regs (enum machine_mode mode, rtx x, int context,
744 enum rtx_code outer_code, enum rtx_code index_code,
747 enum rtx_code code = GET_CODE (x);
748 enum reg_class rclass;
/* Pick the class this subexpression must land in: index class when
   CONTEXT != 0, otherwise the base class for this address context.  */
751 rclass = INDEX_REG_CLASS;
753 rclass = base_reg_class (mode, outer_code, index_code);
766 /* When we have an address that is a sum, we must determine
767 whether registers are "base" or "index" regs. If there is a
768 sum of two registers, we must choose one to be the "base".
769 Luckily, we can use the REG_POINTER to make a good choice
770 most of the time. We only need to do this on machines that
771 can have two registers in an address and where the base and
772 index register classes are different.
774 ??? This code used to set REGNO_POINTER_FLAG in some cases,
775 but that seems bogus since it should only be set when we are
776 sure the register is being used as a pointer. */
778 rtx arg0 = XEXP (x, 0);
779 rtx arg1 = XEXP (x, 1);
780 enum rtx_code code0 = GET_CODE (arg0);
781 enum rtx_code code1 = GET_CODE (arg1);
783 /* Look inside subregs. */
785 arg0 = SUBREG_REG (arg0), code0 = GET_CODE (arg0);
787 arg1 = SUBREG_REG (arg1), code1 = GET_CODE (arg1);
789 /* If this machine only allows one register per address, it
790 must be in the first operand. */
791 if (MAX_REGS_PER_ADDRESS == 1)
792 record_address_regs (mode, arg0, 0, PLUS, code1, scale);
794 /* If index and base registers are the same on this machine,
795 just record registers in any non-constant operands. We
796 assume here, as well as in the tests below, that all
797 addresses are in canonical form. */
798 else if (INDEX_REG_CLASS == base_reg_class (VOIDmode, PLUS, SCRATCH))
800 record_address_regs (mode, arg0, context, PLUS, code1, scale);
801 if (! CONSTANT_P (arg1))
802 record_address_regs (mode, arg1, context, PLUS, code0, scale);
805 /* If the second operand is a constant integer, it doesn't
806 change what class the first operand must be. */
807 else if (code1 == CONST_INT || code1 == CONST_DOUBLE)
808 record_address_regs (mode, arg0, context, PLUS, code1, scale);
809 /* If the second operand is a symbolic constant, the first
810 operand must be an index register. */
811 else if (code1 == SYMBOL_REF || code1 == CONST || code1 == LABEL_REF)
812 record_address_regs (mode, arg0, 1, PLUS, code1, scale);
813 /* If both operands are registers but one is already a hard
814 register of index or reg-base class, give the other the
815 class that the hard register is not. */
816 else if (code0 == REG && code1 == REG
817 && REGNO (arg0) < FIRST_PSEUDO_REGISTER
818 && (ok_for_base_p_nonstrict (arg0, mode, PLUS, REG)
819 || ok_for_index_p_nonstrict (arg0)))
820 record_address_regs (mode, arg1,
821 ok_for_base_p_nonstrict (arg0, mode, PLUS, REG)
824 else if (code0 == REG && code1 == REG
825 && REGNO (arg1) < FIRST_PSEUDO_REGISTER
826 && (ok_for_base_p_nonstrict (arg1, mode, PLUS, REG)
827 || ok_for_index_p_nonstrict (arg1)))
828 record_address_regs (mode, arg0,
829 ok_for_base_p_nonstrict (arg1, mode, PLUS, REG)
832 /* If one operand is known to be a pointer, it must be the
833 base with the other operand the index. Likewise if the
834 other operand is a MULT. */
835 else if ((code0 == REG && REG_POINTER (arg0)) || code1 == MULT)
837 record_address_regs (mode, arg0, 0, PLUS, code1, scale);
838 record_address_regs (mode, arg1, 1, PLUS, code0, scale);
840 else if ((code1 == REG && REG_POINTER (arg1)) || code0 == MULT)
842 record_address_regs (mode, arg0, 1, PLUS, code1, scale);
843 record_address_regs (mode, arg1, 0, PLUS, code0, scale);
845 /* Otherwise, count equal chances that each might be a base or
846 index register. This case should be rare. */
/* Half cost for each role: SCALE is twice the real factor, so
   scale / 2 charges each possibility at half weight.  */
849 record_address_regs (mode, arg0, 0, PLUS, code1, scale / 2);
850 record_address_regs (mode, arg0, 1, PLUS, code1, scale / 2);
851 record_address_regs (mode, arg1, 0, PLUS, code0, scale / 2);
852 record_address_regs (mode, arg1, 1, PLUS, code0, scale / 2);
857 /* Double the importance of an allocno that is incremented or
858 decremented, since it would take two extra insns if it ends
859 up in the wrong place. */
862 record_address_regs (mode, XEXP (x, 0), 0, code,
863 GET_CODE (XEXP (XEXP (x, 1), 1)), 2 * scale);
864 if (REG_P (XEXP (XEXP (x, 1), 1)))
865 record_address_regs (mode, XEXP (XEXP (x, 1), 1), 1, code, REG,
873 /* Double the importance of an allocno that is incremented or
874 decremented, since it would take two extra insns if it ends
875 up in the wrong place. If the operand is a pseudo-register,
876 show it is being used in an INC_DEC context. */
877 #ifdef FORBIDDEN_INC_DEC_CLASSES
878 if (REG_P (XEXP (x, 0))
879 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER)
880 in_inc_dec[ALLOCNO_NUM (ira_curr_regno_allocno_map
881 [REGNO (XEXP (x, 0))])] = true;
883 record_address_regs (mode, XEXP (x, 0), 0, code, SCRATCH, 2 * scale);
/* REG case: charge the allocno for not ending up in RCLASS.  */
891 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
/* A pseudo used inside an address is a poor spill candidate.  */
894 ALLOCNO_BAD_SPILL_P (ira_curr_regno_allocno_map[REGNO (x)]) = true;
895 pp = COSTS_OF_ALLOCNO (allocno_costs,
896 ALLOCNO_NUM (ira_curr_regno_allocno_map
898 pp->mem_cost += (ira_memory_move_cost[Pmode][rclass][1] * scale) / 2;
899 for (k = 0; k < cost_classes_num; k++)
903 += (ira_get_may_move_cost (Pmode, i, rclass, true) * scale) / 2;
/* Default case: recurse into all sub-rtxes of X.  */
910 const char *fmt = GET_RTX_FORMAT (code);
912 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
914 record_address_regs (mode, XEXP (x, i), context, code, SCRATCH,
922 /* Calculate the costs of insn operands. */
/* NOTE(review): some physical lines of this function are missing from
   this extract (embedded line numbers skip); surviving lines are kept
   verbatim.  */
924 record_operand_costs (rtx insn, struct costs **op_costs,
925 enum reg_class *allocno_pref)
927 const char *constraints[MAX_RECOG_OPERANDS];
928 enum machine_mode modes[MAX_RECOG_OPERANDS];
/* Snapshot constraints and modes from the already-extracted insn.  */
931 for (i = 0; i < recog_data.n_operands; i++)
933 constraints[i] = recog_data.constraints[i];
934 modes[i] = recog_data.operand_mode[i];
937 /* If we get here, we are set up to record the costs of all the
938 operands for this insn. Start by initializing the costs. Then
939 handle any address registers. Finally record the desired classes
940 for any allocnos, doing it twice if some pair of operands are
942 for (i = 0; i < recog_data.n_operands; i++)
944 memcpy (op_costs[i], init_cost, struct_costs_size);
/* Strip SUBREGs so cost recording sees the underlying reg/mem.  */
946 if (GET_CODE (recog_data.operand[i]) == SUBREG)
947 recog_data.operand[i] = SUBREG_REG (recog_data.operand[i]);
949 if (MEM_P (recog_data.operand[i]))
950 record_address_regs (GET_MODE (recog_data.operand[i]),
951 XEXP (recog_data.operand[i], 0),
952 0, MEM, SCRATCH, frequency * 2);
953 else if (constraints[i][0] == 'p'
954 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0],
956 record_address_regs (VOIDmode, recog_data.operand[i], 0, ADDRESS,
957 SCRATCH, frequency * 2);
960 /* Check for commutative in a separate loop so everything will have
961 been initialized. We must do this even if one operand is a
962 constant--see addsi3 in m68k.md. */
963 for (i = 0; i < (int) recog_data.n_operands - 1; i++)
964 if (constraints[i][0] == '%')
966 const char *xconstraints[MAX_RECOG_OPERANDS];
969 /* Handle commutative operands by swapping the constraints.
970 We assume the modes are the same. */
971 for (j = 0; j < recog_data.n_operands; j++)
972 xconstraints[j] = constraints[j];
974 xconstraints[i] = constraints[i+1];
975 xconstraints[i+1] = constraints[i];
/* First pass with the swapped constraints for the '%' pair...  */
976 record_reg_classes (recog_data.n_alternatives, recog_data.n_operands,
977 recog_data.operand, modes,
978 xconstraints, insn, op_costs, allocno_pref);
/* ...then the normal pass with the original constraints.  */
980 record_reg_classes (recog_data.n_alternatives, recog_data.n_operands,
981 recog_data.operand, modes,
982 constraints, insn, op_costs, allocno_pref);
987 /* Process one insn INSN. Scan it and record each time it would save
988 code to put certain allocnos in a certain class. Return the last
989 insn processed, so that the scan can be continued from there. */
/* NOTE(review): some physical lines of this function are missing from
   this extract (embedded line numbers skip); surviving lines are kept
   verbatim.  */
991 scan_one_insn (rtx insn)
993 enum rtx_code pat_code;
/* Skip patterns that carry no register-class information.  */
1000 pat_code = GET_CODE (PATTERN (insn));
1001 if (pat_code == USE || pat_code == CLOBBER || pat_code == ASM_INPUT
1002 || pat_code == ADDR_VEC || pat_code == ADDR_DIFF_VEC)
1005 set = single_set (insn);
1006 extract_insn (insn);
1008 /* If this insn loads a parameter from its stack slot, then it
1009 represents a savings, rather than a cost, if the parameter is
1010 stored in memory. Record this fact. */
1011 if (set != 0 && REG_P (SET_DEST (set)) && MEM_P (SET_SRC (set))
1012 && (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != NULL_RTX
1013 && MEM_P (XEXP (note, 0)))
1015 enum reg_class cl = GENERAL_REGS;
1016 rtx reg = SET_DEST (set);
1017 int num = ALLOCNO_NUM (ira_curr_regno_allocno_map[REGNO (reg)]);
1020 cl = allocno_pref[num];
/* Credit (negative cost) for keeping the parameter in memory.  */
1021 COSTS_OF_ALLOCNO (allocno_costs, num)->mem_cost
1022 -= ira_memory_move_cost[GET_MODE (reg)][cl][1] * frequency;
1023 record_address_regs (GET_MODE (SET_SRC (set)), XEXP (SET_SRC (set), 0),
1024 0, MEM, SCRATCH, frequency * 2);
1027 record_operand_costs (insn, op_costs, allocno_pref);
1029 /* Now add the cost for each operand to the total costs for its
1031 for (i = 0; i < recog_data.n_operands; i++)
1032 if (REG_P (recog_data.operand[i])
1033 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER)
1035 int regno = REGNO (recog_data.operand[i]);
1037 = COSTS_OF_ALLOCNO (allocno_costs,
1038 ALLOCNO_NUM (ira_curr_regno_allocno_map[regno]));
1039 struct costs *q = op_costs[i];
1041 p->mem_cost += q->mem_cost;
1042 for (k = 0; k < cost_classes_num; k++)
1043 p->cost[k] += q->cost[k];
1051 /* Print allocno costs to file F. */
1053 print_costs (FILE *f)
1057 ira_allocno_iterator ai;
1060 FOR_EACH_ALLOCNO (a, ai)
1064 int regno = ALLOCNO_REGNO (a);
1066 i = ALLOCNO_NUM (a);
/* Identify the allocno by number/regno and its region (bb or loop).  */
1067 fprintf (f, " a%d(r%d,", i, regno);
1068 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
1069 fprintf (f, "b%d", bb->index);
1071 fprintf (f, "l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
1072 fprintf (f, ") costs:");
1073 for (k = 0; k < cost_classes_num; k++)
1075 rclass = cost_classes[k];
/* Only print classes that can actually hold the pseudo's mode and
   are not excluded by auto-inc/dec or mode-change restrictions.  */
1076 if (contains_reg_of_mode[rclass][PSEUDO_REGNO_MODE (regno)]
1077 #ifdef FORBIDDEN_INC_DEC_CLASSES
1078 && (! in_inc_dec[i] || ! forbidden_inc_dec_class[rclass])
1080 #ifdef CANNOT_CHANGE_MODE_CLASS
1081 && ! invalid_mode_change_p (regno, (enum reg_class) rclass,
1082 PSEUDO_REGNO_MODE (regno))
1086 fprintf (f, " %s:%d", reg_class_names[rclass],
1087 COSTS_OF_ALLOCNO (allocno_costs, i)->cost[k]);
/* In region modes also dump the accumulated (propagated) costs.  */
1088 if (flag_ira_region == IRA_REGION_ALL
1089 || flag_ira_region == IRA_REGION_MIXED)
1090 fprintf (f, ",%d", COSTS_OF_ALLOCNO (total_costs, i)->cost[k]);
1093 fprintf (f, " MEM:%i\n", COSTS_OF_ALLOCNO (allocno_costs, i)->mem_cost);
1097 /* Traverse the BB represented by LOOP_TREE_NODE to update the allocno
/* NOTE(review): the local declarations (and, presumably, a NULL-bb
   guard) are among the lines elided from this extract — confirm against
   the full source.  Sets the file-global `frequency' before scanning.  */
1100 process_bb_node_for_costs (ira_loop_tree_node_t loop_tree_node)
1105 bb = loop_tree_node->bb;
1108 frequency = REG_FREQ_FROM_BB (bb);
1111 FOR_BB_INSNS (bb, insn)
1112 insn = scan_one_insn (insn);
1115 /* Find costs of register classes and memory for allocnos and their
1118 find_allocno_class_costs (void)
1125 #ifdef FORBIDDEN_INC_DEC_CLASSES
1126 in_inc_dec = ira_allocate (sizeof (bool) * ira_allocnos_num);
1127 #endif /* FORBIDDEN_INC_DEC_CLASSES */
1128 allocno_pref = NULL;
1129 /* Normally we scan the insns once and determine the best class to
1130 use for each allocno. However, if -fexpensive-optimizations are
1131 on, we do so twice, the second time using the tentative best
1132 classes to guide the selection. */
1133 for (pass = 0; pass <= flag_expensive_optimizations; pass++)
1135 if (internal_flag_ira_verbose > 0 && ira_dump_file)
1136 fprintf (ira_dump_file, "\nPass %i for finding allocno costs\n\n",
1138 /* We could use only cover classes. Unfortunately it does not
1139 work well for some targets where some subclass of cover class
1140 is costly and wrong cover class is chosen. */
1141 for (i = 0; i < N_REG_CLASSES; i++)
1142 cost_class_nums[i] = -1;
1143 for (cost_classes_num = 0;
1144 cost_classes_num < ira_important_classes_num;
1147 cost_classes[cost_classes_num]
1148 = ira_important_classes[cost_classes_num];
1149 cost_class_nums[cost_classes[cost_classes_num]]
1153 = sizeof (struct costs) + sizeof (int) * (cost_classes_num - 1);
1154 /* Zero out our accumulation of the cost of each class for each
1156 memset (allocno_costs, 0, ira_allocnos_num * struct_costs_size);
1157 #ifdef FORBIDDEN_INC_DEC_CLASSES
1158 memset (in_inc_dec, 0, ira_allocnos_num * sizeof (bool));
1161 /* Scan the instructions and record each time it would save code
1162 to put a certain allocno in a certain class. */
1163 ira_traverse_loop_tree (true, ira_loop_tree_root,
1164 process_bb_node_for_costs, NULL);
1166 memcpy (total_costs, allocno_costs,
1167 max_struct_costs_size * ira_allocnos_num);
1169 allocno_pref = allocno_pref_buffer;
1171 /* Now for each allocno look at how desirable each class is and
1172 find which class is preferred. */
1173 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
1175 ira_allocno_t a, parent_a;
1176 int rclass, a_num, parent_a_num;
1177 ira_loop_tree_node_t parent;
1178 int best_cost, allocno_cost;
1179 enum reg_class best, alt_class;
1180 #ifdef FORBIDDEN_INC_DEC_CLASSES
1181 int inc_dec_p = false;
1184 if (ira_regno_allocno_map[i] == NULL)
1186 memset (temp_costs, 0, struct_costs_size);
1187 /* Find cost of all allocnos with the same regno. */
1188 for (a = ira_regno_allocno_map[i];
1190 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1192 a_num = ALLOCNO_NUM (a);
1193 if ((flag_ira_region == IRA_REGION_ALL
1194 || flag_ira_region == IRA_REGION_MIXED)
1195 && (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
1196 && (parent_a = parent->regno_allocno_map[i]) != NULL
1197 /* There are no caps yet. */
1198 && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
1201 /* Propagate costs to upper levels in the region
1203 parent_a_num = ALLOCNO_NUM (parent_a);
1204 for (k = 0; k < cost_classes_num; k++)
1205 COSTS_OF_ALLOCNO (total_costs, parent_a_num)->cost[k]
1206 += COSTS_OF_ALLOCNO (total_costs, a_num)->cost[k];
1207 COSTS_OF_ALLOCNO (total_costs, parent_a_num)->mem_cost
1208 += COSTS_OF_ALLOCNO (total_costs, a_num)->mem_cost;
1210 for (k = 0; k < cost_classes_num; k++)
1212 += COSTS_OF_ALLOCNO (allocno_costs, a_num)->cost[k];
1213 temp_costs->mem_cost
1214 += COSTS_OF_ALLOCNO (allocno_costs, a_num)->mem_cost;
1215 #ifdef FORBIDDEN_INC_DEC_CLASSES
1216 if (in_inc_dec[a_num])
1220 best_cost = (1 << (HOST_BITS_PER_INT - 2)) - 1;
1222 alt_class = NO_REGS;
1223 /* Find best common class for all allocnos with the same
1225 for (k = 0; k < cost_classes_num; k++)
1227 rclass = cost_classes[k];
1228 /* Ignore classes that are too small for this operand or
1229 invalid for an operand that was auto-incremented. */
1230 if (! contains_reg_of_mode[rclass][PSEUDO_REGNO_MODE (i)]
1231 #ifdef FORBIDDEN_INC_DEC_CLASSES
1232 || (inc_dec_p && forbidden_inc_dec_class[rclass])
1234 #ifdef CANNOT_CHANGE_MODE_CLASS
1235 || invalid_mode_change_p (i, (enum reg_class) rclass,
1236 PSEUDO_REGNO_MODE (i))
1240 if (temp_costs->cost[k] < best_cost)
1242 best_cost = temp_costs->cost[k];
1243 best = (enum reg_class) rclass;
1245 else if (temp_costs->cost[k] == best_cost)
1246 best = ira_reg_class_union[best][rclass];
1247 if (pass == flag_expensive_optimizations
1248 && temp_costs->cost[k] < temp_costs->mem_cost
1249 && (reg_class_size[reg_class_subunion[alt_class][rclass]]
1250 > reg_class_size[alt_class]))
1251 alt_class = reg_class_subunion[alt_class][rclass];
1253 alt_class = ira_class_translate[alt_class];
1254 if (pass == flag_expensive_optimizations)
1256 if (best_cost > temp_costs->mem_cost)
1257 best = alt_class = NO_REGS;
1258 else if (best == alt_class)
1259 alt_class = NO_REGS;
1260 setup_reg_classes (i, best, alt_class);
1261 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
1262 fprintf (ira_dump_file,
1263 " r%d: preferred %s, alternative %s\n",
1264 i, reg_class_names[best], reg_class_names[alt_class]);
1266 if (best_cost > temp_costs->mem_cost)
1267 common_classes[i] = NO_REGS;
1268 else if (flag_ira_algorithm == IRA_ALGORITHM_PRIORITY)
1269 /* Make the common class the biggest class of best and
1271 common_classes[i] = alt_class == NO_REGS ? best : alt_class;
1273 /* Make the common class a cover class. Remember all
1274 allocnos with the same regno should have the same cover
1276 common_classes[i] = ira_class_translate[best];
1277 for (a = ira_regno_allocno_map[i];
1279 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1281 a_num = ALLOCNO_NUM (a);
1282 if (common_classes[i] == NO_REGS)
1286 /* Finding best class which is subset of the common
1288 best_cost = (1 << (HOST_BITS_PER_INT - 2)) - 1;
1289 allocno_cost = best_cost;
1291 for (k = 0; k < cost_classes_num; k++)
1293 rclass = cost_classes[k];
1294 if (! ira_class_subset_p[rclass][common_classes[i]])
1296 /* Ignore classes that are too small for this
1297 operand or invalid for an operand that was
1298 auto-incremented. */
1299 if (! contains_reg_of_mode[rclass][PSEUDO_REGNO_MODE (i)]
1300 #ifdef FORBIDDEN_INC_DEC_CLASSES
1301 || (inc_dec_p && forbidden_inc_dec_class[rclass])
1303 #ifdef CANNOT_CHANGE_MODE_CLASS
1304 || invalid_mode_change_p (i, (enum reg_class) rclass,
1305 PSEUDO_REGNO_MODE (i))
1309 else if (COSTS_OF_ALLOCNO (total_costs, a_num)->cost[k]
1313 = COSTS_OF_ALLOCNO (total_costs, a_num)->cost[k];
1315 = COSTS_OF_ALLOCNO (allocno_costs, a_num)->cost[k];
1316 best = (enum reg_class) rclass;
1318 else if (COSTS_OF_ALLOCNO (total_costs, a_num)->cost[k]
1321 best = ira_reg_class_union[best][rclass];
1323 = MAX (allocno_cost,
1324 COSTS_OF_ALLOCNO (allocno_costs,
1328 ALLOCNO_COVER_CLASS_COST (a) = allocno_cost;
1330 ira_assert (flag_ira_algorithm == IRA_ALGORITHM_PRIORITY
1331 || ira_class_translate[best] == common_classes[i]);
1332 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL
1333 && (pass == 0 || allocno_pref[a_num] != best))
1335 fprintf (ira_dump_file, " a%d (r%d,", a_num, i);
1336 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
1337 fprintf (ira_dump_file, "b%d", bb->index);
1339 fprintf (ira_dump_file, "l%d",
1340 ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
1341 fprintf (ira_dump_file, ") best %s, cover %s\n",
1342 reg_class_names[best],
1343 reg_class_names[common_classes[i]]);
1345 allocno_pref[a_num] = best;
1349 if (internal_flag_ira_verbose > 4 && ira_dump_file)
1351 print_costs (ira_dump_file);
1352 fprintf (ira_dump_file,"\n");
1355 #ifdef FORBIDDEN_INC_DEC_CLASSES
1356 ira_free (in_inc_dec);
1362 /* Process moves involving hard regs to modify allocno hard register
1363 costs. We can do this only after determining allocno cover class.
1364 If a hard register forms a register class, than moves with the hard
1365 register are already taken into account in class costs for the
1368 process_bb_node_for_hard_reg_moves (ira_loop_tree_node_t loop_tree_node)
1370 int i, freq, cost, src_regno, dst_regno, hard_regno;
1373 enum reg_class rclass, hard_reg_class;
1374 enum machine_mode mode;
1376 rtx insn, set, src, dst;
1378 bb = loop_tree_node->bb;
1381 freq = REG_FREQ_FROM_BB (bb);
1384 FOR_BB_INSNS (bb, insn)
1386 if (! INSN_P (insn))
1388 set = single_set (insn);
1389 if (set == NULL_RTX)
1391 dst = SET_DEST (set);
1392 src = SET_SRC (set);
1393 if (! REG_P (dst) || ! REG_P (src))
1395 dst_regno = REGNO (dst);
1396 src_regno = REGNO (src);
1397 if (dst_regno >= FIRST_PSEUDO_REGISTER
1398 && src_regno < FIRST_PSEUDO_REGISTER)
1400 hard_regno = src_regno;
1402 a = ira_curr_regno_allocno_map[dst_regno];
1404 else if (src_regno >= FIRST_PSEUDO_REGISTER
1405 && dst_regno < FIRST_PSEUDO_REGISTER)
1407 hard_regno = dst_regno;
1409 a = ira_curr_regno_allocno_map[src_regno];
1413 rclass = ALLOCNO_COVER_CLASS (a);
1414 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], hard_regno))
1416 i = ira_class_hard_reg_index[rclass][hard_regno];
1419 mode = ALLOCNO_MODE (a);
1420 hard_reg_class = REGNO_REG_CLASS (hard_regno);
1422 = (to_p ? ira_get_register_move_cost (mode, hard_reg_class, rclass)
1423 : ira_get_register_move_cost (mode, rclass, hard_reg_class)) * freq;
1424 ira_allocate_and_set_costs (&ALLOCNO_HARD_REG_COSTS (a), rclass,
1425 ALLOCNO_COVER_CLASS_COST (a));
1426 ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
1428 ALLOCNO_HARD_REG_COSTS (a)[i] -= cost;
1429 ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[i] -= cost;
1430 ALLOCNO_COVER_CLASS_COST (a) = MIN (ALLOCNO_COVER_CLASS_COST (a),
1431 ALLOCNO_HARD_REG_COSTS (a)[i]);
1435 /* After we find hard register and memory costs for allocnos, define
1436 their cover classes and modify the hard register costs for insns
1437 moving an allocno to/from hard registers. */
1439 setup_allocno_cover_class_and_costs (void)
1441 int i, j, n, regno, num;
1443 enum reg_class cover_class, rclass;
1444 enum machine_mode mode;
1447 ira_allocno_iterator ai;
1449 FOR_EACH_ALLOCNO (a, ai)
1451 i = ALLOCNO_NUM (a);
1452 mode = ALLOCNO_MODE (a);
/* The cover class was decided per original regno by the earlier cost
   pass and stored in common_classes.  */
1453 cover_class = common_classes[ALLOCNO_REGNO (a)];
1454 ira_assert (allocno_pref[i] == NO_REGS || cover_class != NO_REGS);
1455 ALLOCNO_MEMORY_COST (a) = COSTS_OF_ALLOCNO (allocno_costs, i)->mem_cost;
1456 ira_set_allocno_cover_class (a, cover_class);
/* Allocnos that must live in memory need no per-register costs.  */
1457 if (cover_class == NO_REGS)
1459 ALLOCNO_AVAILABLE_REGS_NUM (a) = ira_available_class_regs[cover_class];
/* NOTE(review): mis-encoded text below -- almost certainly
   "pref = &reg_class_contents[...]" originally; left byte-identical.  */
1460 pref = ®_class_contents[allocno_pref[i]];
/* When the preferred class differs from the cover class, build an
   explicit per-hard-register cost vector.  */
1461 if (optimize && ALLOCNO_COVER_CLASS (a) != allocno_pref[i])
1463 n = ira_class_hard_regs_num[cover_class];
1464 ALLOCNO_HARD_REG_COSTS (a)
1465 = reg_costs = ira_allocate_cost_vector (cover_class);
1466 for (j = n - 1; j >= 0; j--)
1468 regno = ira_class_hard_regs[cover_class][j];
/* Registers in the preferred class get the cheap class cost.  */
1469 if (TEST_HARD_REG_BIT (*pref, regno))
1470 reg_costs[j] = ALLOCNO_COVER_CLASS_COST (a);
/* Otherwise charge the cost computed for the register's own class.  */
1473 rclass = REGNO_REG_CLASS (regno);
1474 num = cost_class_nums[rclass];
1477 /* The hard register class is not a cover class or a
1478 class not fully inside in a cover class -- use
1479 the allocno cover class. */
1480 ira_assert (ira_hard_regno_cover_class[regno]
1482 num = cost_class_nums[cover_class];
1484 reg_costs[j] = COSTS_OF_ALLOCNO (allocno_costs, i)->cost[num];
/* Finally fold in the effect of explicit hard-reg moves seen in the
   insn stream.  */
1490 ira_traverse_loop_tree (true, ira_loop_tree_root,
1491 process_bb_node_for_hard_reg_moves, NULL);
1496 /* Function called once during compiler work.  Clears the lazily
   allocated cost vectors so the first real init can detect them as
   unallocated. */
1498 ira_init_costs_once (void)
1503 for (i = 0; i < MAX_RECOG_OPERANDS; i++)
1506 this_op_costs[i] = NULL;
1509 cost_classes = NULL;
1512 /* Free allocated temporary cost vectors. */
1514 free_ira_costs (void)
/* Each vector is freed only if allocated and reset to NULL so this
   function is safe to call repeatedly.
   NOTE(review): the "if (x != NULL) free (x)" guards are redundant --
   free (NULL) is a no-op -- but are kept as-is.  */
1518 if (init_cost != NULL)
1521 for (i = 0; i < MAX_RECOG_OPERANDS; i++)
1523 if (op_costs[i] != NULL)
1525 if (this_op_costs[i] != NULL)
1526 free (this_op_costs[i]);
1527 op_costs[i] = this_op_costs[i] = NULL;
1529 if (temp_costs != NULL)
1532 if (cost_classes != NULL)
1533 free (cost_classes);
1534 cost_classes = NULL;
1537 /* This is called each time register related information is
/* (Re)allocate all the temporary cost vectors sized by the current
   number of important register classes.  */
1540 ira_init_costs (void)
/* One trailing int is already inside struct costs, hence the "- 1".  */
1545 max_struct_costs_size
1546 = sizeof (struct costs) + sizeof (int) * (ira_important_classes_num - 1);
1547 /* Don't use ira_allocate because vectors live through several IRA calls. */
1548 init_cost = (struct costs *) xmalloc (max_struct_costs_size);
/* 1000000 acts as a "very expensive" default before real costs are
   accumulated.  */
1549 init_cost->mem_cost = 1000000;
1550 for (i = 0; i < ira_important_classes_num; i++)
1551 init_cost->cost[i] = 1000000;
1552 for (i = 0; i < MAX_RECOG_OPERANDS; i++)
1554 op_costs[i] = (struct costs *) xmalloc (max_struct_costs_size);
1555 this_op_costs[i] = (struct costs *) xmalloc (max_struct_costs_size);
1557 temp_costs = (struct costs *) xmalloc (max_struct_costs_size);
1558 cost_classes = (enum reg_class *) xmalloc (sizeof (enum reg_class)
1559 * ira_important_classes_num);
1562 /* Function called once at the end of compiler work.  Body lines are
   missing from this listing; presumably it releases the vectors
   allocated in ira_init_costs (via free_ira_costs) -- confirm against
   the full source. */
1564 ira_finish_costs_once (void)
1571 /* Entry function which defines cover class, memory and hard register
1572 costs for each allocno. */
/* NOTE(review): the function signature line (ira_costs) is missing from
   this listing; the body below allocates per-allocno scratch arrays,
   runs the two cost phases, and frees everything but allocno_costs'
   results stored in the allocnos themselves.  */
1576 allocno_costs = (struct costs *) ira_allocate (max_struct_costs_size
1577 * ira_allocnos_num);
1578 total_costs = (struct costs *) ira_allocate (max_struct_costs_size
1579 * ira_allocnos_num);
1581 = (enum reg_class *) ira_allocate (sizeof (enum reg_class)
1582 * ira_allocnos_num);
1584 = (enum reg_class *) ira_allocate (sizeof (enum reg_class)
/* Phase 1: find class costs and preferred classes per allocno.  */
1586 find_allocno_class_costs ();
/* Phase 2: fix cover classes and per-hard-register cost vectors.  */
1587 setup_allocno_cover_class_and_costs ();
1588 ira_free (common_classes);
1589 ira_free (allocno_pref_buffer);
1590 ira_free (total_costs);
1591 ira_free (allocno_costs);
1596 /* Change hard register costs for allocnos which live through
1597 function calls. This is called only when we found all intersected
1598 calls during building allocno live ranges. */
1600 ira_tune_allocno_costs_and_cover_classes (void)
1603 int cost, min_cost, *reg_costs;
1604 enum reg_class cover_class, rclass;
1605 enum machine_mode mode;
1607 ira_allocno_iterator ai;
1609 FOR_EACH_ALLOCNO (a, ai)
1611 cover_class = ALLOCNO_COVER_CLASS (a);
/* Memory-only allocnos have no hard register costs to tune.  */
1612 if (cover_class == NO_REGS)
1614 mode = ALLOCNO_MODE (a);
1615 n = ira_class_hard_regs_num[cover_class];
/* Only allocnos whose live range crosses at least one call are
   penalized for call-clobbered registers.  */
1617 if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
1619 ira_allocate_and_set_costs
1620 (&ALLOCNO_HARD_REG_COSTS (a), cover_class,
1621 ALLOCNO_COVER_CLASS_COST (a));
1622 reg_costs = ALLOCNO_HARD_REG_COSTS (a);
1623 for (j = n - 1; j >= 0; j--)
1625 regno = ira_class_hard_regs[cover_class][j];
1626 rclass = REGNO_REG_CLASS (regno);
/* A call-clobbered (or partially clobbered) register would force a
   save/restore around each call: charge the memory-move cost both
   ways, scaled by the allocno's call frequency.  */
1628 if (! ira_hard_reg_not_in_set_p (regno, mode, call_used_reg_set)
1629 || HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
1630 cost += (ALLOCNO_CALL_FREQ (a)
1631 * (ira_memory_move_cost[mode][rclass][0]
1632 + ira_memory_move_cost[mode][rclass][1]));
/* Optional target hook: extra per-register cost multiplier.  */
1633 #ifdef IRA_HARD_REGNO_ADD_COST_MULTIPLIER
1634 cost += ((ira_memory_move_cost[mode][rclass][0]
1635 + ira_memory_move_cost[mode][rclass][1])
1637 * IRA_HARD_REGNO_ADD_COST_MULTIPLIER (regno) / 2);
1639 reg_costs[j] += cost;
/* Track the cheapest register to refresh the class cost below.  */
1640 if (min_cost > reg_costs[j])
1641 min_cost = reg_costs[j];
1644 if (min_cost != INT_MAX)
1645 ALLOCNO_COVER_CLASS_COST (a) = min_cost;