Merge branch 'vendor/GCC47'
[dragonfly.git] / contrib / gcc-4.7 / gcc / tree-ssa-phiprop.c
CommitLineData
e4b17023
JM
1/* Backward propagation of indirect loads through PHIs.
2 Copyright (C) 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
3 Contributed by Richard Guenther <rguenther@suse.de>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "tm_p.h"
27#include "basic-block.h"
28#include "timevar.h"
29#include "tree-pretty-print.h"
30#include "gimple-pretty-print.h"
31#include "tree-flow.h"
32#include "tree-pass.h"
33#include "tree-dump.h"
34#include "langhooks.h"
35#include "flags.h"
36
37/* This pass propagates indirect loads through the PHI node for its
38 address to make the load source possibly non-addressable and to
39 allow for PHI optimization to trigger.
40
41 For example the pass changes
42
43 # addr_1 = PHI <&a, &b>
44 tmp_1 = *addr_1;
45
46 to
47
48 # tmp_1 = PHI <a, b>
49
50 but also handles more complex scenarios like
51
52 D.2077_2 = &this_1(D)->a1;
53 ...
54
55 # b_12 = PHI <&c(2), D.2077_2(3)>
56 D.2114_13 = *b_12;
57 ...
58
59 # b_15 = PHI <b_12(4), &b(5)>
60 D.2080_5 = &this_1(D)->a0;
61 ...
62
63 # b_18 = PHI <D.2080_5(6), &c(7)>
64 ...
65
66 # b_21 = PHI <b_15(8), b_18(9)>
67 D.2076_8 = *b_21;
68
   where the addresses loaded are defined by PHI nodes themselves.
70 The above happens for
71
72 std::max(std::min(a0, c), std::min(std::max(a1, c), b))
73
74 where this pass transforms it to a form later PHI optimization
75 recognizes and transforms it to the simple
76
77 D.2109_10 = this_1(D)->a1;
78 D.2110_11 = c;
79 D.2114_31 = MAX_EXPR <D.2109_10, D.2110_11>;
80 D.2115_14 = b;
81 D.2125_17 = MIN_EXPR <D.2115_14, D.2114_31>;
82 D.2119_16 = this_1(D)->a0;
83 D.2124_32 = MIN_EXPR <D.2110_11, D.2119_16>;
84 D.2076_33 = MAX_EXPR <D.2125_17, D.2124_32>;
85
86 The pass does a dominator walk processing loads using a basic-block
87 local analysis and stores the result for use by transformations on
88 dominated basic-blocks. */
89
90
/* Structure to keep track of the value of a dereferenced PHI result
   and the virtual operand used for that dereference.  Indexed by
   SSA name version in the pass-global PHIVN array.  */

struct phiprop_d
{
  /* SSA name holding the value loaded through the PHI result, as
     recorded by propagate_with_phi after phiprop_insert_phi created
     it.  NULL_TREE while no transformation has been done.  */
  tree value;
  /* Virtual use (VUSE) of the original load statement; checked by
     phivn_valid_p to ensure VALUE is still valid at a later block.  */
  tree vuse;
};
99
/* Verify if the value recorded for NAME in PHIVN is still valid at
   the start of basic block BB, i.e. that no store (VDEF) or PHI that
   uses the recorded virtual operand lies on a path into BB.  */

static bool
phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
{
  tree vuse = phivn[SSA_NAME_VERSION (name)].vuse;
  gimple use_stmt;
  imm_use_iterator ui2;
  bool ok = true;

  /* The def stmts of the virtual uses need to be dominated by bb.  */
  gcc_assert (vuse != NULL_TREE);

  /* Walk all immediate uses of the recorded virtual operand.  */
  FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)
    {
      /* If BB does not dominate a VDEF, the value is invalid.  Virtual
	 PHI nodes count as (potential) VDEFs as well.  */
      if ((gimple_vdef (use_stmt) != NULL_TREE
	   || gimple_code (use_stmt) == GIMPLE_PHI)
	  && !dominated_by_p (CDI_DOMINATORS, gimple_bb (use_stmt), bb))
	{
	  ok = false;
	  /* Must break via the macro so iterator state is unwound.  */
	  BREAK_FROM_IMM_USE_STMT (ui2);
	}
    }

  return ok;
}
128
/* Insert a new phi node for the dereference of PHI at basic_block
   BB with the virtual operands from USE_STMT.

   PHI is the address PHI node, USE_STMT the MEM_REF load of the PHI
   result being replaced.  PHIVN/N record previously created values
   (see struct phiprop_d); arguments whose dereference was already
   materialized are reused from there.  Returns the SSA name that now
   holds the loaded value (the old lhs of USE_STMT, redefined by the
   new PHI).  Loads for the remaining arguments are queued on the
   incoming edges via gsi_insert_on_edge; the caller must commit
   edge insertions.  */

static tree
phiprop_insert_phi (basic_block bb, gimple phi, gimple use_stmt,
		    struct phiprop_d *phivn, size_t n)
{
  tree res;
  gimple new_phi;
  edge_iterator ei;
  edge e;

  gcc_assert (is_gimple_assign (use_stmt)
	      && gimple_assign_rhs_code (use_stmt) == MEM_REF);

  /* Build a new PHI node to replace the definition of
     the indirect reference lhs.  */
  res = gimple_assign_lhs (use_stmt);
  SSA_NAME_DEF_STMT (res) = new_phi = create_phi_node (res, bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserting PHI for result of load ");
      print_gimple_stmt (dump_file, use_stmt, 0, 0);
    }

  /* Add PHI arguments for each edge inserting loads of the
     addressable operands.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      tree old_arg, new_var;
      gimple tmp;
      source_location locus;

      old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
      locus = gimple_phi_arg_location_from_edge (phi, e);
      /* Follow single-rhs definitions until we hit an SSA name we
	 already created a value for (version < n and value recorded)
	 or a non-SSA operand (an ADDR_EXPR, asserted below).  */
      while (TREE_CODE (old_arg) == SSA_NAME
	     && (SSA_NAME_VERSION (old_arg) >= n
		 || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (old_arg);
	  old_arg = gimple_assign_rhs1 (def_stmt);
	  locus = gimple_location (def_stmt);
	}

      if (TREE_CODE (old_arg) == SSA_NAME)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  for edge defining ");
	      print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e), 0);
	      fprintf (dump_file, " reusing PHI result ");
	      print_generic_expr (dump_file,
				  phivn[SSA_NAME_VERSION (old_arg)].value, 0);
	      fprintf (dump_file, "\n");
	    }
	  /* Reuse a formerly created dereference.  */
	  new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
	}
      else
	{
	  tree rhs = gimple_assign_rhs1 (use_stmt);
	  gcc_assert (TREE_CODE (old_arg) == ADDR_EXPR);
	  new_var = create_tmp_reg (TREE_TYPE (rhs), NULL);
	  /* For a non-invariant address, load through the original PHI
	     argument; for an invariant one, dereference an unshared
	     copy of the address itself.  */
	  if (!is_gimple_min_invariant (old_arg))
	    old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
	  else
	    old_arg = unshare_expr (old_arg);
	  /* Build the replacement load, keeping the offset operand of
	     the original MEM_REF.  */
	  tmp = gimple_build_assign (new_var,
				     fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  old_arg,
						  TREE_OPERAND (rhs, 1)));
	  gcc_assert (is_gimple_reg (new_var));
	  add_referenced_var (new_var);
	  new_var = make_ssa_name (new_var, tmp);
	  gimple_assign_set_lhs (tmp, new_var);
	  gimple_set_location (tmp, locus);

	  /* Queue the load on the incoming edge; committed by the
	     caller via gsi_commit_edge_inserts.  */
	  gsi_insert_on_edge (e, tmp);
	  update_stmt (tmp);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  for edge defining ");
	      print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e), 0);
	      fprintf (dump_file, " inserting load ");
	      print_gimple_stmt (dump_file, tmp, 0, 0);
	    }
	}

      add_phi_arg (new_phi, new_var, e, locus);
    }

  update_stmt (new_phi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_gimple_stmt (dump_file, new_phi, 0, 0);

  return res;
}
229
/* Propagate between the phi node arguments of PHI in BB and phi result
   users.  For now this matches
        # p_2 = PHI <&x, &y>
      <Lx>:;
	p_3 = p_2;
	z_2 = *p_3;
   and converts it to
        # z_2 = PHI <x, y>
      <Lx>:;
   Returns true if a transformation was done and edge insertions
   need to be committed.  Global data PHIVN and N is used to track
   past transformation results.  We need to be especially careful here
   with aliasing issues as we are moving memory reads.  */

static bool
propagate_with_phi (basic_block bb, gimple phi, struct phiprop_d *phivn,
		    size_t n)
{
  tree ptr = PHI_RESULT (phi);
  gimple use_stmt;
  tree res = NULL_TREE;
  gimple_stmt_iterator gsi;
  imm_use_iterator ui;
  use_operand_p arg_p, use;
  ssa_op_iter i;
  bool phi_inserted;
  tree type = NULL_TREE;
  bool one_invariant = false;

  /* Only handle PHIs of pointers whose pointed-to type is a register
     type, i.e. whose dereference can live in an SSA name.  */
  if (!POINTER_TYPE_P (TREE_TYPE (ptr))
      || !is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr))))
    return false;

  /* Check if we can "cheaply" dereference all phi arguments.  */
  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
    {
      tree arg = USE_FROM_PTR (arg_p);
      /* Walk the ssa chain until we reach a ssa name we already
	 created a value for or we reach a definition of the form
	 ssa_name_n = &var;  */
      while (TREE_CODE (arg) == SSA_NAME
	     && !SSA_NAME_IS_DEFAULT_DEF (arg)
	     && (SSA_NAME_VERSION (arg) >= n
		 || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (arg);
	  if (!gimple_assign_single_p (def_stmt))
	    return false;
	  arg = gimple_assign_rhs1 (def_stmt);
	}
      /* The argument must either be an address or an SSA name with a
	 recorded, type-compatible value that is still valid at BB.  */
      if (TREE_CODE (arg) != ADDR_EXPR
	  && !(TREE_CODE (arg) == SSA_NAME
	       && SSA_NAME_VERSION (arg) < n
	       && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
	       && (!type
		   || types_compatible_p
		       (type, TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value)))
	       && phivn_valid_p (phivn, arg, bb)))
	return false;
      /* Latch the type of the first reusable recorded value; later
	 arguments and loads must be compatible with it.  */
      if (!type
	  && TREE_CODE (arg) == SSA_NAME)
	type = TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value);
      if (TREE_CODE (arg) == ADDR_EXPR
	  && is_gimple_min_invariant (arg))
	one_invariant = true;
    }

  /* If we neither have an address of a decl nor can reuse a previously
     inserted load, do not hoist anything.  */
  if (!one_invariant
      && !type)
    return false;

  /* Find a dereferencing use.  First follow (single use) ssa
     copy chains for ptr.  */
  while (single_imm_use (ptr, &use, &use_stmt)
	 && gimple_assign_ssa_name_copy_p (use_stmt))
    ptr = gimple_assign_lhs (use_stmt);

  /* Replace the first dereference of *ptr if there is one and if we
     can move the loads to the place of the ptr phi node.  */
  phi_inserted = false;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
    {
      gimple def_stmt;
      tree vuse;

      /* Only replace loads in blocks that post-dominate the PHI node.  That
	 makes sure we don't end up speculating loads.  */
      if (!dominated_by_p (CDI_POST_DOMINATORS,
			   bb, gimple_bb (use_stmt)))
	continue;

      /* Check whether this is a load of *ptr.  */
      if (!(is_gimple_assign (use_stmt)
	    && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	    && gimple_assign_rhs_code (use_stmt) == MEM_REF
	    && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == ptr
	    && integer_zerop (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 1))
	    && (!type
		|| types_compatible_p
		     (TREE_TYPE (gimple_assign_lhs (use_stmt)), type))
	    /* We cannot replace a load that may throw or is volatile.  */
	    && !stmt_can_throw_internal (use_stmt)))
	continue;

      /* Check if we can move the loads.  The def stmt of the virtual use
	 needs to be in a different basic block dominating bb.  */
      vuse = gimple_vuse (use_stmt);
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
	  && (gimple_bb (def_stmt) == bb
	      || !dominated_by_p (CDI_DOMINATORS,
				  bb, gimple_bb (def_stmt))))
	goto next;

      /* Found a proper dereference.  Insert a phi node if this
	 is the first load transformation.  */
      if (!phi_inserted)
	{
	  res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n);
	  type = TREE_TYPE (res);

	  /* Remember the value we created for *ptr.  */
	  phivn[SSA_NAME_VERSION (ptr)].value = res;
	  phivn[SSA_NAME_VERSION (ptr)].vuse = vuse;

	  /* Remove old stmt.  The phi is taken care of by DCE, if we
	     want to delete it here we also have to delete all intermediate
	     copies.  */
	  gsi = gsi_for_stmt (use_stmt);
	  gsi_remove (&gsi, true);

	  phi_inserted = true;
	}
      else
	{
	  /* Further replacements are easy, just make a copy out of the
	     load.  */
	  gimple_assign_set_rhs1 (use_stmt, res);
	  update_stmt (use_stmt);
	}

next:;
      /* Continue searching for a proper dereference.  */
    }

  return phi_inserted;
}
379
/* Main entry for phiprop pass.  Walks all basic blocks dominated by
   the entry block in dominator-tree preorder and tries to propagate
   loads through each PHI node's address arguments.  Always returns 0
   (no extra TODO flags).  */

static unsigned int
tree_ssa_phiprop (void)
{
  VEC(basic_block, heap) *bbs;
  struct phiprop_d *phivn;
  bool did_something = false;
  basic_block bb;
  gimple_stmt_iterator gsi;
  unsigned i;
  size_t n;

  /* Both dominators (for validity/movement checks) and post-dominators
     (to avoid speculating loads) are needed by propagate_with_phi.  */
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  /* PHIVN is indexed by SSA name version; zero-initialized so all
     recorded values start out as NULL_TREE.  Names created during the
     pass have version >= n and are never looked up.  */
  n = num_ssa_names;
  phivn = XCNEWVEC (struct phiprop_d, n);

  /* Walk the dominator tree in preorder.  */
  bbs = get_all_dominated_blocks (CDI_DOMINATORS,
				  single_succ (ENTRY_BLOCK_PTR));
  FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
    for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      did_something |= propagate_with_phi (bb, gsi_stmt (gsi), phivn, n);

  /* Materialize the loads queued on edges by phiprop_insert_phi.  */
  if (did_something)
    gsi_commit_edge_inserts ();

  VEC_free (basic_block, heap, bbs);
  free (phivn);

  free_dominance_info (CDI_POST_DOMINATORS);

  return 0;
}
416
417static bool
418gate_phiprop (void)
419{
420 return flag_tree_phiprop;
421}
422
/* Pass descriptor wiring gate_phiprop and tree_ssa_phiprop into the
   GIMPLE pass manager; requires CFG and SSA form.  */

struct gimple_opt_pass pass_phiprop =
{
 {
  GIMPLE_PASS,
  "phiprop",			/* name */
  gate_phiprop,			/* gate */
  tree_ssa_phiprop,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_PHIPROP,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa		/* todo_flags_finish */
 }
};