Commit | Line | Data |
---|---|---|
e4b17023 JM |
1 | /* Dead code elimination pass for the GNU compiler. |
2 | Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 | |
3 | Free Software Foundation, Inc. | |
4 | Contributed by Ben Elliston <bje@redhat.com> | |
5 | and Andrew MacLeod <amacleod@redhat.com> | |
6 | Adapted to use control dependence by Steven Bosscher, SUSE Labs. | |
7 | ||
8 | This file is part of GCC. | |
9 | ||
10 | GCC is free software; you can redistribute it and/or modify it | |
11 | under the terms of the GNU General Public License as published by the | |
12 | Free Software Foundation; either version 3, or (at your option) any | |
13 | later version. | |
14 | ||
15 | GCC is distributed in the hope that it will be useful, but WITHOUT | |
16 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
17 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
18 | for more details. | |
19 | ||
20 | You should have received a copy of the GNU General Public License | |
21 | along with GCC; see the file COPYING3. If not see | |
22 | <http://www.gnu.org/licenses/>. */ | |
23 | ||
24 | /* Dead code elimination. | |
25 | ||
26 | References: | |
27 | ||
28 | Building an Optimizing Compiler, | |
29 | Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9. | |
30 | ||
31 | Advanced Compiler Design and Implementation, | |
32 | Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10. | |
33 | ||
34 | Dead-code elimination is the removal of statements which have no | |
35 | impact on the program's output. "Dead statements" have no impact | |
36 | on the program's output, while "necessary statements" may have | |
37 | impact on the output. | |
38 | ||
39 | The algorithm consists of three phases: | |
40 | 1. Marking as necessary all statements known to be necessary, | |
41 | e.g. most function calls, writing a value to memory, etc; | |
42 | 2. Propagating necessary statements, e.g., the statements | |
43 | giving values to operands in necessary statements; and | |
44 | 3. Removing dead statements. */ | |
45 | ||
46 | #include "config.h" | |
47 | #include "system.h" | |
48 | #include "coretypes.h" | |
49 | #include "tm.h" | |
50 | ||
51 | #include "tree.h" | |
52 | #include "tree-pretty-print.h" | |
53 | #include "gimple-pretty-print.h" | |
54 | #include "basic-block.h" | |
55 | #include "tree-flow.h" | |
56 | #include "gimple.h" | |
57 | #include "tree-dump.h" | |
58 | #include "tree-pass.h" | |
59 | #include "timevar.h" | |
60 | #include "flags.h" | |
61 | #include "cfgloop.h" | |
62 | #include "tree-scalar-evolution.h" | |
63 | ||
/* Per-invocation statistics for the pass, reported in the dump file.  */
static struct stmt_stats
{
  int total;		/* Number of statements examined.  */
  int total_phis;	/* Number of PHI nodes examined.  */
  int removed;		/* Number of statements deleted as dead.  */
  int removed_phis;	/* Number of PHI nodes deleted as dead.  */
} stats;
71 | ||
/* Pass-local statement flag marking a statement as necessary (live).  */
#define STMT_NECESSARY GF_PLF_1

/* Worklist of statements whose necessity must still be propagated to
   the statements feeding their operands.  */
static VEC(gimple,heap) *worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static bitmap *control_dependence_map;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* Execute code that follows the macro for each edge (given number
   EDGE_NUMBER within the CODE) for which the block with index N is
   control dependent.  */
#define EXECUTE_IF_CONTROL_DEPENDENT(BI, N, EDGE_NUMBER) \
  EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[(N)], 0, \
			    (EDGE_NUMBER), (BI))
113 | ||
114 | ||
115 | /* Indicate block BB is control dependent on an edge with index EDGE_INDEX. */ | |
116 | static inline void | |
117 | set_control_dependence_map_bit (basic_block bb, int edge_index) | |
118 | { | |
119 | if (bb == ENTRY_BLOCK_PTR) | |
120 | return; | |
121 | gcc_assert (bb != EXIT_BLOCK_PTR); | |
122 | bitmap_set_bit (control_dependence_map[bb->index], edge_index); | |
123 | } | |
124 | ||
/* Clear all control dependences recorded for block BB.  */
static inline void
clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}
131 | ||
132 | ||
133 | /* Find the immediate postdominator PDOM of the specified basic block BLOCK. | |
134 | This function is necessary because some blocks have negative numbers. */ | |
135 | ||
136 | static inline basic_block | |
137 | find_pdom (basic_block block) | |
138 | { | |
139 | gcc_assert (block != ENTRY_BLOCK_PTR); | |
140 | ||
141 | if (block == EXIT_BLOCK_PTR) | |
142 | return EXIT_BLOCK_PTR; | |
143 | else | |
144 | { | |
145 | basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block); | |
146 | if (! bb) | |
147 | return EXIT_BLOCK_PTR; | |
148 | return bb; | |
149 | } | |
150 | } | |
151 | ||
152 | ||
153 | /* Determine all blocks' control dependences on the given edge with edge_list | |
154 | EL index EDGE_INDEX, ala Morgan, Section 3.6. */ | |
155 | ||
156 | static void | |
157 | find_control_dependence (struct edge_list *el, int edge_index) | |
158 | { | |
159 | basic_block current_block; | |
160 | basic_block ending_block; | |
161 | ||
162 | gcc_assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR); | |
163 | ||
164 | if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR) | |
165 | ending_block = single_succ (ENTRY_BLOCK_PTR); | |
166 | else | |
167 | ending_block = find_pdom (INDEX_EDGE_PRED_BB (el, edge_index)); | |
168 | ||
169 | for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index); | |
170 | current_block != ending_block && current_block != EXIT_BLOCK_PTR; | |
171 | current_block = find_pdom (current_block)) | |
172 | { | |
173 | edge e = INDEX_EDGE (el, edge_index); | |
174 | ||
175 | /* For abnormal edges, we don't make current_block control | |
176 | dependent because instructions that throw are always necessary | |
177 | anyway. */ | |
178 | if (e->flags & EDGE_ABNORMAL) | |
179 | continue; | |
180 | ||
181 | set_control_dependence_map_bit (current_block, edge_index); | |
182 | } | |
183 | } | |
184 | ||
185 | ||
/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  */

static void
find_all_control_dependences (struct edge_list *el)
{
  int num_edges = NUM_EDGES (el);
  int idx;

  for (idx = 0; idx < num_edges; idx++)
    find_control_dependence (el, idx);
}
197 | ||
/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  Marking is sticky: a statement
   once marked necessary is never reconsidered.  */

static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  /* Already marked — nothing more to do.  */
  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    VEC_safe_push (gimple, heap, worklist, stmt);
  /* Debug statements do not make their block count as containing live
     statements.  */
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
222 | ||
223 | ||
/* Mark the statement defining operand OP as necessary.  OP must be an
   SSA name; its defining statement is flagged necessary and pushed on
   the worklist so its own operands are processed in turn.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (TEST_BIT (processed, ver))
    {
      /* Already handled: the defining statement must be necessary by now
	 (or be a GIMPLE_NOP, e.g. a default definition).  */
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
		  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  SET_BIT (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
  /* Propagate necessity to the operands of STMT as well.  */
  VEC_safe_push (gimple, heap, worklist, stmt);
}
263 | ||
264 | ||
/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement may throw, it is inherently necessary.  */
  if (cfun->can_throw_non_call_exceptions && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      /* Needed but never propagates necessity to operands.  */
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
	tree callee = gimple_call_fndecl (stmt);
	/* Allocation builtins are never inherently necessary; if their
	   result is unused they can be deleted.  */
	if (callee != NULL_TREE
	    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      return;

	    default:;
	    }
	/* Most, but not all function calls are required.  Function calls that
	   produce no result and have no side effects (i.e. const pure
	   functions) are unnecessary.  */
	if (gimple_has_side_effects (stmt))
	  {
	    mark_stmt_necessary (stmt, true);
	    return;
	  }
	if (!gimple_call_lhs (stmt))
	  return;
	/* Calls with an LHS fall through to the volatile/global-store
	   checks below.  */
	break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
	 easily locate the debug temp bind stmt for a use thereof,
	 we could refrain from marking all debug temps here, and
	 mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
	  || gimple_debug_bind_has_value_p (stmt)
	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
	mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      /* Only computed gotos reach here; simple gotos were removed by
	 CFG construction.  */
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      /* In conservative mode every control statement is necessary;
	 in aggressive mode control dependence analysis decides.  */
      if (! aggressive)
	mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      /* Clobbers of SSA-based storage are never inherently necessary.  */
      if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
	  && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt)))
	return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (is_hidden_global_store (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
381 | ||
382 | ||
383 | /* Mark the last statement of BB as necessary. */ | |
384 | ||
385 | static void | |
386 | mark_last_stmt_necessary (basic_block bb) | |
387 | { | |
388 | gimple stmt = last_stmt (bb); | |
389 | ||
390 | SET_BIT (last_stmt_necessary, bb->index); | |
391 | SET_BIT (bb_contains_live_stmts, bb->index); | |
392 | ||
393 | /* We actually mark the statement only if it is a control statement. */ | |
394 | if (stmt && is_ctrl_stmt (stmt)) | |
395 | mark_stmt_necessary (stmt, true); | |
396 | } | |
397 | ||
398 | ||
/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el,
					bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR);

  /* The entry block has no control dependences.  */
  if (bb == ENTRY_BLOCK_PTR)
    return;

  EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
    {
      basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);

      if (ignore_self && cd_bb == bb)
	{
	  skipped = true;
	  continue;
	}

      if (!TEST_BIT (last_stmt_necessary, cd_bb->index))
	mark_last_stmt_necessary (cd_bb);
    }

  /* Only record BB as fully processed when nothing was skipped; a later
     call with IGNORE_SELF == false must still be able to process the
     dependence we left out.  */
  if (!skipped)
    SET_BIT (visited_control_parents, bb->index);
}
434 | ||
435 | ||
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If EL is NULL, control statements are conservatively marked as
   necessary.  Otherwise it contains the list of edges used by control
   dependence analysis.  */

static void
find_obviously_necessary_stmts (struct edge_list *el)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;
  int flags;

  FOR_EACH_BB (bb)
    {
      /* PHI nodes are never inherently necessary.  Clear their flag so
	 propagation starts from a clean state.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  gimple_set_plf (phi, STMT_NECESSARY, false);
	}

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  gimple_set_plf (stmt, STMT_NECESSARY, false);
	  mark_stmt_if_obviously_necessary (stmt, el != NULL);
	}
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  No need to preserve possibly-infinite loops below.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  */
  if (el)
    {
      loop_iterator li;
      struct loop *loop;
      scev_initialize ();
      /* Back edges of irreducible regions cannot be analyzed by the loop
	 machinery; conservatively keep their control dependences.  */
      if (mark_irreducible_loops ())
	FOR_EACH_BB (bb)
	  {
	    edge_iterator ei;
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if ((e->flags & EDGE_DFS_BACK)
		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
		{
		  if (dump_file)
		    fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
			     e->src->index, e->dest->index);
		  mark_control_dependent_edges_necessary (e->dest, el, false);
		}
	  }

      /* Loops whose finiteness cannot be proven must keep their latch
	 control dependences so the loop is not removed.  */
      FOR_EACH_LOOP (li, loop, 0)
	if (!finite_loop_p (loop))
	  {
	    if (dump_file)
	      fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num);
	    mark_control_dependent_edges_necessary (loop->latch, el, false);
	  }
      scev_finalize ();
    }
}
507 | ||
508 | ||
509 | /* Return true if REF is based on an aliased base, otherwise false. */ | |
510 | ||
511 | static bool | |
512 | ref_may_be_aliased (tree ref) | |
513 | { | |
514 | gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR); | |
515 | while (handled_component_p (ref)) | |
516 | ref = TREE_OPERAND (ref, 0); | |
517 | if (TREE_CODE (ref) == MEM_REF | |
518 | && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR) | |
519 | ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0); | |
520 | return !(DECL_P (ref) | |
521 | && !may_be_aliased (ref)); | |
522 | } | |
523 | ||
/* Shared visited bitmap for the "mark all reaching defs" walker, kept
   across calls to avoid quadratic behavior.  */
static bitmap visited = NULL;
/* Statistics about vdef-chain walks, used to decide when to drop to the
   simple (chain_ovfl) mode.  */
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
/* True once the walks got too long and we switched to the simple mode.  */
static bool chain_ovfl = false;
529 | ||
/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == refd->base does not always hold.  */
      if (base == ref->base)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly.  */
	  if (size != -1 && size == max_size
	      && ref->max_size != -1)
	    {
	      /* The store covers the whole extent REF may touch: a kill.  */
	      if (offset <= ref->offset
		  && offset + size >= ref->offset + ref->max_size)
		return true;
	    }
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
		      in the references (gcc.c-torture/execute/pr42142.c).
		      The simplest way is to check if the kill dominates
		      the use.  */
		   /* But when both are in the same block we cannot
		      easily tell whether we came from a backedge
		      unless we decide to compute stmt UIDs
		      (see PR58246).  */
		   && (basic_block) data != gimple_bb (def_stmt)
		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
				      gimple_bb (def_stmt))
		   && operand_equal_p (ref->ref, lhs, 0))
	    return true;
	}
    }

  /* Otherwise keep walking.  */
  return false;
}
594 | ||
595 | static void | |
596 | mark_aliased_reaching_defs_necessary (gimple stmt, tree ref) | |
597 | { | |
598 | unsigned int chain; | |
599 | ao_ref refd; | |
600 | gcc_assert (!chain_ovfl); | |
601 | ao_ref_init (&refd, ref); | |
602 | chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt), | |
603 | mark_aliased_reaching_defs_necessary_1, | |
604 | gimple_bb (stmt), NULL); | |
605 | if (chain > longest_chain) | |
606 | longest_chain = chain; | |
607 | total_chain += chain; | |
608 | nr_walks++; | |
609 | } | |
610 | ||
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
				    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && TEST_BIT (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
	return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  /* Allocation and free builtins have vdefs but perform no store
	     another statement could read; skip them.  */
	  case BUILT_IN_MALLOC:
	  case BUILT_IN_CALLOC:
	  case BUILT_IN_ALLOCA:
	  case BUILT_IN_ALLOCA_WITH_ALIGN:
	  case BUILT_IN_FREE:
	    return false;

	  default:;
	  }
    }

  mark_operand_necessary (vdef);

  /* Always continue the walk; we must mark every reaching def.  */
  return false;
}
666 | ||
/* Mark all virtual may-definitions reaching STMT as necessary, except
   defs of non-aliased decls (skipped by the worker above).  The shared
   VISITED bitmap avoids quadratic re-walking across statements.  */
static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
		      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
673 | ||
674 | /* Return true for PHI nodes with one or identical arguments | |
675 | can be removed. */ | |
676 | static bool | |
677 | degenerate_phi_p (gimple phi) | |
678 | { | |
679 | unsigned int i; | |
680 | tree op = gimple_phi_arg_def (phi, 0); | |
681 | for (i = 1; i < gimple_phi_num_args (phi); i++) | |
682 | if (gimple_phi_arg_def (phi, i) != op) | |
683 | return false; | |
684 | return true; | |
685 | } | |
686 | ||
687 | /* Propagate necessity using the operands of necessary statements. | |
688 | Process the uses on each statement in the worklist, and add all | |
689 | feeding statements which contribute to the calculation of this | |
690 | value to the worklist. | |
691 | ||
692 | In conservative mode, EL is NULL. */ | |
693 | ||
694 | static void | |
695 | propagate_necessity (struct edge_list *el) | |
696 | { | |
697 | gimple stmt; | |
698 | bool aggressive = (el ? true : false); | |
699 | ||
700 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
701 | fprintf (dump_file, "\nProcessing worklist:\n"); | |
702 | ||
703 | while (VEC_length (gimple, worklist) > 0) | |
704 | { | |
705 | /* Take STMT from worklist. */ | |
706 | stmt = VEC_pop (gimple, worklist); | |
707 | ||
708 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
709 | { | |
710 | fprintf (dump_file, "processing: "); | |
711 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); | |
712 | fprintf (dump_file, "\n"); | |
713 | } | |
714 | ||
715 | if (aggressive) | |
716 | { | |
717 | /* Mark the last statement of the basic blocks on which the block | |
718 | containing STMT is control dependent, but only if we haven't | |
719 | already done so. */ | |
720 | basic_block bb = gimple_bb (stmt); | |
721 | if (bb != ENTRY_BLOCK_PTR | |
722 | && !TEST_BIT (visited_control_parents, bb->index)) | |
723 | mark_control_dependent_edges_necessary (bb, el, false); | |
724 | } | |
725 | ||
726 | if (gimple_code (stmt) == GIMPLE_PHI | |
727 | /* We do not process virtual PHI nodes nor do we track their | |
728 | necessity. */ | |
729 | && is_gimple_reg (gimple_phi_result (stmt))) | |
730 | { | |
731 | /* PHI nodes are somewhat special in that each PHI alternative has | |
732 | data and control dependencies. All the statements feeding the | |
733 | PHI node's arguments are always necessary. In aggressive mode, | |
734 | we also consider the control dependent edges leading to the | |
735 | predecessor block associated with each PHI alternative as | |
736 | necessary. */ | |
737 | size_t k; | |
738 | ||
739 | for (k = 0; k < gimple_phi_num_args (stmt); k++) | |
740 | { | |
741 | tree arg = PHI_ARG_DEF (stmt, k); | |
742 | if (TREE_CODE (arg) == SSA_NAME) | |
743 | mark_operand_necessary (arg); | |
744 | } | |
745 | ||
746 | /* For PHI operands it matters from where the control flow arrives | |
747 | to the BB. Consider the following example: | |
748 | ||
749 | a=exp1; | |
750 | b=exp2; | |
751 | if (test) | |
752 | ; | |
753 | else | |
754 | ; | |
755 | c=PHI(a,b) | |
756 | ||
757 | We need to mark control dependence of the empty basic blocks, since they | |
758 | contains computation of PHI operands. | |
759 | ||
760 | Doing so is too restrictive in the case the predecestor block is in | |
761 | the loop. Consider: | |
762 | ||
763 | if (b) | |
764 | { | |
765 | int i; | |
766 | for (i = 0; i<1000; ++i) | |
767 | ; | |
768 | j = 0; | |
769 | } | |
770 | return j; | |
771 | ||
772 | There is PHI for J in the BB containing return statement. | |
773 | In this case the control dependence of predecestor block (that is | |
774 | within the empty loop) also contains the block determining number | |
775 | of iterations of the block that would prevent removing of empty | |
776 | loop in this case. | |
777 | ||
778 | This scenario can be avoided by splitting critical edges. | |
779 | To save the critical edge splitting pass we identify how the control | |
780 | dependence would look like if the edge was split. | |
781 | ||
782 | Consider the modified CFG created from current CFG by splitting | |
783 | edge B->C. In the postdominance tree of modified CFG, C' is | |
784 | always child of C. There are two cases how chlids of C' can look | |
785 | like: | |
786 | ||
787 | 1) C' is leaf | |
788 | ||
789 | In this case the only basic block C' is control dependent on is B. | |
790 | ||
791 | 2) C' has single child that is B | |
792 | ||
793 | In this case control dependence of C' is same as control | |
794 | dependence of B in original CFG except for block B itself. | |
795 | (since C' postdominate B in modified CFG) | |
796 | ||
797 | Now how to decide what case happens? There are two basic options: | |
798 | ||
799 | a) C postdominate B. Then C immediately postdominate B and | |
800 | case 2 happens iff there is no other way from B to C except | |
801 | the edge B->C. | |
802 | ||
803 | There is other way from B to C iff there is succesor of B that | |
804 | is not postdominated by B. Testing this condition is somewhat | |
805 | expensive, because we need to iterate all succesors of B. | |
806 | We are safe to assume that this does not happen: we will mark B | |
807 | as needed when processing the other path from B to C that is | |
808 | conrol dependent on B and marking control dependencies of B | |
809 | itself is harmless because they will be processed anyway after | |
810 | processing control statement in B. | |
811 | ||
812 | b) C does not postdominate B. Always case 1 happens since there is | |
813 | path from C to exit that does not go through B and thus also C'. */ | |
814 | ||
815 | if (aggressive && !degenerate_phi_p (stmt)) | |
816 | { | |
817 | for (k = 0; k < gimple_phi_num_args (stmt); k++) | |
818 | { | |
819 | basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src; | |
820 | ||
821 | if (gimple_bb (stmt) | |
822 | != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb)) | |
823 | { | |
824 | if (!TEST_BIT (last_stmt_necessary, arg_bb->index)) | |
825 | mark_last_stmt_necessary (arg_bb); | |
826 | } | |
827 | else if (arg_bb != ENTRY_BLOCK_PTR | |
828 | && !TEST_BIT (visited_control_parents, | |
829 | arg_bb->index)) | |
830 | mark_control_dependent_edges_necessary (arg_bb, el, true); | |
831 | } | |
832 | } | |
833 | } | |
834 | else | |
835 | { | |
836 | /* Propagate through the operands. Examine all the USE, VUSE and | |
837 | VDEF operands in this statement. Mark all the statements | |
838 | which feed this statement's uses as necessary. */ | |
839 | ssa_op_iter iter; | |
840 | tree use; | |
841 | ||
842 | /* If this is a call to free which is directly fed by an | |
843 | allocation function do not mark that necessary through | |
844 | processing the argument. */ | |
845 | if (gimple_call_builtin_p (stmt, BUILT_IN_FREE)) | |
846 | { | |
847 | tree ptr = gimple_call_arg (stmt, 0); | |
848 | gimple def_stmt; | |
849 | tree def_callee; | |
850 | /* If the pointer we free is defined by an allocation | |
851 | function do not add the call to the worklist. */ | |
852 | if (TREE_CODE (ptr) == SSA_NAME | |
853 | && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr)) | |
854 | && (def_callee = gimple_call_fndecl (def_stmt)) | |
855 | && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL | |
856 | && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC | |
857 | || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC)) | |
858 | continue; | |
859 | } | |
860 | ||
861 | FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE) | |
862 | mark_operand_necessary (use); | |
863 | ||
864 | use = gimple_vuse (stmt); | |
865 | if (!use) | |
866 | continue; | |
867 | ||
868 | /* If we dropped to simple mode make all immediately | |
869 | reachable definitions necessary. */ | |
870 | if (chain_ovfl) | |
871 | { | |
872 | mark_all_reaching_defs_necessary (stmt); | |
873 | continue; | |
874 | } | |
875 | ||
876 | /* For statements that may load from memory (have a VUSE) we | |
877 | have to mark all reaching (may-)definitions as necessary. | |
878 | We partition this task into two cases: | |
879 | 1) explicit loads based on decls that are not aliased | |
880 | 2) implicit loads (like calls) and explicit loads not | |
881 | based on decls that are not aliased (like indirect | |
882 | references or loads from globals) | |
883 | For 1) we mark all reaching may-defs as necessary, stopping | |
884 | at dominating kills. For 2) we want to mark all dominating | |
885 | references necessary, but non-aliased ones which we handle | |
886 | in 1). By keeping a global visited bitmap for references | |
887 | we walk for 2) we avoid quadratic behavior for those. */ | |
888 | ||
889 | if (is_gimple_call (stmt)) | |
890 | { | |
891 | tree callee = gimple_call_fndecl (stmt); | |
892 | unsigned i; | |
893 | ||
894 | /* Calls to functions that are merely acting as barriers | |
895 | or that only store to memory do not make any previous | |
896 | stores necessary. */ | |
897 | if (callee != NULL_TREE | |
898 | && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL | |
899 | && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET | |
900 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK | |
901 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC | |
902 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC | |
903 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE | |
904 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END | |
905 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA | |
906 | || (DECL_FUNCTION_CODE (callee) | |
907 | == BUILT_IN_ALLOCA_WITH_ALIGN) | |
908 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE | |
909 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE | |
910 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED)) | |
911 | continue; | |
912 | ||
913 | /* Calls implicitly load from memory, their arguments | |
914 | in addition may explicitly perform memory loads. */ | |
915 | mark_all_reaching_defs_necessary (stmt); | |
916 | for (i = 0; i < gimple_call_num_args (stmt); ++i) | |
917 | { | |
918 | tree arg = gimple_call_arg (stmt, i); | |
919 | if (TREE_CODE (arg) == SSA_NAME | |
920 | || is_gimple_min_invariant (arg)) | |
921 | continue; | |
922 | if (TREE_CODE (arg) == WITH_SIZE_EXPR) | |
923 | arg = TREE_OPERAND (arg, 0); | |
924 | if (!ref_may_be_aliased (arg)) | |
925 | mark_aliased_reaching_defs_necessary (stmt, arg); | |
926 | } | |
927 | } | |
928 | else if (gimple_assign_single_p (stmt)) | |
929 | { | |
930 | tree rhs; | |
931 | /* If this is a load mark things necessary. */ | |
932 | rhs = gimple_assign_rhs1 (stmt); | |
933 | if (TREE_CODE (rhs) != SSA_NAME | |
934 | && !is_gimple_min_invariant (rhs) | |
935 | && TREE_CODE (rhs) != CONSTRUCTOR) | |
936 | { | |
937 | if (!ref_may_be_aliased (rhs)) | |
938 | mark_aliased_reaching_defs_necessary (stmt, rhs); | |
939 | else | |
940 | mark_all_reaching_defs_necessary (stmt); | |
941 | } | |
942 | } | |
943 | else if (gimple_code (stmt) == GIMPLE_RETURN) | |
944 | { | |
945 | tree rhs = gimple_return_retval (stmt); | |
946 | /* A return statement may perform a load. */ | |
947 | if (rhs | |
948 | && TREE_CODE (rhs) != SSA_NAME | |
949 | && !is_gimple_min_invariant (rhs) | |
950 | && TREE_CODE (rhs) != CONSTRUCTOR) | |
951 | { | |
952 | if (!ref_may_be_aliased (rhs)) | |
953 | mark_aliased_reaching_defs_necessary (stmt, rhs); | |
954 | else | |
955 | mark_all_reaching_defs_necessary (stmt); | |
956 | } | |
957 | } | |
958 | else if (gimple_code (stmt) == GIMPLE_ASM) | |
959 | { | |
960 | unsigned i; | |
961 | mark_all_reaching_defs_necessary (stmt); | |
962 | /* Inputs may perform loads. */ | |
963 | for (i = 0; i < gimple_asm_ninputs (stmt); ++i) | |
964 | { | |
965 | tree op = TREE_VALUE (gimple_asm_input_op (stmt, i)); | |
966 | if (TREE_CODE (op) != SSA_NAME | |
967 | && !is_gimple_min_invariant (op) | |
968 | && TREE_CODE (op) != CONSTRUCTOR | |
969 | && !ref_may_be_aliased (op)) | |
970 | mark_aliased_reaching_defs_necessary (stmt, op); | |
971 | } | |
972 | } | |
973 | else if (gimple_code (stmt) == GIMPLE_TRANSACTION) | |
974 | { | |
975 | /* The beginning of a transaction is a memory barrier. */ | |
976 | /* ??? If we were really cool, we'd only be a barrier | |
977 | for the memories touched within the transaction. */ | |
978 | mark_all_reaching_defs_necessary (stmt); | |
979 | } | |
980 | else | |
981 | gcc_unreachable (); | |
982 | ||
983 | /* If we over-used our alias oracle budget drop to simple | |
984 | mode. The cost metric allows quadratic behavior | |
985 | (number of uses times number of may-defs queries) up to | |
986 | a constant maximal number of queries and after that falls back to | |
987 | super-linear complexity. */ | |
988 | if (/* Constant but quadratic for small functions. */ | |
989 | total_chain > 128 * 128 | |
990 | /* Linear in the number of may-defs. */ | |
991 | && total_chain > 32 * longest_chain | |
992 | /* Linear in the number of uses. */ | |
993 | && total_chain > nr_walks * 32) | |
994 | { | |
995 | chain_ovfl = true; | |
996 | if (visited) | |
997 | bitmap_clear (visited); | |
998 | } | |
999 | } | |
1000 | } | |
1001 | } | |
1002 | ||
1003 | /* Replace all uses of NAME by underlying variable and mark it | |
1004 | for renaming. */ | |
1005 | ||
1006 | void | |
1007 | mark_virtual_operand_for_renaming (tree name) | |
1008 | { | |
1009 | bool used = false; | |
1010 | imm_use_iterator iter; | |
1011 | use_operand_p use_p; | |
1012 | gimple stmt; | |
1013 | tree name_var; | |
1014 | ||
1015 | name_var = SSA_NAME_VAR (name); | |
1016 | FOR_EACH_IMM_USE_STMT (stmt, iter, name) | |
1017 | { | |
1018 | FOR_EACH_IMM_USE_ON_STMT (use_p, iter) | |
1019 | SET_USE (use_p, name_var); | |
1020 | update_stmt (stmt); | |
1021 | used = true; | |
1022 | } | |
1023 | if (used) | |
1024 | mark_sym_for_renaming (name_var); | |
1025 | } | |
1026 | ||
/* Replace all uses of the (virtual) result of PHI by the underlying
   variable and mark that variable for renaming.  Dumps the PHI when
   detailed dumping is enabled.  */

void
mark_virtual_phi_result_for_renaming (gimple phi)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking result for renaming : ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  /* Delegate the actual use-rewriting to the generic routine.  */
  mark_virtual_operand_for_renaming (gimple_phi_result (phi));
}
1042 | ||
1043 | ||
/* Remove dead PHI nodes from block BB.  Returns true if any PHI node
   was removed.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gimple_seq phis;
  gimple phi;
  gimple_stmt_iterator gsi;
  phis = phi_nodes (bb);

  for (gsi = gsi_start (phis); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi_stmt (gsi);

      /* We do not track necessity of virtual PHI nodes.  Instead do
	 very simple dead PHI removal here.  */
      if (!is_gimple_reg (gimple_phi_result (phi)))
	{
	  /* Virtual PHI nodes with one or identical arguments
	     can be removed.  */
	  if (degenerate_phi_p (phi))
	    {
	      tree vdef = gimple_phi_result (phi);
	      tree vuse = gimple_phi_arg_def (phi, 0);

	      use_operand_p use_p;
	      imm_use_iterator iter;
	      gimple use_stmt;
	      /* Bypass the degenerate PHI: rewrite every use of its
		 result to the single incoming virtual operand.  The
		 PHI itself is then deleted below, since its
		 STMT_NECESSARY flag was never set.  */
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, vuse);
	      /* Keep the abnormal-PHI marker consistent on the
		 replacement SSA name.  */
	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
		  && TREE_CODE (vuse) == SSA_NAME)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
	    }
	  else
	    /* Non-degenerate virtual PHIs are always kept.  */
	    gimple_set_plf (phi, STMT_NECESSARY, true);
	}

      if (!gimple_plf (phi, STMT_NECESSARY))
	{
	  something_changed = true;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Deleting : ");
	      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  /* remove_phi_node advances GSI to the next PHI itself,
	     hence the continue rather than gsi_next below.  */
	  remove_phi_node (&gsi, true);
	  stats.removed_phis++;
	  continue;
	}

      gsi_next (&gsi);
    }
  return something_changed;
}
1104 | ||
/* Forward edge E to respective POST_DOM_BB and update PHIs.  Returns
   the edge that now leads to POST_DOM_BB (either the redirected E or a
   pre-existing equivalent edge).  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gimple_stmt_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
	     e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing control dependent BB.
	 This means that we can pick any edge to duplicate PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
	if (e2 != e)
	  break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
	{
	  gimple phi = gsi_stmt (gsi);
	  tree op;
	  source_location locus;

	  /* PHIs for virtuals have no control dependency relation on them.
	     We are lost here and must force renaming of the symbol.  */
	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    {
	      mark_virtual_phi_result_for_renaming (phi);
	      remove_phi_node (&gsi, true);
	      continue;
	    }

	  /* Dead PHI do not imply control dependency.  */
	  if (!gimple_plf (phi, STMT_NECESSARY))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* Duplicate the argument (and its location) from the chosen
	     existing predecessor edge E2 onto the redirected edge E.  */
	  op = gimple_phi_arg_def (phi, e2->dest_idx);
	  locus = gimple_phi_arg_location (phi, e2->dest_idx);
	  add_phi_arg (phi, op, e, locus);
	  /* The resulting PHI if not dead can only be degenerate.  */
	  gcc_assert (degenerate_phi_p (phi));
	  gsi_next (&gsi);
	}
    }
  return e;
}
1164 | ||
/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  Rewires the CFG when the
   dead statement is a control statement, and emits a debug bind statement for
   a store into a variable being optimized away when possible.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If edge is already there, try to use it.  This avoids need to update
	 PHI nodes.  Also watch for cases where post dominator does not exists
	 or is exit block.  These can happen for infinite loops as we create
	 fake edges in the dominator tree.  */
      if (e)
	;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR)
	e = EDGE_SUCC (bb, 0);
      else
	e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      /* The single surviving edge inherits BB's whole execution count.  */
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
	 not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
	if (e != e2)
	  {
	    cfg_altered = true;
	    remove_edge (e2);
	  }
	else
	  ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
	  && !DECL_IGNORED_P (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs))
	  && !is_global_var (lhs)
	  && !DECL_HAS_VALUE_EXPR_P (lhs))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  gimple note
	    = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
	  gsi_insert_after (i, note, GSI_SAME_STMT);
	}
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
1254 | ||
/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  Returns true if
   any non-debug statement or PHI node was removed, or the LHS of a call was
   dropped.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  VEC (basic_block, heap) *h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR));

  while (VEC_length (basic_block, h))
    {
      bb = VEC_pop (basic_block, h);

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
	{
	  stmt = gsi_stmt (gsi);

	  /* Record the predecessor iterator now; removing STMT
	     invalidates GSI.  */
	  psi = gsi;
	  gsi_prev (&psi);

	  stats.total++;

	  /* We can mark a call to free as not necessary if the
	     defining statement of its argument is not necessary
	     (and thus is getting removed).  */
	  if (gimple_plf (stmt, STMT_NECESSARY)
	      && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == SSA_NAME)
		{
		  gimple def_stmt = SSA_NAME_DEF_STMT (ptr);
		  if (!gimple_nop_p (def_stmt)
		      && !gimple_plf (def_stmt, STMT_NECESSARY))
		    gimple_set_plf (stmt, STMT_NECESSARY, false);
		}
	    }

	  /* If GSI is not necessary then remove it.  */
	  if (!gimple_plf (stmt, STMT_NECESSARY))
	    {
	      /* Removing only debug statements does not count as a
		 change to the generated code.  */
	      if (!is_gimple_debug (stmt))
		something_changed = true;
	      remove_dead_stmt (&gsi, bb);
	    }
	  else if (is_gimple_call (stmt))
	    {
	      tree name = gimple_call_lhs (stmt);

	      notice_special_calls (stmt);

	      /* When LHS of var = call (); is dead, simplify it into
		 call (); saving one operand.  */
	      if (name
		  && TREE_CODE (name) == SSA_NAME
		  && !TEST_BIT (processed, SSA_NAME_VERSION (name))
		  /* Avoid doing so for allocation calls which we
		     did not mark as necessary, it will confuse the
		     special logic we apply to malloc/free pair removal.  */
		  && (!(call = gimple_call_fndecl (stmt))
		      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
		      || (DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
			  && (DECL_FUNCTION_CODE (call)
			      != BUILT_IN_ALLOCA_WITH_ALIGN))))
		{
		  something_changed = true;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Deleting LHS of call: ");
		      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
		      fprintf (dump_file, "\n");
		    }

		  gimple_call_set_lhs (stmt, NULL_TREE);
		  maybe_clean_or_replace_eh_stmt (stmt, stmt);
		  update_stmt (stmt);
		  release_ssa_name (name);
		}
	    }
	}
    }

  VEC_free (basic_block, heap, h);

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb)
	{
	  prev_bb = bb->prev_bb;

	  if (!TEST_BIT (bb_contains_live_stmts, bb->index)
	      || !(bb->flags & BB_REACHABLE))
	    {
	      /* Look for live uses of virtual PHI results in reachable
		 blocks; such uses require the underlying symbol to be
		 renamed once the defining PHI is deleted.  */
	      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
		if (!is_gimple_reg (gimple_phi_result (gsi_stmt (gsi))))
		  {
		    bool found = false;
		    imm_use_iterator iter;

		    FOR_EACH_IMM_USE_STMT (stmt, iter, gimple_phi_result (gsi_stmt (gsi)))
		      {
			if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
			  continue;
			if (gimple_code (stmt) == GIMPLE_PHI
			    || gimple_plf (stmt, STMT_NECESSARY))
			  {
			    found = true;
			    BREAK_FROM_IMM_USE_STMT (iter);
			  }
		      }
		    if (found)
		      mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
		  }

	      if (!(bb->flags & BB_REACHABLE))
		{
		  /* Speed up the removal of blocks that don't
		     dominate others.  Walking backwards, this should
		     be the common case.  ??? Do we need to recompute
		     dominators because of cfg_altered?  */
		  if (!MAY_HAVE_DEBUG_STMTS
		      || !first_dom_son (CDI_DOMINATORS, bb))
		    delete_basic_block (bb);
		  else
		    {
		      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

		      while (VEC_length (basic_block, h))
			{
			  bb = VEC_pop (basic_block, h);
			  prev_bb = bb->prev_bb;
			  /* Rearrangements to the CFG may have failed
			     to update the dominators tree, so that
			     formerly-dominated blocks are now
			     otherwise reachable.  */
			  if (!!(bb->flags & BB_REACHABLE))
			    continue;
			  delete_basic_block (bb);
			}

		      VEC_free (basic_block, heap, h);
		    }
		}
	    }
	}
    }
  FOR_EACH_BB (bb)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}
1454 | ||
1455 | ||
1456 | /* Print out removed statement statistics. */ | |
1457 | ||
1458 | static void | |
1459 | print_stats (void) | |
1460 | { | |
1461 | float percg; | |
1462 | ||
1463 | percg = ((float) stats.removed / (float) stats.total) * 100; | |
1464 | fprintf (dump_file, "Removed %d of %d statements (%d%%)\n", | |
1465 | stats.removed, stats.total, (int) percg); | |
1466 | ||
1467 | if (stats.total_phis == 0) | |
1468 | percg = 0; | |
1469 | else | |
1470 | percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100; | |
1471 | ||
1472 | fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n", | |
1473 | stats.removed_phis, stats.total_phis, (int) percg); | |
1474 | } | |
1475 | ||
1476 | /* Initialization for this pass. Set up the used data structures. */ | |
1477 | ||
1478 | static void | |
1479 | tree_dce_init (bool aggressive) | |
1480 | { | |
1481 | memset ((void *) &stats, 0, sizeof (stats)); | |
1482 | ||
1483 | if (aggressive) | |
1484 | { | |
1485 | int i; | |
1486 | ||
1487 | control_dependence_map = XNEWVEC (bitmap, last_basic_block); | |
1488 | for (i = 0; i < last_basic_block; ++i) | |
1489 | control_dependence_map[i] = BITMAP_ALLOC (NULL); | |
1490 | ||
1491 | last_stmt_necessary = sbitmap_alloc (last_basic_block); | |
1492 | sbitmap_zero (last_stmt_necessary); | |
1493 | bb_contains_live_stmts = sbitmap_alloc (last_basic_block); | |
1494 | sbitmap_zero (bb_contains_live_stmts); | |
1495 | } | |
1496 | ||
1497 | processed = sbitmap_alloc (num_ssa_names + 1); | |
1498 | sbitmap_zero (processed); | |
1499 | ||
1500 | worklist = VEC_alloc (gimple, heap, 64); | |
1501 | cfg_altered = false; | |
1502 | } | |
1503 | ||
1504 | /* Cleanup after this pass. */ | |
1505 | ||
1506 | static void | |
1507 | tree_dce_done (bool aggressive) | |
1508 | { | |
1509 | if (aggressive) | |
1510 | { | |
1511 | int i; | |
1512 | ||
1513 | for (i = 0; i < last_basic_block; ++i) | |
1514 | BITMAP_FREE (control_dependence_map[i]); | |
1515 | free (control_dependence_map); | |
1516 | ||
1517 | sbitmap_free (visited_control_parents); | |
1518 | sbitmap_free (last_stmt_necessary); | |
1519 | sbitmap_free (bb_contains_live_stmts); | |
1520 | bb_contains_live_stmts = NULL; | |
1521 | } | |
1522 | ||
1523 | sbitmap_free (processed); | |
1524 | ||
1525 | VEC_free (gimple, heap, worklist); | |
1526 | } | |
1527 | ||
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   Returns a set of TODO_* flags for the pass manager, or zero when
   nothing changed.

   FIXME: Aggressive mode before PRE doesn't work currently because
	  the dominance info is not invalidated after DCE1.  This is
	  not an issue right now because we only run aggressive DCE
	  as the last tree SSA pass, but keep this in mind when you
	  start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  struct edge_list *el = NULL;
  bool something_changed = 0;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve chances that a loop will
     be proved to be finite in testcases such as in loop-15.c
     and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
			 | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      timevar_push (TV_CONTROL_DEPENDENCES);
      calculate_dominance_info (CDI_POST_DOMINATORS);
      el = create_edge_list ();
      find_all_control_dependences (el);
      timevar_pop (TV_CONTROL_DEPENDENCES);

      visited_control_parents = sbitmap_alloc (last_basic_block);
      sbitmap_zero (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (el);

  if (aggressive)
    loop_optimizer_finalize ();

  /* Reset the alias-walk budget counters consulted by
     propagate_necessity before running it.  */
  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (el);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  free_edge_list (el);

  if (something_changed)
    return (TODO_update_ssa | TODO_cleanup_cfg | TODO_ggc_collect
	    | TODO_remove_unused_locals);
  else
    return 0;
}
1616 | ||
1617 | /* Pass entry points. */ | |
1618 | static unsigned int | |
1619 | tree_ssa_dce (void) | |
1620 | { | |
1621 | return perform_tree_ssa_dce (/*aggressive=*/false); | |
1622 | } | |
1623 | ||
1624 | static unsigned int | |
1625 | tree_ssa_dce_loop (void) | |
1626 | { | |
1627 | unsigned int todo; | |
1628 | todo = perform_tree_ssa_dce (/*aggressive=*/false); | |
1629 | if (todo) | |
1630 | { | |
1631 | free_numbers_of_iterations_estimates (); | |
1632 | scev_reset (); | |
1633 | } | |
1634 | return todo; | |
1635 | } | |
1636 | ||
1637 | static unsigned int | |
1638 | tree_ssa_cd_dce (void) | |
1639 | { | |
1640 | return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2); | |
1641 | } | |
1642 | ||
1643 | static bool | |
1644 | gate_dce (void) | |
1645 | { | |
1646 | return flag_tree_dce != 0; | |
1647 | } | |
1648 | ||
/* Pass descriptor for the plain SSA dead code elimination pass.  */

struct gimple_opt_pass pass_dce =
{
 {
  GIMPLE_PASS,
  "dce",				/* name */
  gate_dce,				/* gate */
  tree_ssa_dce,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_DCE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa			/* todo_flags_finish */
 }
};
1667 | ||
/* Pass descriptor for the DCE variant scheduled inside the loop
   optimizer pipeline.  */

struct gimple_opt_pass pass_dce_loop =
{
 {
  GIMPLE_PASS,
  "dceloop",				/* name */
  gate_dce,				/* gate */
  tree_ssa_dce_loop,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_DCE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa			/* todo_flags_finish */
 }
};
1686 | ||
/* Pass descriptor for the control-dependence-based DCE pass; note the
   additional CFG verification in todo_flags_finish, since this variant
   may rewire edges.  */

struct gimple_opt_pass pass_cd_dce =
{
 {
  GIMPLE_PASS,
  "cddce",				/* name */
  gate_dce,				/* gate */
  tree_ssa_cd_dce,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CD_DCE,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa
  | TODO_verify_flow			/* todo_flags_finish */
 }
};