Merge branch 'vendor/GCC47'
[dragonfly.git] / contrib / gcc-4.7 / gcc / tree-cfg.c
CommitLineData
e4b17023
JM
1/* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3 2010, 2011, 2012 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 3, or (at your option)
11any later version.
12
13GCC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "tm_p.h"
28#include "basic-block.h"
29#include "output.h"
30#include "flags.h"
31#include "function.h"
32#include "ggc.h"
33#include "langhooks.h"
34#include "tree-pretty-print.h"
35#include "gimple-pretty-print.h"
36#include "tree-flow.h"
37#include "timevar.h"
38#include "tree-dump.h"
39#include "tree-pass.h"
40#include "diagnostic-core.h"
41#include "except.h"
42#include "cfgloop.h"
43#include "cfglayout.h"
44#include "tree-ssa-propagate.h"
45#include "value-prof.h"
46#include "pointer-set.h"
47#include "tree-inline.h"
48
49/* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
51
/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN (CASE_CHAIN) field, which we clear after we're
   done with the hash table to prevent problems with duplication of
   GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of labels merged into their block's leading label.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table entry mapping a source location to the last discriminator
   assigned for that location.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hash table of locus_discrim_map entries; live only while edges are
   being created (see build_gimple_cfg).  */
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);
static void group_case_labels_stmt (gimple);
130
/* Set up an empty, two-block (entry/exit only) CFG for function FN,
   allocating the basic-block array and the label-to-block map.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
			 basic_block_info_for_function (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
			 label_to_block_map_for_function (fn),
			 initial_cfg_capacity);

  /* Register the fixed entry and exit blocks in the block array.  */
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
				ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
				EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  /* Chain entry and exit together so the block list is well formed.  */
  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}
162
/* Set up an empty, two-block CFG for the current function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
168
169/*---------------------------------------------------------------------------
170 Create basic blocks
171---------------------------------------------------------------------------*/
172
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  On return the statements
   live in basic blocks and all CFG edges have been created.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  The discriminator table is
     only needed while edges are being assigned discriminators.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
					 free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
	gimple_cfg2vcg (vcg_file);
	dump_end (TDI_vcg, vcg_file);
      }
  }
}
233
/* Main entry point of the CFG construction pass.  Builds the CFG for
   the current function's gimple body, then releases the body sequence
   since the statements are now owned by the basic blocks.  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  /* The statements now live in the CFG; drop the flat body.  */
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}
248
/* Pass descriptor for CFG construction; requires lowered EH
   (PROP_gimple_leh) and provides PROP_cfg.  */

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",				/* name */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg	/* todo_flags_finish */
 }
};
267
268
269/* Return true if T is a computed goto. */
270
271static bool
272computed_goto_p (gimple t)
273{
274 return (gimple_code (t) == GIMPLE_GOTO
275 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
276}
277
278
/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
	continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  gimple assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for their goto destination.  */
	  if (!factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
	      factored_computed_goto_label
		= gimple_build_label (factored_label_decl);
	      gsi_insert_after (&new_gsi, factored_computed_goto_label,
				GSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = gimple_build_goto (var);
	      gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = gimple_build_assign (var, gimple_goto_dest (last));
	  gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  gimple_goto_set_dest (last, factored_label_decl);
	}
    }
}
353
354
/* Build a flowgraph for the sequence of stmts SEQ: partition SEQ into
   basic blocks at block-starting and block-ending statements, and
   assign each statement to its block.  Edges are created later by
   make_edges.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    seq = gsi_split_seq_before (&i);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      /* Complex and vector temporaries may still be gimple
		 registers.  */
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
422
423
/* Create and return a new empty basic block after bb AFTER.  H, if
   non-NULL, is the gimple_seq the new block takes ownership of; E is
   unused here and must be NULL (required by the cfg-hooks signature).  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = ggc_alloc_cleared_gimple_bb_info ();
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed; grow by a quarter to
     amortize repeated block creation.  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}
461
462
463/*---------------------------------------------------------------------------
464 Edge creation
465---------------------------------------------------------------------------*/
466
/* Fold COND_EXPR_COND of each COND_EXPR: walk every block and, where
   the final GIMPLE_COND folds to a constant, rewrite it to an
   unconditional true/false condition.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  location_t loc = gimple_location (stmt);
	  tree cond;
	  bool zerop, onep;

	  /* Defer overflow warnings; they are only issued (below) when
	     folding actually decided the condition.  */
	  fold_defer_overflow_warnings ();
	  cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
				  gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (stmt);
	  else if (onep)
	    gimple_cond_make_true (stmt);
	}
    }
}
505
/* Join all the blocks in the flowgraph: create the CFG edges implied
   by each block's last statement.  CUR_REGION tracks the innermost
   open OMP region while walking the blocks in order.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      make_goto_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      make_edge (bb, EXIT_BLOCK_PTR, 0);
	      fallthru = false;
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;
	    case GIMPLE_EH_DISPATCH:
	      fallthru = make_eh_dispatch_edges (last);
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		make_abnormal_goto_edges (bb, true);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* BUILTIN_RETURN is really a return statement.  */
	      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
		make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
	      /* Some calls are known not to return.  */
	      else
		fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	      /* A GIMPLE_ASSIGN may throw internally and thus be considered
		 control-altering.  */
	      if (is_ctrl_altering_stmt (last))
		make_eh_edges (last);
	      fallthru = true;
	      break;

	    case GIMPLE_ASM:
	      make_gimple_asm_edges (bb);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_PARALLEL:
	    case GIMPLE_OMP_TASK:
	    case GIMPLE_OMP_FOR:
	    case GIMPLE_OMP_SINGLE:
	    case GIMPLE_OMP_MASTER:
	    case GIMPLE_OMP_ORDERED:
	    case GIMPLE_OMP_CRITICAL:
	    case GIMPLE_OMP_SECTION:
	      /* Open a new nested OMP region.  */
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_SECTIONS:
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_SECTIONS_SWITCH:
	      fallthru = false;
	      break;

	    case GIMPLE_OMP_ATOMIC_LOAD:
	    case GIMPLE_OMP_ATOMIC_STORE:
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_RETURN:
	      /* In the case of a GIMPLE_OMP_SECTION, the edge will go
		 somewhere other than the next block.  This will be
		 created later.  */
	      cur_region->exit = bb;
	      fallthru = cur_region->type != GIMPLE_OMP_SECTION;
	      cur_region = cur_region->outer;
	      break;

	    case GIMPLE_OMP_CONTINUE:
	      cur_region->cont = bb;
	      switch (cur_region->type)
		{
		case GIMPLE_OMP_FOR:
		  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
		     succs edges as abnormal to prevent splitting
		     them.  */
		  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
		  /* Make the loopback edge.  */
		  make_edge (bb, single_succ (cur_region->entry),
			     EDGE_ABNORMAL);

		  /* Create an edge from GIMPLE_OMP_FOR to exit, which
		     corresponds to the case that the body of the loop
		     is not executed at all.  */
		  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
		  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
		  fallthru = false;
		  break;

		case GIMPLE_OMP_SECTIONS:
		  /* Wire up the edges into and out of the nested sections.  */
		  {
		    basic_block switch_bb = single_succ (cur_region->entry);

		    struct omp_region *i;
		    for (i = cur_region->inner; i ; i = i->next)
		      {
			gcc_assert (i->type == GIMPLE_OMP_SECTION);
			make_edge (switch_bb, i->entry, 0);
			make_edge (i->exit, bb, EDGE_FALLTHRU);
		      }

		    /* Make the loopback edge to the block with
		       GIMPLE_OMP_SECTIONS_SWITCH.  */
		    make_edge (bb, switch_bb, 0);

		    /* Make the edge from the switch to exit.  */
		    make_edge (switch_bb, bb->next_bb, 0);
		    fallthru = false;
		  }
		  break;

		default:
		  gcc_unreachable ();
		}
	      break;

	    case GIMPLE_TRANSACTION:
	      {
		tree abort_label = gimple_transaction_label (last);
		if (abort_label)
		  make_edge (bb, label_to_block (abort_label), 0);
		fallthru = true;
	      }
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	{
	  make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	  if (last)
	    assign_discriminator (gimple_location (last), bb->next_bb);
	}
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
700
701/* Trivial hash function for a location_t. ITEM is a pointer to
702 a hash table entry that maps a location_t to a discriminator. */
703
704static unsigned int
705locus_map_hash (const void *item)
706{
707 return ((const struct locus_discrim_map *) item)->locus;
708}
709
710/* Equality function for the locus-to-discriminator map. VA and VB
711 point to the two hash table entries to compare. */
712
713static int
714locus_map_eq (const void *va, const void *vb)
715{
716 const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
717 const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
718 return a->locus == b->locus;
719}
720
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
      htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
				(hashval_t) locus, INSERT);
  gcc_assert (slot);
  /* First request for this locus: allocate and initialize a fresh
     map entry in the slot htab reserved for us.  */
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  /* Hand out the next value; the first discriminator returned is 1.  */
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
748
749/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
750
751static bool
752same_line_p (location_t locus1, location_t locus2)
753{
754 expanded_location from, to;
755
756 if (locus1 == locus2)
757 return true;
758
759 from = expand_location (locus1);
760 to = expand_location (locus2);
761
762 if (from.line != to.line)
763 return false;
764 if (from.file == to.file)
765 return true;
766 return (from.file != NULL
767 && to.file != NULL
768 && filename_cmp (from.file, to.file) == 0);
769}
770
/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple first_in_to_bb, last_in_to_bb;

  /* Nothing to do for an unknown location or a block that already
     has a discriminator.  */
  if (locus == 0 || bb->discriminator != 0)
    return;

  /* Only assign one if BB actually shares LOCUS's source line with
     its first or last real statement.  */
  first_in_to_bb = first_non_label_stmt (bb);
  last_in_to_bb = last_stmt (bb);
  if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
      || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
    bb->discriminator = next_discriminator_for_locus (locus);
}
788
/* Create the edges for a GIMPLE_COND starting at block BB: a
   EDGE_TRUE_VALUE edge to the then-label's block and an
   EDGE_FALSE_VALUE edge to the else-label's block.  The labels are
   cleared afterwards since the edges now carry that information.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
	e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
832
833
834/* Called for each element in the hash table (P) as we delete the
835 edge to cases hash table.
836
837 Clear all the TREE_CHAINs to prevent problems with copying of
838 SWITCH_EXPRs and structure sharing rules, then free the hash table
839 element. */
840
841static bool
842edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
843 void *data ATTRIBUTE_UNUSED)
844{
845 tree t, next;
846
847 for (t = (tree) *value; t; t = next)
848 {
849 next = CASE_CHAIN (t);
850 CASE_CHAIN (t) = NULL;
851 }
852
853 *value = NULL;
854 return true;
855}
856
857/* Start recording information mapping edges to case labels. */
858
859void
860start_recording_case_labels (void)
861{
862 gcc_assert (edge_to_cases == NULL);
863 edge_to_cases = pointer_map_create ();
864 touched_switch_bbs = BITMAP_ALLOC (NULL);
865}
866
867/* Return nonzero if we are recording information for case labels. */
868
869static bool
870recording_case_labels_p (void)
871{
872 return (edge_to_cases != NULL);
873}
874
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  Any GIMPLE_SWITCH
   touched during recording has its case labels regrouped.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  /* Detach all recorded CASE_CHAINs before destroying the map.  */
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      /* The block may have been removed since it was recorded.  */
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (stmt);
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
897
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  /* E is now guaranteed to be in the map since T's labels were just
     inserted.  */
  return (tree) *pointer_map_contains (edge_to_cases, e);
}
939
940/* Create the edges for a GIMPLE_SWITCH starting at block BB. */
941
942static void
943make_gimple_switch_edges (basic_block bb)
944{
945 gimple entry = last_stmt (bb);
946 location_t entry_locus;
947 size_t i, n;
948
949 entry_locus = gimple_location (entry);
950
951 n = gimple_switch_num_labels (entry);
952
953 for (i = 0; i < n; ++i)
954 {
955 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
956 basic_block label_bb = label_to_block (lab);
957 make_edge (bb, label_bb, 0);
958 assign_discriminator (entry_locus, label_bb);
959 }
960}
961
962
/* Return the basic block holding label DEST in function IFUN, or NULL
   if no block maps to DEST's LABEL_DECL_UID.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* gsi_insert_before registered the label, giving it a UID.  */
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}
987
/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple label_stmt = gsi_stmt (gsi);
	tree target;

	/* Labels can only appear at the start of a block; stop at the
	   first non-label statement.  */
	if (gimple_code (label_stmt) != GIMPLE_LABEL)
	  break;

	target = gimple_label_label (label_stmt);

	/* Make an edge to every label block that has been marked as a
	   potential target for a computed goto or a non-local goto.  */
	if ((FORCED_LABEL (target) && !for_call)
	    || (DECL_NONLOCAL (target) && for_call))
	  {
	    make_edge (bb, target_bb, EDGE_ABNORMAL);
	    break;
	  }
      }
}
1018
/* Create edges for a goto statement at block BB.  A simple goto is
   converted into a fallthru edge (and the goto statement removed,
   since the edge now carries the destination); a computed goto gets
   abnormal edges to all potential targets.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      if (e->goto_locus)
	e->goto_block = gimple_block (goto_t);
      /* The edge represents the jump; the goto statement is redundant.  */
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}
1044
1045/* Create edges for an asm statement with labels at block BB. */
1046
1047static void
1048make_gimple_asm_edges (basic_block bb)
1049{
1050 gimple stmt = last_stmt (bb);
1051 location_t stmt_loc = gimple_location (stmt);
1052 int i, n = gimple_asm_nlabels (stmt);
1053
1054 for (i = 0; i < n; ++i)
1055 {
1056 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1057 basic_block label_bb = label_to_block (label);
1058 make_edge (bb, label_bb, 0);
1059 assign_discriminator (stmt_loc, label_bb);
1060 }
1061}
1062
1063/*---------------------------------------------------------------------------
1064 Flowgraph analysis
1065---------------------------------------------------------------------------*/
1066
1067/* Cleanup useless labels in basic blocks. This is something we wish
1068 to do early because it allows us to group case labels before creating
1069 the edges for the CFG, and it speeds up block statement iterators in
1070 all passes later on.
1071 We rerun this pass after CFG is created, to get rid of the labels that
1072 are no longer referenced. After then we do not run it any more, since
1073 (almost) no new labels should be created. */
1074
/* A map from basic block index to the leading label of that block.
   Allocated and freed by cleanup_dead_labels; indexed by bb->index.  */
static struct label_record
{
  /* The label.  NULL_TREE until a label is chosen for the block.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
1084
1085/* Given LABEL return the first label in the same basic block. */
1086
1087static tree
1088main_block_label (tree label)
1089{
1090 basic_block bb = label_to_block (label);
1091 tree main_label = label_for_bb[bb->index].label;
1092
1093 /* label_to_block possibly inserted undefined label into the chain. */
1094 if (!main_label)
1095 {
1096 label_for_bb[bb->index].label = label;
1097 main_label = label;
1098 }
1099
1100 label_for_bb[bb->index].used = true;
1101 return main_label;
1102}
1103
/* Clean up redundant labels within the exception tree.  Every label
   referenced from EH data (landing pads, catch handlers, allowed-
   exceptions regions) is redirected to the leading label of its basic
   block so that the duplicates become removable afterwards.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  /* Redirect each post-landing-pad label.  If a different label is
     chosen, move the landing-pad number over to the surviving label.  */
  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* Nothing to redirect for these region kinds.  */
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
1154
1155
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  /* One record per basic block, indexed by bb->index.  */
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  /* Labels only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  /* Both arms of a conditional may carry a label.  */
	  label = gimple_cond_true_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_true_label (stmt, new_label);
	    }

	  label = gimple_cond_false_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_false_label (stmt, new_label);
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    size_t i, n = gimple_switch_num_labels (stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    int i, n = gimple_asm_nlabels (stmt);

	    /* Redirect each label operand of an asm goto.  */
	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      label = gimple_goto_dest (stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    /* A transaction statement may carry an over/abort label.  */
	    tree label = gimple_transaction_label (stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* Keep the chosen leading label, user labels, and labels that
	     may be reached abnormally or via their address.  */
	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
1331
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  tree default_case = NULL_TREE;
  tree default_label = NULL_TREE;
  bool has_default;

  /* The default label is always the first case in a switch
     statement after gimplification if it was not optimized
     away */
  if (!CASE_LOW (gimple_switch_default_label (stmt))
      && !CASE_HIGH (gimple_switch_default_label (stmt)))
    {
      default_case = gimple_switch_default_label (stmt);
      default_label = CASE_LABEL (default_case);
      has_default = true;
    }
  else
    has_default = false;

  /* Look for possible opportunities to merge cases.  Start past the
     default case when one is present in slot 0.  */
  if (has_default)
    i = 1;
  else
    i = 0;
  while (i < old_size)
    {
      tree base_case, base_label, base_high;
      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_label = CASE_LABEL (base_case);

      /* Discard cases that have the same destination as the
	 default case.  The slot is NULLed out and compacted below.  */
      if (base_label == default_label)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  tree merge_label = CASE_LABEL (merge_case);
	  /* BHP1 = BASE_HIGH + 1; since the vector is sorted, the next
	     case is range-adjacent exactly when its CASE_LOW equals
	     this value.  */
	  double_int bhp1 = double_int_add (tree_to_double_int (base_high),
					    double_int_one);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_label == base_label
	      && double_int_equal_p (tree_to_double_int (CASE_LOW (merge_case)),
				     bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  J skips over the slots NULLed out above.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
1427
1428/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1429 and scan the sorted vector of cases. Combine the ones jumping to the
1430 same label. */
1431
1432void
1433group_case_labels (void)
1434{
1435 basic_block bb;
1436
1437 FOR_EACH_BB (bb)
1438 {
1439 gimple stmt = last_stmt (bb);
1440 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1441 group_case_labels_stmt (stmt);
1442 }
1443}
1444
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  /* A must have exactly one successor, B, reached over an ordinary
     (non-abnormal, non-EH) edge, and B must have no other
     predecessors.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH | EDGE_PRESERVE))
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis)
      && name_mappings_registered_p ())
    return false;

  /* When not optimizing, don't merge if we'd lose goto_locus.  The
     location survives only if the last real statement of A or the first
     real statement of B already carries the same location.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
1526
1527/* Return true if the var whose chain of uses starts at PTR has no
1528 nondebug uses. */
1529bool
1530has_zero_uses_1 (const ssa_use_operand_t *head)
1531{
1532 const ssa_use_operand_t *ptr;
1533
1534 for (ptr = head->next; ptr != head; ptr = ptr->next)
1535 if (!is_gimple_debug (USE_STMT (ptr)))
1536 return false;
1537
1538 return true;
1539}
1540
1541/* Return true if the var whose chain of uses starts at PTR has a
1542 single nondebug use. Set USE_P and STMT to that single nondebug
1543 use, if so, or to NULL otherwise. */
1544bool
1545single_imm_use_1 (const ssa_use_operand_t *head,
1546 use_operand_p *use_p, gimple *stmt)
1547{
1548 ssa_use_operand_t *ptr, *single_use = 0;
1549
1550 for (ptr = head->next; ptr != head; ptr = ptr->next)
1551 if (!is_gimple_debug (USE_STMT (ptr)))
1552 {
1553 if (single_use)
1554 {
1555 single_use = NULL;
1556 break;
1557 }
1558 single_use = ptr;
1559 }
1560
1561 if (use_p)
1562 *use_p = single_use;
1563
1564 if (stmt)
1565 *stmt = single_use ? single_use->loc.stmt : NULL;
1566
1567 return !!single_use;
1568}
1569
/* Replaces all uses of NAME by VAL.  Afterwards NAME must have no
   remaining uses; statements whose operands changed are folded and
   updated, and blocks whose last statement changed are recorded in
   cfgcleanup_altered_bbs when that bitmap is live.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (!is_gimple_reg (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* Mark the block if we changed the last stmt in it.  */
	  if (cfgcleanup_altered_bbs
	      && stmt_ends_bb_p (stmt))
	    bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

	  /* FIXME. It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  /* Folding may replace STMT; keep tracking the current one.  */
	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1650
/* Merge block B into block A.  Assumes gimple_can_merge_blocks_p (A, B)
   holds: A's single fallthru successor is B and B has no other
   predecessors.  B's PHI nodes are eliminated, its labels removed or
   relocated, and its statement sequence appended to A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;
  gimple_seq phis = phi_nodes (b);

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start (phis); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = !is_gimple_reg (def)
			      || may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && is_gimple_reg (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (is_gimple_reg (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
        {
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (!is_gimple_reg (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* If the removed label carried a landing-pad number, detach
	     the landing pad from it.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1779
1780
1781/* Return the one of two successors of BB that is not reachable by a
1782 complex edge, if there is one. Else, return BB. We use
1783 this in optimizations that use post-dominators for their heuristics,
1784 to catch the cases in C++ where function calls are involved. */
1785
1786basic_block
1787single_noncomplex_succ (basic_block bb)
1788{
1789 edge e0, e1;
1790 if (EDGE_COUNT (bb->succs) != 2)
1791 return bb;
1792
1793 e0 = EDGE_SUCC (bb, 0);
1794 e1 = EDGE_SUCC (bb, 1);
1795 if (e0->flags & EDGE_COMPLEX)
1796 return e1->dest;
1797 if (e1->flags & EDGE_COMPLEX)
1798 return e0->dest;
1799
1800 return bb;
1801}
1802
1803/* T is CALL_EXPR. Set current_function_calls_* flags. */
1804
1805void
1806notice_special_calls (gimple call)
1807{
1808 int flags = gimple_call_flags (call);
1809
1810 if (flags & ECF_MAY_BE_ALLOCA)
1811 cfun->calls_alloca = true;
1812 if (flags & ECF_RETURNS_TWICE)
1813 cfun->calls_setjmp = true;
1814}
1815
1816
/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  /* Reset both flags; notice_special_calls re-establishes them for any
     surviving calls.  */
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}
1826
/* Remove PHI nodes associated with basic block BB and all edges out of BB.
   BB is assumed unreachable, so no uses of the PHI results need to be
   preserved.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  Removing an edge shifts the
     vector, so always take successor 0 until none remain.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}
1840
1841
/* Remove statements of basic block BB.  Forced and non-local labels are
   relocated to the preceding block rather than deleted, since they may
   still be referenced.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (bb, dump_file, 0);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  if (gimple_code (stmt) == GIMPLE_LABEL
	      && (FORCED_LABEL (gimple_label_label (stmt))
		  || DECL_NONLOCAL (gimple_label_label (stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (stmt)) = 1;
		}

	      /* Move the label to the previous block instead of
		 deleting it.  */
	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  /* The removal above may have invalidated the iterator; restart
	     from the (new) last statement when it ran off the end.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple = NULL;
}
1923
1924
1925/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1926 predicate VAL, return the edge that will be taken out of the block.
1927 If VAL does not match a unique edge, NULL is returned. */
1928
1929edge
1930find_taken_edge (basic_block bb, tree val)
1931{
1932 gimple stmt;
1933
1934 stmt = last_stmt (bb);
1935
1936 gcc_assert (stmt);
1937 gcc_assert (is_ctrl_stmt (stmt));
1938
1939 if (val == NULL)
1940 return NULL;
1941
1942 if (!is_gimple_min_invariant (val))
1943 return NULL;
1944
1945 if (gimple_code (stmt) == GIMPLE_COND)
1946 return find_taken_edge_cond_expr (bb, val);
1947
1948 if (gimple_code (stmt) == GIMPLE_SWITCH)
1949 return find_taken_edge_switch_expr (bb, val);
1950
1951 if (computed_goto_p (stmt))
1952 {
1953 /* Only optimize if the argument is a label, if the argument is
1954 not a label then we can not construct a proper CFG.
1955
1956 It may be the case that we only need to allow the LABEL_REF to
1957 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1958 appear inside a LABEL_EXPR just to be safe. */
1959 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1960 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1961 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1962 return NULL;
1963 }
1964
1965 gcc_unreachable ();
1966}
1967
1968/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1969 statement, determine which of the outgoing edges will be taken out of the
1970 block. Return NULL if either edge may be taken. */
1971
1972static edge
1973find_taken_edge_computed_goto (basic_block bb, tree val)
1974{
1975 basic_block dest;
1976 edge e = NULL;
1977
1978 dest = label_to_block (val);
1979 if (dest)
1980 {
1981 e = find_edge (bb, dest);
1982 gcc_assert (e != NULL);
1983 }
1984
1985 return e;
1986}
1987
1988/* Given a constant value VAL and the entry block BB to a COND_EXPR
1989 statement, determine which of the two edges will be taken out of the
1990 block. Return NULL if either edge may be taken. */
1991
1992static edge
1993find_taken_edge_cond_expr (basic_block bb, tree val)
1994{
1995 edge true_edge, false_edge;
1996
1997 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1998
1999 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2000 return (integer_zerop (val) ? false_edge : true_edge);
2001}
2002
2003/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2004 statement, determine which edge will be taken out of the block. Return
2005 NULL if any edge may be taken. */
2006
2007static edge
2008find_taken_edge_switch_expr (basic_block bb, tree val)
2009{
2010 basic_block dest_bb;
2011 edge e;
2012 gimple switch_stmt;
2013 tree taken_case;
2014
2015 switch_stmt = last_stmt (bb);
2016 taken_case = find_case_label_for_value (switch_stmt, val);
2017 dest_bb = label_to_block (CASE_LABEL (taken_case));
2018
2019 e = find_edge (bb, dest_bb);
2020 gcc_assert (e);
2021 return e;
2022}
2023
2024
2025/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2026 We can make optimal use here of the fact that the case labels are
2027 sorted: We can do a binary search for a case matching VAL. */
2028
2029static tree
2030find_case_label_for_value (gimple switch_stmt, tree val)
2031{
2032 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2033 tree default_case = gimple_switch_default_label (switch_stmt);
2034
2035 for (low = 0, high = n; high - low > 1; )
2036 {
2037 size_t i = (high + low) / 2;
2038 tree t = gimple_switch_label (switch_stmt, i);
2039 int cmp;
2040
2041 /* Cache the result of comparing CASE_LOW and val. */
2042 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2043
2044 if (cmp > 0)
2045 high = i;
2046 else
2047 low = i;
2048
2049 if (CASE_HIGH (t) == NULL)
2050 {
2051 /* A singe-valued case label. */
2052 if (cmp == 0)
2053 return t;
2054 }
2055 else
2056 {
2057 /* A case range. We can only handle integer ranges. */
2058 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2059 return t;
2060 }
2061 }
2062
2063 return default_case;
2064}
2065
2066
/* Dump a basic block on stderr.  Convenience entry point for use from
   a debugger.  */

void
gimple_debug_bb (basic_block bb)
{
  /* TDF_VOPS|TDF_MEMSYMS: include virtual operands and memory symbols
     in the dump.  */
  gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
}
2074
2075
2076/* Dump basic block with index N on stderr. */
2077
2078basic_block
2079gimple_debug_bb_n (int n)
2080{
2081 gimple_debug_bb (BASIC_BLOCK (n));
2082 return BASIC_BLOCK (n);
2083}
2084
2085
/* Dump the CFG on stderr.  Convenience wrapper around gimple_dump_cfg
   for use from a debugger.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in tree-pass.h).  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
2096
2097
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks, n_edges, last_basic_block);

      /* One-line-per-block summary of the CFG.  */
      brief_dump_cfg (file);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  /* TDF_BLOCKS makes the function dump include block boundaries.  */
  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
2121
2122
/* Dump CFG statistics on FILE: block/edge counts, approximate memory
   use, and the running maximum of coalesced label blocks.  */

void
dump_cfg_stats (FILE *file)
{
  /* Persists across calls so the "Max so far" figure is cumulative for
     the whole compilation.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);


  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks * sizeof (struct basic_block_def);
  total += size;
  /* NOTE(review): SCALE/LABEL appear to scale byte counts into a
     human-readable unit plus suffix — defined elsewhere in this file;
     confirm.  */
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
	   SCALE (size), LABEL (size));

  num_edges = 0;
  FOR_EACH_BB (bb)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2173
2174
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  Convenience entry point for use
   from a debugger.  */

DEBUG_FUNCTION void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2183
2184
/* Dump the flowgraph to a .vcg FILE.  Emits one node per basic block
   (plus synthetic ENTRY/EXIT nodes) and one edge record per CFG edge;
   fake edges are drawn dotted.  */

static void
gimple_cfg2vcg (FILE *file)
{
  edge e;
  edge_iterator ei;
  basic_block bb;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Write the file header.  */
  fprintf (file, "graph: { title: \"%s\"\n", funcname);
  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");

  /* Write blocks and edges.  Edges out of the entry block first.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    {
      fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
	       e->dest->index);

      if (e->flags & EDGE_FAKE)
	fprintf (file, " linestyle: dotted priority: 10");
      else
	fprintf (file, " linestyle: solid priority: 100");

      fprintf (file, " }\n");
    }
  fputc ('\n', file);

  FOR_EACH_BB (bb)
    {
      enum gimple_code head_code, end_code;
      const char *head_name, *end_name;
      int head_line = 0;
      int end_line = 0;
      gimple first = first_stmt (bb);
      gimple last = last_stmt (bb);

      /* Label each node with its first and last statement's code and
	 source line; empty blocks get "no-statement".  */
      if (first)
	{
	  head_code = gimple_code (first);
	  head_name = gimple_code_name[head_code];
	  head_line = get_lineno (first);
	}
      else
	head_name = "no-statement";

      if (last)
	{
	  end_code = gimple_code (last);
	  end_name = gimple_code_name[end_code];
	  end_line = get_lineno (last);
	}
      else
	end_name = "no-statement";

      fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
	       bb->index, bb->index, head_name, head_line, end_name,
	       end_line);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->dest == EXIT_BLOCK_PTR)
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
	  else
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);

	  if (e->flags & EDGE_FAKE)
	    fprintf (file, " priority: 10 linestyle: dotted");
	  else
	    fprintf (file, " priority: 100 linestyle: solid");

	  fprintf (file, " }\n");
	}

      if (bb->next_bb != EXIT_BLOCK_PTR)
	fputc ('\n', file);
    }

  fputs ("}\n\n", file);
}
2268
2269
2270
2271/*---------------------------------------------------------------------------
2272 Miscellaneous helpers
2273---------------------------------------------------------------------------*/
2274
2275/* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2276 flow. Transfers of control flow associated with EH are excluded. */
2277
2278static bool
2279call_can_make_abnormal_goto (gimple t)
2280{
2281 /* If the function has no non-local labels, then a call cannot make an
2282 abnormal transfer of control. */
2283 if (!cfun->has_nonlocal_label)
2284 return false;
2285
2286 /* Likewise if the call has no side effects. */
2287 if (!gimple_has_side_effects (t))
2288 return false;
2289
2290 /* Likewise if the called function is leaf. */
2291 if (gimple_call_flags (t) & ECF_LEAF)
2292 return false;
2293
2294 return true;
2295}
2296
2297
2298/* Return true if T can make an abnormal transfer of control flow.
2299 Transfers of control flow associated with EH are excluded. */
2300
2301bool
2302stmt_can_make_abnormal_goto (gimple t)
2303{
2304 if (computed_goto_p (t))
2305 return true;
2306 if (is_gimple_call (t))
2307 return call_can_make_abnormal_goto (t);
2308 return false;
2309}
2310
2311
2312/* Return true if T represents a stmt that always transfers control. */
2313
2314bool
2315is_ctrl_stmt (gimple t)
2316{
2317 switch (gimple_code (t))
2318 {
2319 case GIMPLE_COND:
2320 case GIMPLE_SWITCH:
2321 case GIMPLE_GOTO:
2322 case GIMPLE_RETURN:
2323 case GIMPLE_RESX:
2324 return true;
2325 default:
2326 return false;
2327 }
2328}
2329
2330
2331/* Return true if T is a statement that may alter the flow of control
2332 (e.g., a call to a non-returning function). */
2333
bool
is_ctrl_altering_stmt (gimple t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      {
	int flags = gimple_call_flags (t);

	/* A call alters control flow if it can make an abnormal goto.  */
	if (call_can_make_abnormal_goto (t))
	  return true;

	/* A call also alters control flow if it does not return.  */
	if (flags & ECF_NORETURN)
	  return true;

	/* TM ending statements have backedges out of the transaction.
	   Return true so we split the basic block containing them.
	   Note that the TM_BUILTIN test is merely an optimization.  */
	if ((flags & ECF_TM_BUILTIN)
	    && is_tm_ending_fndecl (gimple_call_fndecl (t)))
	  return true;

	/* BUILT_IN_RETURN call is same as return statement.  */
	if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
	  return true;
      }
      /* An ordinary call falls through to the throw check below.  */
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm goto (one with label operands) may branch.  */
      if (gimple_asm_nlabels (t) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
2392
2393
2394/* Return true if T is a simple local goto. */
2395
2396bool
2397simple_goto_p (gimple t)
2398{
2399 return (gimple_code (t) == GIMPLE_GOTO
2400 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2401}
2402
2403
2404/* Return true if STMT should start a new basic block. PREV_STMT is
2405 the statement preceding STMT. It is used when STMT is a label or a
2406 case label. Labels should only start a new basic block if their
2407 previous statement wasn't a label. Otherwise, sequence of labels
2408 would generate unnecessary basic blocks that only contain a single
2409 label. */
2410
static inline bool
stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (stmt))
	  || FORCED_LABEL (gimple_label_label (stmt)))
	return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
	{
	  /* A label following a nonlocal label still begins a new
	     block, because the nonlocal label must end its own.  */
	  if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
	    return true;

	  /* Merge this label into the previous label's block and
	     record the merge for the CFG statistics dump.  */
	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }

  return false;
}
2441
2442
2443/* Return true if T should end a basic block. */
2444
2445bool
2446stmt_ends_bb_p (gimple t)
2447{
2448 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2449}
2450
2451/* Remove block annotations and other data structures. */
2452
void
delete_tree_cfg_annotations (void)
{
  /* Drop the label-to-block map; the garbage collector reclaims it.  */
  label_to_block_map = NULL;
}
2458
2459
2460/* Return the first statement in basic block BB. */
2461
2462gimple
2463first_stmt (basic_block bb)
2464{
2465 gimple_stmt_iterator i = gsi_start_bb (bb);
2466 gimple stmt = NULL;
2467
2468 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2469 {
2470 gsi_next (&i);
2471 stmt = NULL;
2472 }
2473 return stmt;
2474}
2475
2476/* Return the first non-label statement in basic block BB. */
2477
2478static gimple
2479first_non_label_stmt (basic_block bb)
2480{
2481 gimple_stmt_iterator i = gsi_start_bb (bb);
2482 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2483 gsi_next (&i);
2484 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2485}
2486
2487/* Return the last statement in basic block BB. */
2488
2489gimple
2490last_stmt (basic_block bb)
2491{
2492 gimple_stmt_iterator i = gsi_last_bb (bb);
2493 gimple stmt = NULL;
2494
2495 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2496 {
2497 gsi_prev (&i);
2498 stmt = NULL;
2499 }
2500 return stmt;
2501}
2502
2503/* Return the last statement of an otherwise empty block. Return NULL
2504 if the block is totally empty, or if it contains more than one
2505 statement. */
2506
2507gimple
2508last_and_only_stmt (basic_block bb)
2509{
2510 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2511 gimple last, prev;
2512
2513 if (gsi_end_p (i))
2514 return NULL;
2515
2516 last = gsi_stmt (i);
2517 gsi_prev_nondebug (&i);
2518 if (gsi_end_p (i))
2519 return last;
2520
2521 /* Empty statements should no longer appear in the instruction stream.
2522 Everything that might have appeared before should be deleted by
2523 remove_useless_stmts, and the optimizers should just gsi_remove
2524 instead of smashing with build_empty_stmt.
2525
2526 Thus the only thing that should appear here in a block containing
2527 one executable statement is a label. */
2528 prev = gsi_stmt (i);
2529 if (gimple_code (prev) == GIMPLE_LABEL)
2530 return last;
2531 else
2532 return NULL;
2533}
2534
2535/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2536
static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map_vector v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator phis;

  /* The PHI arguments that were on OLD_EDGE were saved aside in its
     redirect map when the edge was redirected; nothing to do if the
     map is empty.  */
  v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the saved (result, def) pairs and the PHI nodes of the new
     destination in lockstep; the map entries were recorded in PHI
     order, which the assert below double-checks.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gimple phi = gsi_stmt (phis);
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* The saved arguments have been consumed; discard the map.  */
  redirect_edge_var_map_clear (old_edge);
}
2564
2565/* Returns the basic block after which the new basic block created
2566 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2567 near its "logical" location. This is of most help to humans looking
2568 at debugging dumps. */
2569
2570static basic_block
2571split_edge_bb_loc (edge edge_in)
2572{
2573 basic_block dest = edge_in->dest;
2574 basic_block dest_prev = dest->prev_bb;
2575
2576 if (dest_prev)
2577 {
2578 edge e = find_edge (dest_prev, dest);
2579 if (e && !(e->flags & EDGE_COMPLEX))
2580 return edge_in->src;
2581 }
2582 return dest_prev;
2583}
2584
2585/* Split a (typically critical) edge EDGE_IN. Return the new block.
2586 Abort on abnormal edges. */
2587
static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  /* Pick a chain position for the new block near its logical place.  */
  after_bb = split_edge_bb_loc (edge_in);

  /* The new block inherits the frequency and count of the edge being
     split; its single fallthru edge to DEST is always taken.  */
  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Retarget the original edge at the new block; redirection stashes
     DEST's PHI arguments, which are reinstalled on the new edge.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
2614
2615
2616/* Verify properties of the address expression T with base object BASE. */
2617
2618static tree
2619verify_address (tree t, tree base)
2620{
2621 bool old_constant;
2622 bool old_side_effects;
2623 bool new_constant;
2624 bool new_side_effects;
2625
2626 old_constant = TREE_CONSTANT (t);
2627 old_side_effects = TREE_SIDE_EFFECTS (t);
2628
2629 recompute_tree_invariant_for_addr_expr (t);
2630 new_side_effects = TREE_SIDE_EFFECTS (t);
2631 new_constant = TREE_CONSTANT (t);
2632
2633 if (old_constant != new_constant)
2634 {
2635 error ("constant not recomputed when ADDR_EXPR changed");
2636 return t;
2637 }
2638 if (old_side_effects != new_side_effects)
2639 {
2640 error ("side effects not recomputed when ADDR_EXPR changed");
2641 return t;
2642 }
2643
2644 if (!(TREE_CODE (base) == VAR_DECL
2645 || TREE_CODE (base) == PARM_DECL
2646 || TREE_CODE (base) == RESULT_DECL))
2647 return NULL_TREE;
2648
2649 if (DECL_GIMPLE_REG_P (base))
2650 {
2651 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2652 return base;
2653 }
2654
2655 return NULL_TREE;
2656}
2657
2658/* Callback for walk_tree, check that all elements with address taken are
2659 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2660 inside a PHI node. */
2661
static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Types carry no operands we need to verify.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      /* Lowering replaces all INDIRECT_REFs with MEM_REFs.  */
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      /* The offset operand must be a pointer-typed integer constant.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR
	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
	return x;
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      /* These are stripped during gimplification and must not survive.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      /* Position and size must be non-negative host integers, and
		 the size must agree with the precision or mode of the
		 result type.  */
	      if (!host_integerp (TREE_OPERAND (t, 1), 1)
		  || !host_integerp (TREE_OPERAND (t, 2), 1))
		{
		  error ("invalid position or size operand to BIT_FIELD_REF");
		  return t;
		}
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
		       && (TYPE_PRECISION (TREE_TYPE (t))
			   != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
		{
		  error ("integral result type precision does not match "
			 "field size of BIT_FIELD_REF");
		  return t;
		}
	      if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		  && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
		      != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
		{
		  error ("mode precision of non-integral result does not "
			 "match field size of BIT_FIELD_REF");
		  return t;
		}
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR. */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors need no elementwise checking.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
2926
2927
2928/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2929 Returns true if there is an error, otherwise false. */
2930
2931static bool
2932verify_types_in_gimple_min_lval (tree expr)
2933{
2934 tree op;
2935
2936 if (is_gimple_id (expr))
2937 return false;
2938
2939 if (TREE_CODE (expr) != TARGET_MEM_REF
2940 && TREE_CODE (expr) != MEM_REF)
2941 {
2942 error ("invalid expression for min lvalue");
2943 return true;
2944 }
2945
2946 /* TARGET_MEM_REFs are strange beasts. */
2947 if (TREE_CODE (expr) == TARGET_MEM_REF)
2948 return false;
2949
2950 op = TREE_OPERAND (expr, 0);
2951 if (!is_gimple_val (op))
2952 {
2953 error ("invalid operand in indirect reference");
2954 debug_generic_stmt (op);
2955 return true;
2956 }
2957 /* Memory references now generally can involve a value conversion. */
2958
2959 return false;
2960}
2961
2962/* Verify if EXPR is a valid GIMPLE reference expression. If
2963 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2964 if there is an error, otherwise false. */
2965
static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Walk down the chain of handled components, checking each level,
     until the base object is reached.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and stride (when present) must all be
	     GIMPLE values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* EXPR is now the base object; check memory-reference bases.  */
  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  /* Finally check the base itself, unless it is an invariant that need
     not be an lvalue.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3095
3096/* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3097 list of pointer-to types that is trivially convertible to DEST. */
3098
3099static bool
3100one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3101{
3102 tree src;
3103
3104 if (!TYPE_POINTER_TO (src_obj))
3105 return true;
3106
3107 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3108 if (useless_type_conversion_p (dest, src))
3109 return true;
3110
3111 return false;
3112}
3113
3114/* Return true if TYPE1 is a fixed-point type and if conversions to and
3115 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3116
3117static bool
3118valid_fixed_convert_types_p (tree type1, tree type2)
3119{
3120 return (FIXED_POINT_TYPE_P (type1)
3121 && (INTEGRAL_TYPE_P (type2)
3122 || SCALAR_FLOAT_TYPE_P (type2)
3123 || FIXED_POINT_TYPE_P (type2)));
3124}
3125
3126/* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3127 is a problem, otherwise false. */
3128
static bool
verify_gimple_call (gimple stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls are identified by an internal function code and
     must not also carry an explicit callee; normal calls must.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The callee expression must be a pointer to a function or method.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* A looping-const-or-pure function must also be const or pure.  */
  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  if (gimple_call_lhs (stmt)
      && (!is_gimple_lvalue (gimple_call_lhs (stmt))
	  || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  /* A call that never returns cannot meaningfully produce a value.  */
  if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
    {
      error ("LHS in noreturn call");
      return true;
    }

  fntype = gimple_call_fntype (stmt);
  if (fntype
      && gimple_call_lhs (stmt)
      && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
				     TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 ??? Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, this should not be an indirect
     call, and the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt))
    {
      if (!gimple_call_fndecl (stmt))
	{
	  error ("static chain in indirect gimple call");
	  return true;
	}
      fn = TREE_OPERAND (fn, 0);

      if (!DECL_STATIC_CHAIN (fn))
	{
	  error ("static chain with function that doesn%'t use one");
	  return true;
	}
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* Register-type arguments must be values; aggregate arguments
	 must at least be lvalues.  */
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
3261
3262/* Verifies the gimple comparison with the result type TYPE and
3263 the operands OP0 and OP1. */
3264
static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if (!useless_type_conversion_p (op0_type, op1_type)
      && !useless_type_conversion_p (op1_type, op0_type)
      && (!POINTER_TYPE_P (op0_type)
	  || !POINTER_TYPE_P (op1_type)
	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
    {
      error ("mismatching comparison operand types");
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  /* The resulting type of a comparison may be an effective boolean type.  */
  if (INTEGRAL_TYPE_P (type)
      && (TREE_CODE (type) == BOOLEAN_TYPE
	  || TYPE_PRECISION (type) == 1))
    ;
  /* Or an integer vector type with the same size and element count
     as the comparison operand types.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
    {
      /* A vector result requires vector operands ...  */
      if (TREE_CODE (op0_type) != VECTOR_TYPE
	  || TREE_CODE (op1_type) != VECTOR_TYPE)
	{
	  error ("non-vector operands in vector comparison");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}

      /* ... with matching element count and element size.  */
      if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
	  || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
	      != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type)))))
	{
	  error ("invalid vector comparison resulting type");
	  debug_generic_expr (type);
	  return true;
	}
    }
  else
    {
      error ("bogus comparison result type");
      debug_generic_expr (type);
      return true;
    }

  return false;
}
3332
3333/* Verify a gimple assignment statement STMT with an unary rhs.
3334 Returns true if anything is wrong. */
3335
3336static bool
3337verify_gimple_assign_unary (gimple stmt)
3338{
3339 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3340 tree lhs = gimple_assign_lhs (stmt);
3341 tree lhs_type = TREE_TYPE (lhs);
3342 tree rhs1 = gimple_assign_rhs1 (stmt);
3343 tree rhs1_type = TREE_TYPE (rhs1);
3344
3345 if (!is_gimple_reg (lhs))
3346 {
3347 error ("non-register as LHS of unary operation");
3348 return true;
3349 }
3350
3351 if (!is_gimple_val (rhs1))
3352 {
3353 error ("invalid operand in unary operation");
3354 return true;
3355 }
3356
3357 /* First handle conversions. */
3358 switch (rhs_code)
3359 {
3360 CASE_CONVERT:
3361 {
3362 /* Allow conversions from pointer type to integral type only if
3363 there is no sign or zero extension involved.
3364 For targets were the precision of ptrofftype doesn't match that
3365 of pointers we need to allow arbitrary conversions to ptrofftype. */
3366 if ((POINTER_TYPE_P (lhs_type)
3367 && INTEGRAL_TYPE_P (rhs1_type))
3368 || (POINTER_TYPE_P (rhs1_type)
3369 && INTEGRAL_TYPE_P (lhs_type)
3370 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3371 || ptrofftype_p (sizetype))))
3372 return false;
3373
3374 /* Allow conversion from integer to offset type and vice versa. */
3375 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3376 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3377 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3378 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3379 return false;
3380
3381 /* Otherwise assert we are converting between types of the
3382 same kind. */
3383 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3384 {
3385 error ("invalid types in nop conversion");
3386 debug_generic_expr (lhs_type);
3387 debug_generic_expr (rhs1_type);
3388 return true;
3389 }
3390
3391 return false;
3392 }
3393
3394 case ADDR_SPACE_CONVERT_EXPR:
3395 {
3396 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3397 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3398 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3399 {
3400 error ("invalid types in address space conversion");
3401 debug_generic_expr (lhs_type);
3402 debug_generic_expr (rhs1_type);
3403 return true;
3404 }
3405
3406 return false;
3407 }
3408
3409 case FIXED_CONVERT_EXPR:
3410 {
3411 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3412 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3413 {
3414 error ("invalid types in fixed-point conversion");
3415 debug_generic_expr (lhs_type);
3416 debug_generic_expr (rhs1_type);
3417 return true;
3418 }
3419
3420 return false;
3421 }
3422
3423 case FLOAT_EXPR:
3424 {
3425 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3426 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3427 || !VECTOR_FLOAT_TYPE_P(lhs_type)))
3428 {
3429 error ("invalid types in conversion to floating point");
3430 debug_generic_expr (lhs_type);
3431 debug_generic_expr (rhs1_type);
3432 return true;
3433 }
3434
3435 return false;
3436 }
3437
3438 case FIX_TRUNC_EXPR:
3439 {
3440 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3441 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3442 || !VECTOR_FLOAT_TYPE_P(rhs1_type)))
3443 {
3444 error ("invalid types in conversion to integer");
3445 debug_generic_expr (lhs_type);
3446 debug_generic_expr (rhs1_type);
3447 return true;
3448 }
3449
3450 return false;
3451 }
3452
3453 case VEC_UNPACK_HI_EXPR:
3454 case VEC_UNPACK_LO_EXPR:
3455 case REDUC_MAX_EXPR:
3456 case REDUC_MIN_EXPR:
3457 case REDUC_PLUS_EXPR:
3458 case VEC_UNPACK_FLOAT_HI_EXPR:
3459 case VEC_UNPACK_FLOAT_LO_EXPR:
3460 /* FIXME. */
3461 return false;
3462
3463 case NEGATE_EXPR:
3464 case ABS_EXPR:
3465 case BIT_NOT_EXPR:
3466 case PAREN_EXPR:
3467 case NON_LVALUE_EXPR:
3468 case CONJ_EXPR:
3469 break;
3470
3471 default:
3472 gcc_unreachable ();
3473 }
3474
3475 /* For the remaining codes assert there is no conversion involved. */
3476 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3477 {
3478 error ("non-trivial conversion in unary operation");
3479 debug_generic_expr (lhs_type);
3480 debug_generic_expr (rhs1_type);
3481 return true;
3482 }
3483
3484 return false;
3485}
3486
3487/* Verify a gimple assignment statement STMT with a binary rhs.
3488 Returns true if anything is wrong. */
3489
3490static bool
3491verify_gimple_assign_binary (gimple stmt)
3492{
3493 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3494 tree lhs = gimple_assign_lhs (stmt);
3495 tree lhs_type = TREE_TYPE (lhs);
3496 tree rhs1 = gimple_assign_rhs1 (stmt);
3497 tree rhs1_type = TREE_TYPE (rhs1);
3498 tree rhs2 = gimple_assign_rhs2 (stmt);
3499 tree rhs2_type = TREE_TYPE (rhs2);
3500
3501 if (!is_gimple_reg (lhs))
3502 {
3503 error ("non-register as LHS of binary operation");
3504 return true;
3505 }
3506
3507 if (!is_gimple_val (rhs1)
3508 || !is_gimple_val (rhs2))
3509 {
3510 error ("invalid operands in binary operation");
3511 return true;
3512 }
3513
3514 /* First handle operations that involve different types. */
3515 switch (rhs_code)
3516 {
3517 case COMPLEX_EXPR:
3518 {
3519 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3520 || !(INTEGRAL_TYPE_P (rhs1_type)
3521 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3522 || !(INTEGRAL_TYPE_P (rhs2_type)
3523 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3524 {
3525 error ("type mismatch in complex expression");
3526 debug_generic_expr (lhs_type);
3527 debug_generic_expr (rhs1_type);
3528 debug_generic_expr (rhs2_type);
3529 return true;
3530 }
3531
3532 return false;
3533 }
3534
3535 case LSHIFT_EXPR:
3536 case RSHIFT_EXPR:
3537 case LROTATE_EXPR:
3538 case RROTATE_EXPR:
3539 {
3540 /* Shifts and rotates are ok on integral types, fixed point
3541 types and integer vector types. */
3542 if ((!INTEGRAL_TYPE_P (rhs1_type)
3543 && !FIXED_POINT_TYPE_P (rhs1_type)
3544 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3545 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3546 || (!INTEGRAL_TYPE_P (rhs2_type)
3547 /* Vector shifts of vectors are also ok. */
3548 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3549 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3550 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3551 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3552 || !useless_type_conversion_p (lhs_type, rhs1_type))
3553 {
3554 error ("type mismatch in shift expression");
3555 debug_generic_expr (lhs_type);
3556 debug_generic_expr (rhs1_type);
3557 debug_generic_expr (rhs2_type);
3558 return true;
3559 }
3560
3561 return false;
3562 }
3563
3564 case VEC_LSHIFT_EXPR:
3565 case VEC_RSHIFT_EXPR:
3566 {
3567 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3568 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3569 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3570 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3571 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3572 || (!INTEGRAL_TYPE_P (rhs2_type)
3573 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3574 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3575 || !useless_type_conversion_p (lhs_type, rhs1_type))
3576 {
3577 error ("type mismatch in vector shift expression");
3578 debug_generic_expr (lhs_type);
3579 debug_generic_expr (rhs1_type);
3580 debug_generic_expr (rhs2_type);
3581 return true;
3582 }
3583 /* For shifting a vector of non-integral components we
3584 only allow shifting by a constant multiple of the element size. */
3585 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3586 && (TREE_CODE (rhs2) != INTEGER_CST
3587 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3588 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3589 {
3590 error ("non-element sized vector shift of floating point vector");
3591 return true;
3592 }
3593
3594 return false;
3595 }
3596
3597 case WIDEN_LSHIFT_EXPR:
3598 {
3599 if (!INTEGRAL_TYPE_P (lhs_type)
3600 || !INTEGRAL_TYPE_P (rhs1_type)
3601 || TREE_CODE (rhs2) != INTEGER_CST
3602 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3603 {
3604 error ("type mismatch in widening vector shift expression");
3605 debug_generic_expr (lhs_type);
3606 debug_generic_expr (rhs1_type);
3607 debug_generic_expr (rhs2_type);
3608 return true;
3609 }
3610
3611 return false;
3612 }
3613
3614 case VEC_WIDEN_LSHIFT_HI_EXPR:
3615 case VEC_WIDEN_LSHIFT_LO_EXPR:
3616 {
3617 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3618 || TREE_CODE (lhs_type) != VECTOR_TYPE
3619 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3620 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3621 || TREE_CODE (rhs2) != INTEGER_CST
3622 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3623 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3624 {
3625 error ("type mismatch in widening vector shift expression");
3626 debug_generic_expr (lhs_type);
3627 debug_generic_expr (rhs1_type);
3628 debug_generic_expr (rhs2_type);
3629 return true;
3630 }
3631
3632 return false;
3633 }
3634
3635 case PLUS_EXPR:
3636 case MINUS_EXPR:
3637 {
3638 /* We use regular PLUS_EXPR and MINUS_EXPR for vectors.
3639 ??? This just makes the checker happy and may not be what is
3640 intended. */
3641 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3642 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3643 {
3644 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3645 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3646 {
3647 error ("invalid non-vector operands to vector valued plus");
3648 return true;
3649 }
3650 lhs_type = TREE_TYPE (lhs_type);
3651 rhs1_type = TREE_TYPE (rhs1_type);
3652 rhs2_type = TREE_TYPE (rhs2_type);
3653 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3654 the pointer to 2nd place. */
3655 if (POINTER_TYPE_P (rhs2_type))
3656 {
3657 tree tem = rhs1_type;
3658 rhs1_type = rhs2_type;
3659 rhs2_type = tem;
3660 }
3661 goto do_pointer_plus_expr_check;
3662 }
3663 if (POINTER_TYPE_P (lhs_type)
3664 || POINTER_TYPE_P (rhs1_type)
3665 || POINTER_TYPE_P (rhs2_type))
3666 {
3667 error ("invalid (pointer) operands to plus/minus");
3668 return true;
3669 }
3670
3671 /* Continue with generic binary expression handling. */
3672 break;
3673 }
3674
3675 case POINTER_PLUS_EXPR:
3676 {
3677do_pointer_plus_expr_check:
3678 if (!POINTER_TYPE_P (rhs1_type)
3679 || !useless_type_conversion_p (lhs_type, rhs1_type)
3680 || !ptrofftype_p (rhs2_type))
3681 {
3682 error ("type mismatch in pointer plus expression");
3683 debug_generic_stmt (lhs_type);
3684 debug_generic_stmt (rhs1_type);
3685 debug_generic_stmt (rhs2_type);
3686 return true;
3687 }
3688
3689 return false;
3690 }
3691
3692 case TRUTH_ANDIF_EXPR:
3693 case TRUTH_ORIF_EXPR:
3694 case TRUTH_AND_EXPR:
3695 case TRUTH_OR_EXPR:
3696 case TRUTH_XOR_EXPR:
3697
3698 gcc_unreachable ();
3699
3700 case LT_EXPR:
3701 case LE_EXPR:
3702 case GT_EXPR:
3703 case GE_EXPR:
3704 case EQ_EXPR:
3705 case NE_EXPR:
3706 case UNORDERED_EXPR:
3707 case ORDERED_EXPR:
3708 case UNLT_EXPR:
3709 case UNLE_EXPR:
3710 case UNGT_EXPR:
3711 case UNGE_EXPR:
3712 case UNEQ_EXPR:
3713 case LTGT_EXPR:
3714 /* Comparisons are also binary, but the result type is not
3715 connected to the operand types. */
3716 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3717
3718 case WIDEN_MULT_EXPR:
3719 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3720 return true;
3721 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3722 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3723
3724 case WIDEN_SUM_EXPR:
3725 case VEC_WIDEN_MULT_HI_EXPR:
3726 case VEC_WIDEN_MULT_LO_EXPR:
3727 case VEC_PACK_TRUNC_EXPR:
3728 case VEC_PACK_SAT_EXPR:
3729 case VEC_PACK_FIX_TRUNC_EXPR:
3730 /* FIXME. */
3731 return false;
3732
3733 case MULT_EXPR:
3734 case TRUNC_DIV_EXPR:
3735 case CEIL_DIV_EXPR:
3736 case FLOOR_DIV_EXPR:
3737 case ROUND_DIV_EXPR:
3738 case TRUNC_MOD_EXPR:
3739 case CEIL_MOD_EXPR:
3740 case FLOOR_MOD_EXPR:
3741 case ROUND_MOD_EXPR:
3742 case RDIV_EXPR:
3743 case EXACT_DIV_EXPR:
3744 case MIN_EXPR:
3745 case MAX_EXPR:
3746 case BIT_IOR_EXPR:
3747 case BIT_XOR_EXPR:
3748 case BIT_AND_EXPR:
3749 /* Continue with generic binary expression handling. */
3750 break;
3751
3752 default:
3753 gcc_unreachable ();
3754 }
3755
3756 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3757 || !useless_type_conversion_p (lhs_type, rhs2_type))
3758 {
3759 error ("type mismatch in binary expression");
3760 debug_generic_stmt (lhs_type);
3761 debug_generic_stmt (rhs1_type);
3762 debug_generic_stmt (rhs2_type);
3763 return true;
3764 }
3765
3766 return false;
3767}
3768
3769/* Verify a gimple assignment statement STMT with a ternary rhs.
3770 Returns true if anything is wrong. */
3771
3772static bool
3773verify_gimple_assign_ternary (gimple stmt)
3774{
3775 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3776 tree lhs = gimple_assign_lhs (stmt);
3777 tree lhs_type = TREE_TYPE (lhs);
3778 tree rhs1 = gimple_assign_rhs1 (stmt);
3779 tree rhs1_type = TREE_TYPE (rhs1);
3780 tree rhs2 = gimple_assign_rhs2 (stmt);
3781 tree rhs2_type = TREE_TYPE (rhs2);
3782 tree rhs3 = gimple_assign_rhs3 (stmt);
3783 tree rhs3_type = TREE_TYPE (rhs3);
3784
3785 if (!is_gimple_reg (lhs))
3786 {
3787 error ("non-register as LHS of ternary operation");
3788 return true;
3789 }
3790
3791 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3792 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3793 || !is_gimple_val (rhs2)
3794 || !is_gimple_val (rhs3))
3795 {
3796 error ("invalid operands in ternary operation");
3797 return true;
3798 }
3799
3800 /* First handle operations that involve different types. */
3801 switch (rhs_code)
3802 {
3803 case WIDEN_MULT_PLUS_EXPR:
3804 case WIDEN_MULT_MINUS_EXPR:
3805 if ((!INTEGRAL_TYPE_P (rhs1_type)
3806 && !FIXED_POINT_TYPE_P (rhs1_type))
3807 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3808 || !useless_type_conversion_p (lhs_type, rhs3_type)
3809 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3810 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3811 {
3812 error ("type mismatch in widening multiply-accumulate expression");
3813 debug_generic_expr (lhs_type);
3814 debug_generic_expr (rhs1_type);
3815 debug_generic_expr (rhs2_type);
3816 debug_generic_expr (rhs3_type);
3817 return true;
3818 }
3819 break;
3820
3821 case FMA_EXPR:
3822 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3823 || !useless_type_conversion_p (lhs_type, rhs2_type)
3824 || !useless_type_conversion_p (lhs_type, rhs3_type))
3825 {
3826 error ("type mismatch in fused multiply-add expression");
3827 debug_generic_expr (lhs_type);
3828 debug_generic_expr (rhs1_type);
3829 debug_generic_expr (rhs2_type);
3830 debug_generic_expr (rhs3_type);
3831 return true;
3832 }
3833 break;
3834
3835 case COND_EXPR:
3836 case VEC_COND_EXPR:
3837 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3838 || !useless_type_conversion_p (lhs_type, rhs3_type))
3839 {
3840 error ("type mismatch in conditional expression");
3841 debug_generic_expr (lhs_type);
3842 debug_generic_expr (rhs2_type);
3843 debug_generic_expr (rhs3_type);
3844 return true;
3845 }
3846 break;
3847
3848 case VEC_PERM_EXPR:
3849