1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
34 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-structalias.h"
46 #include "ipa-type-escape.h"
50 #include "pointer-set.h"
51 #include "alloc-pool.h"
53 /* Broad overview of how aliasing works:
55 First we compute points-to sets, which is done in
56 tree-ssa-structalias.c
58 During points-to set constraint finding, a bunch of little bits of
59 information is collected.
60 This is not done because it is necessary for points-to, but because
61 points-to has to walk every statement anyway. The function performing
62 this collecting is update_alias_info.
64 Bits update_alias_info collects include:
65 1. Directly escaping variables and variables whose value escapes
66 (using is_escape_site). This is the set of variables and values that
67 escape prior to transitive closure of the clobbers.
68 2. The set of variables dereferenced on the LHS (into
69 dereferenced_ptr_stores)
70 3. The set of variables dereferenced on the RHS (into
71 dereferenced_ptr_loads)
72 4. The set of all pointers we saw.
73 5. The number of loads and stores for each variable
74 6. The number of statements touching memory
75 7. The set of address taken variables.
78 #1 is computed by a combination of is_escape_site, and counting the
79 number of uses/deref operators. This function properly accounts for
80 situations like &ptr->field, which is *not* a dereference.
82 After points-to sets are computed, the sets themselves still
83 contain points-to specific variables, such as a variable that says
84 the pointer points to anything, a variable that says the pointer
85 points to readonly memory, etc.
87 These are eliminated in a later phase, as we will see.
89 The rest of the phases are located in tree-ssa-alias.c
91 The next phase after points-to set computation is called
92 "setup_pointers_and_addressables"
94 This pass does 3 main things:
96 1. All variables that can have TREE_ADDRESSABLE removed safely (IE
97 non-globals whose address is not taken), have TREE_ADDRESSABLE
99 2. All variables that may be aliased (which is the set of addressable
100 variables and globals) at all, are marked for renaming, and have
101 symbol memory tags created for them.
102 3. All variables which are stored into have their SMT's added to
106 After this function is run, all variables that will ever have an
107 SMT, have one, though its aliases are not filled in.
109 The next phase is to compute flow-insensitive aliasing, which in
110 our case, is a misnomer. It is really computing aliasing that
111 requires no transitive closure to be correct. In particular, it
112 uses stack vs non-stack, TBAA, etc, to determine whether two
113 symbols could *ever* alias. This phase works by going through all
114 the pointers we collected during update_alias_info, and for every
115 addressable variable in the program, seeing if they alias. If so,
116 the addressable variable is added to the symbol memory tag for the
119 As part of this, we handle symbol memory tags that conflict but
120 have no aliases in common, by forcing them to have a symbol in
121 common (through unioning alias sets or adding one as an alias of
122 the other), or by adding one as an alias of another. The case of
123 conflicts with no aliases in common occurs mainly due to aliasing
124 we cannot see. In particular, it generally means we have a load
125 through a pointer whose value came from outside the function.
126 Without an addressable symbol to point to, they would get the wrong
129 After flow insensitive aliasing is computed, we compute name tags
130 (called compute_flow_sensitive_info). We walk each pointer we
131 collected and see if it has a usable points-to set. If so, we
132 generate a name tag using that pointer, and make an alias bitmap for
133 it. Name tags are shared between all things with the same alias
134 bitmap. The alias bitmap will be translated from what points-to
135 computed. In particular, the "anything" variable in points-to will be
136 transformed into a pruned set of SMT's and their aliases that
137 compute_flow_insensitive_aliasing computed.
138 Note that since 4.3, every pointer that points-to computed a solution for
139 will get a name tag (whereas before 4.3, only those whose set did
140 *not* include the anything variable would). At the point where name
141 tags are all assigned, symbol memory tags are dead, and could be
142 deleted, *except* on global variables. Global variables still use
143 symbol memory tags as of right now.
145 After name tags are computed, the set of clobbered variables is
146 transitively closed. In particular, we compute the set of clobbered
147 variables based on the initial set of clobbers, plus the aliases of
148 pointers which either escape, or have their value escape.
150 After this, maybe_create_global_var is run, which handles a corner
151 case where we have no call clobbered variables, but have pure and
154 Staring at this function, I now remember it is a hack for the fact
155 that we do not mark all globals in the program as call clobbered for a
156 function unless they are actually used in that function. Instead, we
157 only mark the set that is actually clobbered. As a result, you can
158 end up with situations where you have no call clobbered vars set.
160 After maybe_create_global_var, we set pointers with the REF_ALL flag
161 to have alias sets that include all clobbered
162 memory tags and variables.
164 After this, memory partitioning is computed (by the function
165 compute_memory_partitions) and alias sets are reworked accordingly.
167 Lastly, we delete partitions with no symbols, and clean up after
171 /* Alias information used by compute_may_aliases and its helpers. */
174 /* SSA names visited while collecting points-to information. If bit I
175 is set, it means that SSA variable with version I has already been
177 sbitmap ssa_names_visited;
179 /* Array of SSA_NAME pointers processed by the points-to collector. */
180 VEC(tree,heap) *processed_ptrs;
182 /* ADDRESSABLE_VARS contains all the global variables and locals that
183 have had their address taken. */
184 struct alias_map_d **addressable_vars;
185 size_t num_addressable_vars;
187 /* POINTERS contains all the _DECL pointers with unique memory tags
188 that have been referenced in the program. */
189 struct alias_map_d **pointers;
192 /* Pointers that have been used in an indirect load/store operation. */
193 struct pointer_set_t *dereferenced_ptrs;
197 /* Structure to map a variable to its alias set. */
200 /* Variable and its alias set. */
206 /* Counters used to display statistics on alias analysis. */
209 unsigned int alias_queries;
210 unsigned int alias_mayalias;
211 unsigned int alias_noalias;
212 unsigned int simple_queries;
213 unsigned int simple_resolved;
214 unsigned int tbaa_queries;
215 unsigned int tbaa_resolved;
216 unsigned int structnoaddress_queries;
217 unsigned int structnoaddress_resolved;
221 /* Local variables. */
222 static struct alias_stats_d alias_stats;
223 static bitmap_obstack alias_bitmap_obstack;
225 /* Local functions. */
226 static void compute_flow_insensitive_aliasing (struct alias_info *);
227 static void dump_alias_stats (FILE *);
228 static tree create_memory_tag (tree type, bool is_type_tag);
229 static tree get_smt_for (tree, struct alias_info *);
230 static tree get_nmt_for (tree);
231 static void add_may_alias (tree, tree);
232 static struct alias_info *init_alias_info (void);
233 static void delete_alias_info (struct alias_info *);
234 static void compute_flow_sensitive_aliasing (struct alias_info *);
235 static void setup_pointers_and_addressables (struct alias_info *);
236 static void update_alias_info (struct alias_info *);
237 static void create_global_var (void);
238 static void maybe_create_global_var (void);
239 static void set_pt_anything (tree);
241 void debug_mp_info (VEC(mem_sym_stats_t,heap) *);
243 static alloc_pool mem_sym_stats_pool;
245 /* Return memory reference stats for symbol VAR. Create a new slot in
246 cfun->gimple_df->mem_sym_stats if needed. */
248 static struct mem_sym_stats_d *
249 get_mem_sym_stats_for (tree var)
252 struct mem_sym_stats_d *stats;
253 struct pointer_map_t *map = gimple_mem_ref_stats (cfun)->mem_sym_stats;
257 slot = pointer_map_insert (map, var);
260 stats = (struct mem_sym_stats_d *) pool_alloc (mem_sym_stats_pool);
261 memset (stats, 0, sizeof (*stats));
263 *slot = (void *) stats;
266 stats = (struct mem_sym_stats_d *) *slot;
272 /* Return memory reference statistics for variable VAR in function FN.
273 This is computed by alias analysis, but it is not kept
274 incrementally up-to-date. So, these stats are only accurate if
275 pass_may_alias has been run recently. If no alias information
276 exists, this function returns NULL. */
278 static mem_sym_stats_t
279 mem_sym_stats (struct function *fn, tree var)
282 struct pointer_map_t *stats_map = gimple_mem_ref_stats (fn)->mem_sym_stats;
284 if (stats_map == NULL)
287 slot = pointer_map_contains (stats_map, var);
291 return (mem_sym_stats_t) *slot;
295 /* Set MPT to be the memory partition associated with symbol SYM. */
298 set_memory_partition (tree sym, tree mpt)
300 #if defined ENABLE_CHECKING
302 gcc_assert (TREE_CODE (mpt) == MEMORY_PARTITION_TAG
303 && !is_gimple_reg (sym));
306 var_ann (sym)->mpt = mpt;
309 if (MPT_SYMBOLS (mpt) == NULL)
310 MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&alias_bitmap_obstack);
312 bitmap_set_bit (MPT_SYMBOLS (mpt), DECL_UID (sym));
314 /* MPT inherits the call-clobbering attributes from SYM. */
315 if (is_call_clobbered (sym))
317 MTAG_GLOBAL (mpt) = 1;
318 mark_call_clobbered (mpt, ESCAPE_IS_GLOBAL);
324 /* Mark variable VAR as being non-addressable. */
327 mark_non_addressable (tree var)
331 if (!TREE_ADDRESSABLE (var))
334 mpt = memory_partition (var);
336 clear_call_clobbered (var);
337 TREE_ADDRESSABLE (var) = 0;
341 /* Note that it's possible for a symbol to have an associated
342 MPT and the MPT have a NULL empty set. During
343 init_alias_info, all MPTs get their sets cleared out, but the
344 symbols still point to the old MPTs that used to hold them.
345 This is done so that compute_memory_partitions can now which
346 symbols are losing or changing partitions and mark them for
348 if (MPT_SYMBOLS (mpt))
349 bitmap_clear_bit (MPT_SYMBOLS (mpt), DECL_UID (var));
350 set_memory_partition (var, NULL_TREE);
355 /* qsort comparison function to sort type/name tags by DECL_UID. */
358 sort_tags_by_id (const void *pa, const void *pb)
360 const_tree const a = *(const_tree const *)pa;
361 const_tree const b = *(const_tree const *)pb;
363 return DECL_UID (a) - DECL_UID (b);
366 /* Initialize WORKLIST to contain those memory tags that are marked call
367 clobbered. Initialized WORKLIST2 to contain the reasons these
368 memory tags escaped. */
371 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
372 VEC (int, heap) **worklist2,
375 referenced_var_iterator rvi;
378 FOR_EACH_REFERENCED_VAR (curr, rvi)
380 if (MTAG_P (curr) && is_call_clobbered (curr))
382 VEC_safe_push (tree, heap, *worklist, curr);
383 VEC_safe_push (int, heap, *worklist2,
384 var_ann (curr)->escape_mask);
385 bitmap_set_bit (on_worklist, DECL_UID (curr));
390 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
391 ALIAS is not already marked call clobbered, and is a memory
395 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
396 VEC (int, heap) **worklist2, int reason,
399 if (MTAG_P (alias) && !is_call_clobbered (alias)
400 && !bitmap_bit_p (on_worklist, DECL_UID (alias)))
402 VEC_safe_push (tree, heap, *worklist, alias);
403 VEC_safe_push (int, heap, *worklist2, reason);
404 bitmap_set_bit (on_worklist, DECL_UID (alias));
408 /* Mark aliases of TAG as call clobbered, and place any tags on the
409 alias list that were not already call clobbered on WORKLIST. */
412 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
413 VEC (int, heap) **worklist2, bitmap on_worklist)
419 var_ann_t ta = var_ann (tag);
423 aliases = may_aliases (tag);
427 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
429 entry = referenced_var (i);
430 /* If you clobber one part of a structure, you
431 clobber the entire thing. While this does not make
432 the world a particularly nice place, it is necessary
433 in order to allow C/C++ tricks that involve
434 pointer arithmetic to work. */
435 if (!unmodifiable_var_p (entry))
437 add_to_worklist (entry, worklist, worklist2, ta->escape_mask,
439 mark_call_clobbered (entry, ta->escape_mask);
444 /* Tags containing global vars need to be marked as global.
445 Tags containing call clobbered vars need to be marked as call
449 compute_tag_properties (void)
451 referenced_var_iterator rvi;
454 VEC (tree, heap) *taglist = NULL;
456 FOR_EACH_REFERENCED_VAR (tag, rvi)
460 VEC_safe_push (tree, heap, taglist, tag);
463 /* We sort the taglist by DECL_UID, for two reasons.
464 1. To get a sequential ordering to make the bitmap accesses
466 2. Because of the way we compute aliases, it's more likely that
467 an earlier tag is included in a later tag, and this will reduce
468 the number of iterations.
470 If we had a real tag graph, we would just topo-order it and be
472 qsort (VEC_address (tree, taglist),
473 VEC_length (tree, taglist),
477 /* Go through each tag not marked as global, and if it aliases
478 global vars, mark it global.
480 If the tag contains call clobbered vars, mark it call
483 This loop iterates because tags may appear in the may-aliases
484 list of other tags when we group. */
491 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
497 bool tagcc = is_call_clobbered (tag);
498 bool tagglobal = MTAG_GLOBAL (tag);
500 if (tagcc && tagglobal)
503 ma = may_aliases (tag);
507 EXECUTE_IF_SET_IN_BITMAP (ma, 0, i, bi)
509 entry = referenced_var (i);
510 /* Call clobbered entries cause the tag to be marked
512 if (!tagcc && is_call_clobbered (entry))
514 mark_call_clobbered (tag, var_ann (entry)->escape_mask);
519 /* Global vars cause the tag to be marked global. */
520 if (!tagglobal && is_global_var (entry))
522 MTAG_GLOBAL (tag) = true;
527 /* Early exit once both global and cc are set, since the
528 loop can't do any more than that. */
529 if (tagcc && tagglobal)
534 VEC_free (tree, heap, taglist);
537 /* Set up the initial variable clobbers, call-uses and globalness.
538 When this function completes, only tags whose aliases need to be
539 clobbered will be set clobbered. Tags clobbered because they
540 contain call clobbered vars are handled in compute_tag_properties. */
543 set_initial_properties (struct alias_info *ai)
546 referenced_var_iterator rvi;
549 bool any_pt_anything = false;
550 enum escape_type pt_anything_mask = 0;
552 FOR_EACH_REFERENCED_VAR (var, rvi)
554 if (is_global_var (var))
556 if (!unmodifiable_var_p (var))
557 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
559 else if (TREE_CODE (var) == PARM_DECL
560 && gimple_default_def (cfun, var)
561 && POINTER_TYPE_P (TREE_TYPE (var)))
563 tree def = gimple_default_def (cfun, var);
564 get_ptr_info (def)->value_escapes_p = 1;
565 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
569 if (!clobber_what_escaped ())
571 any_pt_anything = true;
572 pt_anything_mask |= ESCAPE_TO_CALL;
575 compute_call_used_vars ();
577 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
579 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
580 tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
582 /* A pointer that only escapes via a function return does not
583 add to the call clobber or call used solution.
584 To exclude ESCAPE_TO_PURE_CONST we would need to track
585 call used variables separately or compute those properly
586 in the operand scanner. */
587 if (pi->value_escapes_p
588 && pi->escape_mask & ~ESCAPE_TO_RETURN)
590 /* If PTR escapes then its associated memory tags and
591 pointed-to variables are call-clobbered. */
592 if (pi->name_mem_tag)
593 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
596 mark_call_clobbered (tag, pi->escape_mask);
599 /* If the name tag is call clobbered, so is the symbol tag
600 associated with the base VAR_DECL. */
603 && is_call_clobbered (pi->name_mem_tag))
604 mark_call_clobbered (tag, pi->escape_mask);
606 /* Name tags and symbol tags that we don't know where they point
607 to, might point to global memory, and thus, are clobbered.
609 FIXME: This is not quite right. They should only be
610 clobbered if value_escapes_p is true, regardless of whether
611 they point to global memory or not.
612 So removing this code and fixing all the bugs would be nice.
613 It is the cause of a bunch of clobbering. */
614 if ((pi->pt_global_mem || pi->pt_anything)
615 && pi->memory_tag_needed && pi->name_mem_tag)
617 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
618 MTAG_GLOBAL (pi->name_mem_tag) = true;
621 if ((pi->pt_global_mem || pi->pt_anything)
622 && pi->memory_tag_needed
625 mark_call_clobbered (tag, ESCAPE_IS_GLOBAL);
626 MTAG_GLOBAL (tag) = true;
630 /* If a pt_anything pointer escaped we need to mark all addressable
631 variables call clobbered. */
637 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, j, bi)
639 tree var = referenced_var (j);
640 if (!unmodifiable_var_p (var))
641 mark_call_clobbered (var, pt_anything_mask);
646 /* Compute which variables need to be marked call clobbered because
647 their tag is call clobbered, and which tags need to be marked
648 global because they contain global variables. */
651 compute_call_clobbered (struct alias_info *ai)
653 VEC (tree, heap) *worklist = NULL;
654 VEC (int,heap) *worklist2 = NULL;
657 timevar_push (TV_CALL_CLOBBER);
658 on_worklist = BITMAP_ALLOC (NULL);
660 set_initial_properties (ai);
661 init_transitive_clobber_worklist (&worklist, &worklist2, on_worklist);
662 while (VEC_length (tree, worklist) != 0)
664 tree curr = VEC_pop (tree, worklist);
665 int reason = VEC_pop (int, worklist2);
667 bitmap_clear_bit (on_worklist, DECL_UID (curr));
668 mark_call_clobbered (curr, reason);
669 mark_aliases_call_clobbered (curr, &worklist, &worklist2, on_worklist);
671 VEC_free (tree, heap, worklist);
672 VEC_free (int, heap, worklist2);
673 BITMAP_FREE (on_worklist);
674 compute_tag_properties ();
675 timevar_pop (TV_CALL_CLOBBER);
679 /* Dump memory partition information to FILE. */
682 dump_memory_partitions (FILE *file)
688 fprintf (file, "\nMemory partitions\n\n");
689 for (i = 0, npart = 0, nsyms = 0;
690 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
695 bitmap syms = MPT_SYMBOLS (mpt);
696 unsigned long n = (syms) ? bitmap_count_bits (syms) : 0;
698 fprintf (file, "#%u: ", i);
699 print_generic_expr (file, mpt, 0);
700 fprintf (file, ": %lu elements: ", n);
701 dump_decl_set (file, syms);
707 fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
711 /* Dump memory partition information to stderr. */
714 debug_memory_partitions (void)
716 dump_memory_partitions (stderr);
720 /* Return true if memory partitioning is required given the memory
721 reference estimates in STATS. */
724 need_to_partition_p (struct mem_ref_stats_d *stats)
726 long num_vops = stats->num_vuses + stats->num_vdefs;
727 long avg_vops = CEIL (num_vops, stats->num_mem_stmts);
728 return (num_vops > (long) MAX_ALIASED_VOPS
729 && avg_vops > (long) AVG_ALIASED_VOPS);
733 /* Count the actual number of virtual operators in CFUN. Note that
734 this is only meaningful after virtual operands have been populated,
735 so it should be invoked at the end of compute_may_aliases.
737 The number of virtual operators are stored in *NUM_VDEFS_P and
738 *NUM_VUSES_P, the number of partitioned symbols in
739 *NUM_PARTITIONED_P and the number of unpartitioned symbols in
740 *NUM_UNPARTITIONED_P.
742 If any of these pointers is NULL the corresponding count is not
746 count_mem_refs (long *num_vuses_p, long *num_vdefs_p,
747 long *num_partitioned_p, long *num_unpartitioned_p)
749 gimple_stmt_iterator gsi;
751 long num_vdefs, num_vuses, num_partitioned, num_unpartitioned;
752 referenced_var_iterator rvi;
755 num_vuses = num_vdefs = num_partitioned = num_unpartitioned = 0;
757 if (num_vuses_p || num_vdefs_p)
759 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
761 gimple stmt = gsi_stmt (gsi);
762 if (gimple_references_memory_p (stmt))
764 num_vuses += NUM_SSA_OPERANDS (stmt, SSA_OP_VUSE);
765 num_vdefs += NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
769 if (num_partitioned_p || num_unpartitioned_p)
770 FOR_EACH_REFERENCED_VAR (sym, rvi)
772 if (is_gimple_reg (sym))
775 if (memory_partition (sym))
782 *num_vdefs_p = num_vdefs;
785 *num_vuses_p = num_vuses;
787 if (num_partitioned_p)
788 *num_partitioned_p = num_partitioned;
790 if (num_unpartitioned_p)
791 *num_unpartitioned_p = num_unpartitioned;
795 /* The list is sorted by increasing partitioning score (PSCORE).
796 This score is computed such that symbols with high scores are
797 those that are least likely to be partitioned. Given a symbol
798 MP->VAR, PSCORE(S) is the result of the following weighted sum
800 PSCORE(S) = FW * 64 + FR * 32
807 FW Execution frequency of writes to S
808 FR Execution frequency of reads from S
809 DW Number of direct writes to S
810 DR Number of direct reads from S
811 IW Number of indirect writes to S
812 IR Number of indirect reads from S
813 NO_ALIAS State of the NO_ALIAS* flags
815 The basic idea here is that symbols that are frequently
816 written-to in hot paths of the code are the last to be considered
820 mem_sym_score (mem_sym_stats_t mp)
822 return mp->frequency_writes * 64 + mp->frequency_reads * 32
823 + mp->num_direct_writes * 16 + mp->num_direct_reads * 8
824 + mp->num_indirect_writes * 4 + mp->num_indirect_reads * 2
825 + var_ann (mp->var)->noalias_state;
829 /* Dump memory reference stats for function CFUN to FILE. */
832 dump_mem_ref_stats (FILE *file)
834 long actual_num_vuses, actual_num_vdefs;
835 long num_partitioned, num_unpartitioned;
836 struct mem_ref_stats_d *stats;
838 stats = gimple_mem_ref_stats (cfun);
840 count_mem_refs (&actual_num_vuses, &actual_num_vdefs, &num_partitioned,
843 fprintf (file, "\nMemory reference statistics for %s\n\n",
844 lang_hooks.decl_printable_name (current_function_decl, 2));
846 fprintf (file, "Number of memory statements: %ld\n",
847 stats->num_mem_stmts);
848 fprintf (file, "Number of call sites: %ld\n",
849 stats->num_call_sites);
850 fprintf (file, "Number of pure/const call sites: %ld\n",
851 stats->num_pure_const_call_sites);
852 fprintf (file, "Number of asm sites: %ld\n",
853 stats->num_asm_sites);
854 fprintf (file, "Estimated number of loads: %ld (%ld/stmt)\n",
856 (stats->num_mem_stmts)
857 ? CEIL (stats->num_vuses, stats->num_mem_stmts)
859 fprintf (file, "Actual number of loads: %ld (%ld/stmt)\n",
861 (stats->num_mem_stmts)
862 ? CEIL (actual_num_vuses, stats->num_mem_stmts)
865 if (actual_num_vuses > stats->num_vuses + (stats->num_vuses / 25))
866 fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");
868 fprintf (file, "Estimated number of stores: %ld (%ld/stmt)\n",
870 (stats->num_mem_stmts)
871 ? CEIL (stats->num_vdefs, stats->num_mem_stmts)
873 fprintf (file, "Actual number of stores: %ld (%ld/stmt)\n",
875 (stats->num_mem_stmts)
876 ? CEIL (actual_num_vdefs, stats->num_mem_stmts)
879 if (actual_num_vdefs > stats->num_vdefs + (stats->num_vdefs / 25))
880 fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");
882 fprintf (file, "Partitioning thresholds: MAX = %d AVG = %d "
883 "(%sNEED TO PARTITION)\n", MAX_ALIASED_VOPS, AVG_ALIASED_VOPS,
884 stats->num_mem_stmts && need_to_partition_p (stats) ? "" : "NO ");
885 fprintf (file, "Number of partitioned symbols: %ld\n", num_partitioned);
886 fprintf (file, "Number of unpartitioned symbols: %ld\n", num_unpartitioned);
890 /* Dump memory reference stats for function FN to stderr. */
893 debug_mem_ref_stats (void)
895 dump_mem_ref_stats (stderr);
899 /* Dump memory reference stats for variable VAR to FILE. */
902 dump_mem_sym_stats (FILE *file, tree var)
904 mem_sym_stats_t stats = mem_sym_stats (cfun, var);
909 fprintf (file, "read frequency: %6ld, write frequency: %6ld, "
910 "direct reads: %3ld, direct writes: %3ld, "
911 "indirect reads: %4ld, indirect writes: %4ld, symbol: ",
912 stats->frequency_reads, stats->frequency_writes,
913 stats->num_direct_reads, stats->num_direct_writes,
914 stats->num_indirect_reads, stats->num_indirect_writes);
915 print_generic_expr (file, stats->var, 0);
916 fprintf (file, ", tags: ");
917 dump_decl_set (file, stats->parent_tags);
921 /* Dump memory reference stats for variable VAR to stderr. */
924 debug_mem_sym_stats (tree var)
926 dump_mem_sym_stats (stderr, var);
929 /* Dump memory reference stats for variable VAR to FILE. For use
930 of tree-dfa.c:dump_variable. */
933 dump_mem_sym_stats_for_var (FILE *file, tree var)
935 mem_sym_stats_t stats = mem_sym_stats (cfun, var);
940 fprintf (file, ", score: %ld", mem_sym_score (stats));
941 fprintf (file, ", direct reads: %ld", stats->num_direct_reads);
942 fprintf (file, ", direct writes: %ld", stats->num_direct_writes);
943 fprintf (file, ", indirect reads: %ld", stats->num_indirect_reads);
944 fprintf (file, ", indirect writes: %ld", stats->num_indirect_writes);
947 /* Dump memory reference stats for all memory symbols to FILE. */
950 dump_all_mem_sym_stats (FILE *file)
952 referenced_var_iterator rvi;
955 FOR_EACH_REFERENCED_VAR (sym, rvi)
957 if (is_gimple_reg (sym))
960 dump_mem_sym_stats (file, sym);
965 /* Dump memory reference stats for all memory symbols to stderr. */
968 debug_all_mem_sym_stats (void)
970 dump_all_mem_sym_stats (stderr);
974 /* Dump the MP_INFO array to FILE. */
977 dump_mp_info (FILE *file, VEC(mem_sym_stats_t,heap) *mp_info)
980 mem_sym_stats_t mp_p;
982 for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
983 if (!mp_p->partitioned_p)
984 dump_mem_sym_stats (file, mp_p->var);
988 /* Dump the MP_INFO array to stderr. */
991 debug_mp_info (VEC(mem_sym_stats_t,heap) *mp_info)
993 dump_mp_info (stderr, mp_info);
997 /* Update memory reference stats for symbol VAR in statement STMT.
998 NUM_DIRECT_READS and NUM_DIRECT_WRITES specify the number of times
999 that VAR is read/written in STMT (indirect reads/writes are not
1000 recorded by this function, see compute_memory_partitions). */
1003 update_mem_sym_stats_from_stmt (tree var, gimple stmt, long num_direct_reads,
1004 long num_direct_writes)
1006 mem_sym_stats_t stats;
1008 gcc_assert (num_direct_reads >= 0 && num_direct_writes >= 0);
1010 stats = get_mem_sym_stats_for (var);
1012 stats->num_direct_reads += num_direct_reads;
1013 stats->frequency_reads += ((long) gimple_bb (stmt)->frequency
1014 * num_direct_reads);
1016 stats->num_direct_writes += num_direct_writes;
1017 stats->frequency_writes += ((long) gimple_bb (stmt)->frequency
1018 * num_direct_writes);
1022 /* Given two MP_INFO entries MP1 and MP2, return -1 if MP1->VAR should
1023 be partitioned before MP2->VAR, 0 if they are the same or 1 if
1024 MP1->VAR should be partitioned after MP2->VAR. */
1027 compare_mp_info_entries (mem_sym_stats_t mp1, mem_sym_stats_t mp2)
1029 long pscore1 = mem_sym_score (mp1);
1030 long pscore2 = mem_sym_score (mp2);
1032 if (pscore1 < pscore2)
1034 else if (pscore1 > pscore2)
1037 return DECL_UID (mp1->var) - DECL_UID (mp2->var);
1041 /* Comparison routine for qsort. The list is sorted by increasing
1042 partitioning score (PSCORE). This score is computed such that
1043 symbols with high scores are those that are least likely to be
1047 mp_info_cmp (const void *p, const void *q)
1049 mem_sym_stats_t e1 = *((const mem_sym_stats_t *) p);
1050 mem_sym_stats_t e2 = *((const mem_sym_stats_t *) q);
1051 return compare_mp_info_entries (e1, e2);
1055 /* Sort the array of reference counts used to compute memory partitions.
1056 Elements are sorted in ascending order of execution frequency and
1057 descending order of virtual operators needed. */
1060 sort_mp_info (VEC(mem_sym_stats_t,heap) *list)
1062 unsigned num = VEC_length (mem_sym_stats_t, list);
1069 if (compare_mp_info_entries (VEC_index (mem_sym_stats_t, list, 0),
1070 VEC_index (mem_sym_stats_t, list, 1)) > 0)
1072 /* Swap elements if they are in the wrong order. */
1073 mem_sym_stats_t tmp = VEC_index (mem_sym_stats_t, list, 0);
1074 VEC_replace (mem_sym_stats_t, list, 0,
1075 VEC_index (mem_sym_stats_t, list, 1));
1076 VEC_replace (mem_sym_stats_t, list, 1, tmp);
1082 /* There are 3 or more elements, call qsort. */
1083 qsort (VEC_address (mem_sym_stats_t, list),
1084 VEC_length (mem_sym_stats_t, list),
1085 sizeof (mem_sym_stats_t),
1090 /* Return the memory partition tag (MPT) associated with memory
1094 get_mpt_for (tree sym)
1098 /* Don't create a new tag unnecessarily. */
1099 mpt = memory_partition (sym);
1100 if (mpt == NULL_TREE)
1102 mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
1103 TREE_ADDRESSABLE (mpt) = 0;
1104 add_referenced_var (mpt);
1105 VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
1106 gcc_assert (MPT_SYMBOLS (mpt) == NULL);
1107 set_memory_partition (sym, mpt);
1114 /* Add MP_P->VAR to a memory partition and return the partition. */
1117 find_partition_for (mem_sym_stats_t mp_p)
1120 VEC(tree,heap) *mpt_table;
1123 mpt_table = gimple_ssa_operands (cfun)->mpt_table;
1126 /* Find an existing partition for MP_P->VAR. */
1127 for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
1129 mem_sym_stats_t mpt_stats;
1131 /* If MPT does not have any symbols yet, use it. */
1132 if (MPT_SYMBOLS (mpt) == NULL)
1135 /* Otherwise, see if MPT has common parent tags with MP_P->VAR,
1136 but avoid grouping clobbered variables with non-clobbered
1137 variables (otherwise, this tends to creates a single memory
1138 partition because other call-clobbered variables may have
1139 common parent tags with non-clobbered ones). */
1140 mpt_stats = get_mem_sym_stats_for (mpt);
1141 if (mp_p->parent_tags
1142 && mpt_stats->parent_tags
1143 && is_call_clobbered (mpt) == is_call_clobbered (mp_p->var)
1144 && bitmap_intersect_p (mpt_stats->parent_tags, mp_p->parent_tags))
1147 /* If no common parent tags are found, see if both MPT and
1148 MP_P->VAR are call-clobbered. */
1149 if (is_call_clobbered (mpt) && is_call_clobbered (mp_p->var))
1153 if (mpt == NULL_TREE)
1154 mpt = get_mpt_for (mp_p->var);
1156 set_memory_partition (mp_p->var, mpt);
1158 mp_p->partitioned_p = true;
1160 mark_sym_for_renaming (mp_p->var);
1161 mark_sym_for_renaming (mpt);
/* NOTE(review): sampled excerpt -- lines are missing (embedded numbering
   jumps); comments only were added below.  */
1167 /* Rewrite the alias set for TAG to use the newly created partitions.
1168 If TAG is NULL, rewrite the set of call-clobbered variables.
1169 NEW_ALIASES is a scratch bitmap to build the new set of aliases for
1173 rewrite_alias_set_for (tree tag, bitmap new_aliases)
1179 EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, i, bi)
1181 sym = referenced_var (i);
1182 mpt = memory_partition (sym);
/* If SYM was placed in a partition, the new alias set records the
   partition's UID; otherwise it keeps the symbol itself.  */
1184 bitmap_set_bit (new_aliases, DECL_UID (mpt));
1186 bitmap_set_bit (new_aliases, DECL_UID (sym));
1189 /* Rebuild the may-alias array for TAG. */
1190 bitmap_copy (MTAG_ALIASES (tag), new_aliases);
/* NOTE(review): sampled excerpt -- embedded line numbers jump, so some
   statements/braces are absent.  Only comments were added.  */
1194 /* Determine how many virtual operands can be saved by partitioning
1195 MP_P->VAR into MPT. When a symbol S is thrown inside a partition
1196 P, every virtual operand that used to reference S will now
1197 reference P. Whether it reduces the number of virtual operands
1200 1- Direct references to S are never saved. Instead of the virtual
1201 operand to S, we will now have a virtual operand to P.
1203 2- Indirect references to S are reduced only for those memory tags
1204 holding S that already had other symbols partitioned into P.
1205 For instance, if a memory tag T has the alias set { a b S c },
1206 the first time we partition S into P, the alias set will become
1207 { a b P c }, so no virtual operands will be saved. However, if
1208 we now partition symbol 'c' into P, then the alias set for T
1209 will become { a b P }, so we will be saving one virtual operand
1210 for every indirect reference to 'c'.
1212 3- Is S is call-clobbered, we save as many virtual operands as
1213 call/asm sites exist in the code, but only if other
1214 call-clobbered symbols have been grouped into P. The first
1215 call-clobbered symbol that we group does not produce any
1218 MEM_REF_STATS points to CFUN's memory reference information. */
1221 estimate_vop_reduction (struct mem_ref_stats_d *mem_ref_stats,
1222 mem_sym_stats_t mp_p, tree mpt)
1226 mem_sym_stats_t mpt_stats;
1228 /* We should only get symbols with indirect references here. */
1229 gcc_assert (mp_p->num_indirect_reads > 0 || mp_p->num_indirect_writes > 0);
1231 /* Note that the only statistics we keep for MPT is the set of
1232 parent tags to know which memory tags have had alias members
1233 partitioned, and the indicator has_call_clobbered_vars.
1234 Reference counts are not important for MPT. */
1235 mpt_stats = get_mem_sym_stats_for (mpt);
1237 /* Traverse all the parent tags for MP_P->VAR. For every tag T, if
1238 partition P is already grouping aliases of T, then reduce the
1239 number of virtual operands by the number of direct references
1241 if (mp_p->parent_tags)
1243 if (mpt_stats->parent_tags == NULL)
1244 mpt_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);
1246 EXECUTE_IF_SET_IN_BITMAP (mp_p->parent_tags, 0, i, bi)
1248 if (bitmap_bit_p (mpt_stats->parent_tags, i))
1250 /* Partition MPT is already partitioning symbols in the
1251 alias set for TAG. This means that we are now saving
1252 1 virtual operand for every direct reference to TAG. */
1253 tree tag = referenced_var (i);
1254 mem_sym_stats_t tag_stats = mem_sym_stats (cfun, tag);
1255 mem_ref_stats->num_vuses -= tag_stats->num_direct_reads;
1256 mem_ref_stats->num_vdefs -= tag_stats->num_direct_writes;
1260 /* This is the first symbol in tag I's alias set that is
1261 being grouped under MPT. We will not save any
1262 virtual operands this time, but record that MPT is
1263 grouping a symbol from TAG's alias set so that the
1264 next time we get the savings. */
1265 bitmap_set_bit (mpt_stats->parent_tags, i);
1270 /* If MP_P->VAR is call-clobbered, and MPT is already grouping
1271 call-clobbered symbols, then we will save as many virtual
1272 operands as asm/call sites there are. */
1273 if (is_call_clobbered (mp_p->var))
1275 if (mpt_stats->has_call_clobbered_vars)
1276 mem_ref_stats->num_vdefs -= mem_ref_stats->num_call_sites
1277 + mem_ref_stats->num_asm_sites;
/* Record that MPT now groups at least one call-clobbered symbol, so
   the next grouped clobbered symbol produces the savings above.  */
1279 mpt_stats->has_call_clobbered_vars = true;
/* NOTE(review): sampled excerpt -- embedded line numbers jump; only
   comments were added below.  */
1284 /* Helper for compute_memory_partitions. Transfer reference counts
1285 from pointers to their pointed-to sets. Counters for pointers were
1286 computed by update_alias_info. MEM_REF_STATS points to CFUN's
1287 memory reference information. */
1290 update_reference_counts (struct mem_ref_stats_d *mem_ref_stats)
1294 mem_sym_stats_t sym_stats;
/* Walk every SSA name; only pointers with points-to info that need a
   memory tag contribute reference counts.  */
1296 for (i = 1; i < num_ssa_names; i++)
1299 struct ptr_info_def *pi;
1303 && POINTER_TYPE_P (TREE_TYPE (ptr))
1304 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1305 && pi->memory_tag_needed)
1310 mem_sym_stats_t ptr_stats, tag_stats;
1312 /* If PTR has flow-sensitive points-to information, use
1313 PTR's name tag, otherwise use the symbol tag associated
1314 with PTR's symbol. */
1315 if (pi->name_mem_tag)
1316 tag = pi->name_mem_tag;
1318 tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
1320 ptr_stats = get_mem_sym_stats_for (ptr);
1321 tag_stats = get_mem_sym_stats_for (tag);
1323 /* TAG has as many direct references as dereferences we
1324 found for its parent pointer. */
1325 tag_stats->num_direct_reads += ptr_stats->num_direct_reads;
1326 tag_stats->num_direct_writes += ptr_stats->num_direct_writes;
1328 /* All the dereferences of pointer PTR are considered direct
1329 references to PTR's memory tag (TAG). In turn,
1330 references to TAG will become virtual operands for every
1331 symbol in TAG's alias set. So, for every symbol ALIAS in
1332 TAG's alias set, add as many indirect references to ALIAS
1333 as direct references there are for TAG. */
1334 if (MTAG_ALIASES (tag))
1335 EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, j, bj)
1337 tree alias = referenced_var (j);
1338 sym_stats = get_mem_sym_stats_for (alias);
1340 /* All the direct references to TAG are indirect references
1342 sym_stats->num_indirect_reads += ptr_stats->num_direct_reads;
1343 sym_stats->num_indirect_writes += ptr_stats->num_direct_writes;
1344 sym_stats->frequency_reads += ptr_stats->frequency_reads;
1345 sym_stats->frequency_writes += ptr_stats->frequency_writes;
1347 /* Indicate that TAG is one of ALIAS's parent tags. */
1348 if (sym_stats->parent_tags == NULL)
1349 sym_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);
1350 bitmap_set_bit (sym_stats->parent_tags, DECL_UID (tag));
1355 /* Call-clobbered symbols are indirectly written at every
1357 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
1359 tree sym = referenced_var (i);
1360 sym_stats = get_mem_sym_stats_for (sym);
1361 sym_stats->num_indirect_writes += mem_ref_stats->num_call_sites
1362 + mem_ref_stats->num_asm_sites;
1365 /* Addressable symbols are indirectly written at some ASM sites.
1366 Since only ASM sites that clobber memory actually affect
1367 addressable symbols, this is an over-estimation. */
1368 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
1370 tree sym = referenced_var (i);
1371 sym_stats = get_mem_sym_stats_for (sym);
1372 sym_stats->num_indirect_writes += mem_ref_stats->num_asm_sites;
/* NOTE(review): sampled excerpt -- embedded line numbers jump; only
   comments were added below.  */
1377 /* Helper for compute_memory_partitions. Add all memory symbols to
1378 *MP_INFO_P and compute the initial estimate for the total number of
1379 virtual operands needed. MEM_REF_STATS points to CFUN's memory
1380 reference information. On exit, *TAGS_P will contain the list of
1381 memory tags whose alias set need to be rewritten after
1385 build_mp_info (struct mem_ref_stats_d *mem_ref_stats,
1386 VEC(mem_sym_stats_t,heap) **mp_info_p,
1387 VEC(tree,heap) **tags_p)
1390 referenced_var_iterator rvi;
1392 FOR_EACH_REFERENCED_VAR (var, rvi)
1394 mem_sym_stats_t sym_stats;
1397 /* We are only interested in memory symbols other than MPTs. */
1398 if (is_gimple_reg (var) || TREE_CODE (var) == MEMORY_PARTITION_TAG)
1401 /* Collect memory tags into the TAGS array so that we can
1402 rewrite their alias sets after partitioning. */
1403 if (MTAG_P (var) && MTAG_ALIASES (var))
1404 VEC_safe_push (tree, heap, *tags_p, var);
1406 /* Since we are going to re-compute partitions, any symbols that
1407 used to belong to a partition must be detached from it and
1408 marked for renaming. */
1409 if ((old_mpt = memory_partition (var)) != NULL)
1411 mark_sym_for_renaming (old_mpt);
1412 set_memory_partition (var, NULL_TREE);
1413 mark_sym_for_renaming (var);
1416 sym_stats = get_mem_sym_stats_for (var);
1418 /* Add VAR's reference info to MP_INFO. Note that the only
1419 symbols that make sense to partition are those that have
1420 indirect references. If a symbol S is always directly
1421 referenced, partitioning it will not reduce the number of
1422 virtual operators. The only symbols that are profitable to
1423 partition are those that belong to alias sets and/or are
1425 if (sym_stats->num_indirect_reads > 0
1426 || sym_stats->num_indirect_writes > 0)
1427 VEC_safe_push (mem_sym_stats_t, heap, *mp_info_p, sym_stats);
1429 /* Update the number of estimated VOPS. Note that direct
1430 references to memory tags are always counted as indirect
1431 references to their alias set members, so if a memory tag has
1432 aliases, do not count its direct references to avoid double
1434 if (!MTAG_P (var) || !MTAG_ALIASES (var))
1436 mem_ref_stats->num_vuses += sym_stats->num_direct_reads;
1437 mem_ref_stats->num_vdefs += sym_stats->num_direct_writes;
/* Indirect references always contribute to the VOP estimate.  */
1440 mem_ref_stats->num_vuses += sym_stats->num_indirect_reads;
1441 mem_ref_stats->num_vdefs += sym_stats->num_indirect_writes;
/* NOTE(review): sampled excerpt -- embedded line numbers jump; only
   comments were added below.  */
1446 /* Compute memory partitions. A memory partition (MPT) is an
1447 arbitrary grouping of memory symbols, such that references to one
1448 member of the group is considered a reference to all the members of
1451 As opposed to alias sets in memory tags, the grouping into
1452 partitions is completely arbitrary and only done to reduce the
1453 number of virtual operands. The only rule that needs to be
1454 observed when creating memory partitions is that given two memory
1455 partitions MPT.i and MPT.j, they must not contain symbols in
1458 Memory partitions are used when putting the program into Memory-SSA
1459 form. In particular, in Memory-SSA PHI nodes are not computed for
1460 individual memory symbols. They are computed for memory
1461 partitions. This reduces the amount of PHI nodes in the SSA graph
1462 at the expense of precision (i.e., it makes unrelated stores affect
1465 However, it is possible to increase precision by changing this
1466 partitioning scheme. For instance, if the partitioning scheme is
1467 such that get_mpt_for is the identity function (that is,
1468 get_mpt_for (s) = s), this will result in ultimate precision at the
1469 expense of huge SSA webs.
1471 At the other extreme, a partitioning scheme that groups all the
1472 symbols in the same set results in minimal SSA webs and almost
1473 total loss of precision.
1475 There partitioning heuristic uses three parameters to decide the
1476 order in which symbols are processed. The list of symbols is
1477 sorted so that symbols that are more likely to be partitioned are
1478 near the top of the list:
1480 - Execution frequency. If a memory references is in a frequently
1481 executed code path, grouping it into a partition may block useful
1482 transformations and cause sub-optimal code generation. So, the
1483 partition heuristic tries to avoid grouping symbols with high
1484 execution frequency scores. Execution frequency is taken
1485 directly from the basic blocks where every reference is made (see
1486 update_mem_sym_stats_from_stmt), which in turn uses the
1487 profile guided machinery, so if the program is compiled with PGO
1488 enabled, more accurate partitioning decisions will be made.
1490 - Number of references. Symbols with few references in the code,
1491 are partitioned before symbols with many references.
1493 - NO_ALIAS attributes. Symbols with any of the NO_ALIAS*
1494 attributes are partitioned after symbols marked MAY_ALIAS.
1496 Once the list is sorted, the partitioning proceeds as follows:
1498 1- For every symbol S in MP_INFO, create a new memory partition MP,
1499 if necessary. To avoid memory partitions that contain symbols
1500 from non-conflicting alias sets, memory partitions are
1501 associated to the memory tag that holds S in its alias set. So,
1502 when looking for a memory partition for S, the memory partition
1503 associated with one of the memory tags holding S is chosen. If
1504 none exists, a new one is created.
1506 2- Add S to memory partition MP.
1508 3- Reduce by 1 the number of VOPS for every memory tag holding S.
1510 4- If the total number of VOPS is less than MAX_ALIASED_VOPS or the
1511 average number of VOPS per statement is less than
1512 AVG_ALIASED_VOPS, stop. Otherwise, go to the next symbol in the
1516 compute_memory_partitions (void)
1520 mem_sym_stats_t mp_p;
1521 VEC(mem_sym_stats_t,heap) *mp_info;
1523 VEC(tree,heap) *tags;
1524 struct mem_ref_stats_d *mem_ref_stats;
1525 int prev_max_aliased_vops;
1527 mem_ref_stats = gimple_mem_ref_stats (cfun);
1528 gcc_assert (mem_ref_stats->num_vuses == 0 && mem_ref_stats->num_vdefs == 0);
/* Nothing to partition when the function has no memory statements.  */
1530 if (mem_ref_stats->num_mem_stmts == 0)
1533 timevar_push (TV_MEMORY_PARTITIONING);
/* MAX_ALIASED_VOPS is a --param; it is restored before returning.  */
1537 prev_max_aliased_vops = MAX_ALIASED_VOPS;
1539 /* Since we clearly cannot lower the number of virtual operators
1540 below the total number of memory statements in the function, we
1541 may need to adjust MAX_ALIASED_VOPS beforehand. */
1542 if (MAX_ALIASED_VOPS < mem_ref_stats->num_mem_stmts)
1543 MAX_ALIASED_VOPS = mem_ref_stats->num_mem_stmts;
1545 /* Update reference stats for all the pointed-to variables and
1547 update_reference_counts (mem_ref_stats);
1549 /* Add all the memory symbols to MP_INFO. */
1550 build_mp_info (mem_ref_stats, &mp_info, &tags);
1552 /* No partitions required if we are below the threshold. */
1553 if (!need_to_partition_p (mem_ref_stats))
1556 fprintf (dump_file, "\nMemory partitioning NOT NEEDED for %s\n",
1557 get_name (current_function_decl));
1561 /* Sort the MP_INFO array so that symbols that should be partitioned
1562 first are near the top of the list. */
1563 sort_mp_info (mp_info);
1567 fprintf (dump_file, "\nMemory partitioning NEEDED for %s\n\n",
1568 get_name (current_function_decl));
1569 fprintf (dump_file, "Memory symbol references before partitioning:\n");
1570 dump_mp_info (dump_file, mp_info);
1573 /* Create partitions for variables in MP_INFO until we have enough
1574 to lower the total number of VOPS below MAX_ALIASED_VOPS or if
1575 the average number of VOPS per statement is below
1576 AVG_ALIASED_VOPS. */
1577 for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
1581 /* If we are below the threshold, stop. */
1582 if (!need_to_partition_p (mem_ref_stats))
1585 mpt = find_partition_for (mp_p);
1586 estimate_vop_reduction (mem_ref_stats, mp_p, mpt);
1589 /* After partitions have been created, rewrite alias sets to use
1590 them instead of the original symbols. This way, if the alias set
1591 was computed as { a b c d e f }, and the subset { b e f } was
1592 grouped into partition MPT.3, then the new alias set for the tag
1593 will be { a c d MPT.3 }.
1595 Note that this is not strictly necessary. The operand scanner
1596 will always check if a symbol belongs to a partition when adding
1597 virtual operands. However, by reducing the size of the alias
1598 sets to be scanned, the work needed inside the operand scanner is
1599 significantly reduced. */
1600 new_aliases = BITMAP_ALLOC (&alias_bitmap_obstack);
1602 for (i = 0; VEC_iterate (tree, tags, i, tag); i++)
1604 rewrite_alias_set_for (tag, new_aliases);
1605 bitmap_clear (new_aliases);
1608 BITMAP_FREE (new_aliases);
1612 fprintf (dump_file, "\nMemory symbol references after partitioning:\n");
1613 dump_mp_info (dump_file, mp_info);
1617 /* Free allocated memory. */
1618 VEC_free (mem_sym_stats_t, heap, mp_info);
1619 VEC_free (tree, heap, tags);
/* Restore the --param value adjusted at the top of this function.  */
1621 MAX_ALIASED_VOPS = prev_max_aliased_vops;
1623 timevar_pop (TV_MEMORY_PARTITIONING);
/* NOTE(review): sampled excerpt -- embedded line numbers jump; only
   comments were added below.  */
1626 /* Compute may-alias information for every variable referenced in function
1629 Alias analysis proceeds in 3 main phases:
1631 1- Points-to and escape analysis.
1633 This phase walks the use-def chains in the SSA web looking for three
1636 * Assignments of the form P_i = &VAR
1637 * Assignments of the form P_i = malloc()
1638 * Pointers and ADDR_EXPR that escape the current function.
1640 The concept of 'escaping' is the same one used in the Java world. When
1641 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
1642 outside of the current function. So, assignment to global variables,
1643 function arguments and returning a pointer are all escape sites, as are
1644 conversions between pointers and integers.
1646 This is where we are currently limited. Since not everything is renamed
1647 into SSA, we lose track of escape properties when a pointer is stashed
1648 inside a field in a structure, for instance. In those cases, we are
1649 assuming that the pointer does escape.
1651 We use escape analysis to determine whether a variable is
1652 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
1653 is call-clobbered. If a pointer P_i escapes, then all the variables
1654 pointed-to by P_i (and its memory tag) also escape.
1656 2- Compute flow-sensitive aliases
1658 We have two classes of memory tags. Memory tags associated with the
1659 pointed-to data type of the pointers in the program. These tags are
1660 called "symbol memory tag" (SMT). The other class are those associated
1661 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
1662 when adding operands for an INDIRECT_REF *P_i, we will first check
1663 whether P_i has a name tag, if it does we use it, because that will have
1664 more precise aliasing information. Otherwise, we use the standard symbol
1667 In this phase, we go through all the pointers we found in points-to
1668 analysis and create alias sets for the name memory tags associated with
1669 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
1670 it points to and its tag.
1673 3- Compute flow-insensitive aliases
1675 This pass will compare the alias set of every symbol memory tag and
1676 every addressable variable found in the program. Given a symbol
1677 memory tag SMT and an addressable variable V. If the alias sets of
1678 SMT and V conflict (as computed by may_alias_p), then V is marked
1679 as an alias tag and added to the alias set of SMT.
1681 For instance, consider the following function:
1697 After aliasing analysis has finished, the symbol memory tag for pointer
1698 'p' will have two aliases, namely variables 'a' and 'b'. Every time
1699 pointer 'p' is dereferenced, we want to mark the operation as a
1700 potential reference to 'a' and 'b'.
1710 # p_1 = PHI <p_4(1), p_6(2)>;
1725 In certain cases, the list of may aliases for a pointer may grow too
1726 large. This may cause an explosion in the number of virtual operands
1727 inserted in the code. Resulting in increased memory consumption and
1730 When the number of virtual operands needed to represent aliased
1731 loads and stores grows too large (configurable with option --param
1732 max-aliased-vops and --param avg-aliased-vops), alias sets are
1733 grouped to avoid severe compile-time slow downs and memory
1734 consumption. See compute_memory_partitions. */
1737 compute_may_aliases (void)
1739 struct alias_info *ai;
1741 timevar_push (TV_TREE_MAY_ALIAS);
1743 memset (&alias_stats, 0, sizeof (alias_stats));
1745 /* Initialize aliasing information. */
1746 ai = init_alias_info ();
1748 /* For each pointer P_i, determine the sets of variables that P_i may
1749 point-to. For every addressable variable V, determine whether the
1750 address of V escapes the current function, making V call-clobbered
1751 (i.e., whether &V is stored in a global variable or if its passed as a
1752 function call argument). */
1753 compute_points_to_sets ();
1755 /* Update various related attributes like escaped addresses,
1756 pointer dereferences for loads and stores. This is used
1757 when creating name tags and alias sets. */
1758 update_alias_info (ai);
1760 /* Collect all pointers and addressable variables, compute alias sets,
1761 create memory tags for pointers and promote variables whose address is
1762 not needed anymore. */
1763 setup_pointers_and_addressables (ai);
1765 /* Compute type-based flow-insensitive aliasing for all the type
1767 compute_flow_insensitive_aliasing (ai);
1769 /* Compute flow-sensitive, points-to based aliasing for all the name
1771 compute_flow_sensitive_aliasing (ai);
1773 /* Compute call clobbering information. */
1774 compute_call_clobbered (ai);
1776 /* If the program makes no reference to global variables, but it
1777 contains a mixture of pure and non-pure functions, then we need
1778 to create use-def and def-def links between these functions to
1779 avoid invalid transformations on them. */
1780 maybe_create_global_var ();
1782 /* Compute memory partitions for every memory variable. */
1783 compute_memory_partitions ();
1785 /* Remove partitions with no symbols. Partitions may end up with an
1786 empty MPT_SYMBOLS set if a previous round of alias analysis
1787 needed to partition more symbols. Since we don't need those
1788 partitions anymore, remove them to free up the space. */
1792 VEC(tree,heap) *mpt_table;
1794 mpt_table = gimple_ssa_operands (cfun)->mpt_table;
1796 while (i < VEC_length (tree, mpt_table))
1798 mpt = VEC_index (tree, mpt_table, i);
1799 if (MPT_SYMBOLS (mpt) == NULL)
1800 VEC_unordered_remove (tree, mpt_table, i);
1806 /* Populate all virtual operands and newly promoted register operands. */
1808 gimple_stmt_iterator gsi;
1811 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1812 update_stmt_if_modified (gsi_stmt (gsi));
1815 /* Debugging dumps. */
1818 dump_mem_ref_stats (dump_file);
1819 dump_alias_info (dump_file);
1820 dump_points_to_info (dump_file);
1822 if (dump_flags & TDF_STATS)
1823 dump_alias_stats (dump_file);
1825 if (dump_flags & TDF_DETAILS)
1826 dump_referenced_vars (dump_file);
1829 /* Deallocate memory used by aliasing data structures. */
1830 delete_alias_info (ai);
/* Renaming requested by the phases above is performed here, once.  */
1832 if (need_ssa_update_p ())
1833 update_ssa (TODO_update_ssa);
1835 timevar_pop (TV_TREE_MAY_ALIAS);
/* NOTE(review): sampled excerpt -- the struct declaration and parts of
   the walker body are missing (embedded numbering jumps).  */
1840 /* Data structure used to count the number of dereferences to PTR
1841 inside an expression. */
1845 unsigned num_stores;
1850 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
1851 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
1854 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
1856 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1857 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
1859 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
1860 pointer 'ptr' is *not* dereferenced, it is simply used to compute
1861 the address of 'fld' as 'ptr + offsetof(fld)'. */
1862 if (TREE_CODE (*tp) == ADDR_EXPR)
/* A dereference of the tracked pointer: classify it as a store or a
   load (the store/load test itself is on a line missing from this
   excerpt -- presumably wi_p->is_lhs or similar; verify upstream).  */
1868 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
1871 count_p->num_stores++;
1873 count_p->num_loads++;
/* NOTE(review): sampled excerpt -- embedded line numbers jump; only
   comments were added below.  */
1880 /* Count the number of direct and indirect uses for pointer PTR in
1881 statement STMT. The number of direct uses is stored in
1882 *NUM_USES_P. Indirect references are counted separately depending
1883 on whether they are store or load operations. The counts are
1884 stored in *NUM_STORES_P and *NUM_LOADS_P. */
1887 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
1888 unsigned *num_loads_p, unsigned *num_stores_p)
1897 /* Find out the total number of uses of PTR in STMT. */
1898 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
1902 /* Now count the number of indirect references to PTR. This is
1903 truly awful, but we don't have much choice. There are no parent
1904 pointers inside INDIRECT_REFs, so an expression like
1905 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
1906 find all the indirect and direct uses of x_1 inside. The only
1907 shortcut we can take is the fact that GIMPLE only allows
1908 INDIRECT_REFs inside the expressions below. */
1909 if (is_gimple_assign (stmt)
1910 || gimple_code (stmt) == GIMPLE_RETURN
1911 || gimple_code (stmt) == GIMPLE_ASM
1912 || is_gimple_call (stmt))
1914 struct walk_stmt_info wi;
1915 struct count_ptr_d count;
/* Initialize the per-walk counters; count.ptr assignment is on a
   line missing from this excerpt.  */
1918 count.num_stores = 0;
1919 count.num_loads = 0;
1921 memset (&wi, 0, sizeof (wi));
1923 walk_gimple_op (stmt, count_ptr_derefs, &wi);
1925 *num_stores_p = count.num_stores;
1926 *num_loads_p = count.num_loads;
/* Sanity check: every indirect reference is also a direct SSA use.  */
1929 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
/* NOTE(review): sampled excerpt -- braces missing (embedded numbering
   jumps).  */
1932 /* Remove memory references stats for function FN. */
1935 delete_mem_ref_stats (struct function *fn)
1937 if (gimple_mem_ref_stats (fn)->mem_sym_stats)
/* Release both the per-symbol stat pool and the pointer map keyed on
   symbols; the map's values live in the pool.  */
1939 free_alloc_pool (mem_sym_stats_pool);
1940 pointer_map_destroy (gimple_mem_ref_stats (fn)->mem_sym_stats);
1942 gimple_mem_ref_stats (fn)->mem_sym_stats = NULL;
/* NOTE(review): sampled excerpt -- the alloc-pool block-size argument
   line is missing (embedded numbering jumps).  */
1946 /* Initialize memory reference stats. */
1949 init_mem_ref_stats (void)
1951 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
1953 mem_sym_stats_pool = create_alloc_pool ("Mem sym stats",
1954 sizeof (struct mem_sym_stats_d),
/* Zero all counters, then create the symbol -> stats map.  */
1956 memset (mem_ref_stats, 0, sizeof (struct mem_ref_stats_d));
1957 mem_ref_stats->mem_sym_stats = pointer_map_create ();
/* NOTE(review): sampled excerpt -- embedded line numbers jump; only
   comments were added below.  */
1961 /* Helper for init_alias_info. Reset existing aliasing information. */
1964 reset_alias_info (void)
1966 referenced_var_iterator rvi;
1969 bitmap active_nmts, all_nmts;
1971 /* Clear the set of addressable variables. We do not need to clear
1972 the TREE_ADDRESSABLE bit on every symbol because we are going to
1973 re-compute addressability here. */
1974 bitmap_clear (gimple_addressable_vars (cfun));
1976 active_nmts = BITMAP_ALLOC (&alias_bitmap_obstack);
1977 all_nmts = BITMAP_ALLOC (&alias_bitmap_obstack);
1979 /* Clear flow-insensitive alias information from each symbol. */
1980 FOR_EACH_REFERENCED_VAR (var, rvi)
1982 if (is_gimple_reg (var))
/* For memory tags, drop the previously computed may-alias set.  */
1986 MTAG_ALIASES (var) = NULL;
1988 /* Memory partition information will be computed from scratch. */
1989 if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
1990 MPT_SYMBOLS (var) = NULL;
1992 /* Collect all the name tags to determine if we have any
1993 orphaned that need to be removed from the IL. A name tag
1994 will be orphaned if it is not associated with any active SSA
1996 if (TREE_CODE (var) == NAME_MEMORY_TAG)
1997 bitmap_set_bit (all_nmts, DECL_UID (var));
1999 /* Since we are about to re-discover call-clobbered
2000 variables, clear the call-clobbered flag. */
2001 clear_call_clobbered (var);
2004 /* There should be no call-clobbered variable left. */
2005 gcc_assert (bitmap_empty_p (gimple_call_clobbered_vars (cfun)));
2007 /* Clear the call-used variables. */
2008 bitmap_clear (gimple_call_used_vars (cfun));
2010 /* Clear flow-sensitive points-to information from each SSA name. */
2011 for (i = 1; i < num_ssa_names; i++)
2013 tree name = ssa_name (i);
2015 if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
2018 if (SSA_NAME_PTR_INFO (name))
2020 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
2022 /* Clear all the flags but keep the name tag to
2023 avoid creating new temporaries unnecessarily. If
2024 this pointer is found to point to a subset or
2025 superset of its former points-to set, then a new
2026 tag will need to be created in create_name_tags. */
2027 pi->pt_anything = 0;
2029 pi->value_escapes_p = 0;
2030 pi->memory_tag_needed = 0;
2031 pi->is_dereferenced = 0;
2033 bitmap_clear (pi->pt_vars);
2035 /* Add NAME's name tag to the set of active tags. */
2036 if (pi->name_mem_tag)
2037 bitmap_set_bit (active_nmts, DECL_UID (pi->name_mem_tag));
2041 /* Name memory tags that are no longer associated with an SSA name
2042 are considered stale and should be removed from the IL. All the
2043 name tags that are in the set ALL_NMTS but not in ACTIVE_NMTS are
2044 considered stale and marked for renaming. */
2045 bitmap_and_compl_into (all_nmts, active_nmts);
2046 mark_set_for_renaming (all_nmts);
2048 BITMAP_FREE (all_nmts);
2049 BITMAP_FREE (active_nmts);
/* NOTE(review): sampled excerpt -- embedded line numbers jump; only
   comments were added below.  */
2053 /* Initialize the data structures used for alias analysis. */
2055 static struct alias_info *
2056 init_alias_info (void)
2058 struct alias_info *ai;
2059 referenced_var_iterator rvi;
/* Remembers across invocations whether the obstack below has been
   initialized, so it can be released and re-initialized on reuse.  */
2061 static bool alias_bitmap_obstack_initialized;
2063 ai = XCNEW (struct alias_info);
2064 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
2065 sbitmap_zero (ai->ssa_names_visited);
2066 ai->processed_ptrs = VEC_alloc (tree, heap, 50);
2067 ai->dereferenced_ptrs = pointer_set_create ();
2069 /* Clear out all memory reference stats. */
2070 init_mem_ref_stats ();
2072 /* If aliases have been computed before, clear existing information. */
2073 if (gimple_aliases_computed_p (cfun))
2074 reset_alias_info ();
2077 /* If this is the first time we compute aliasing information,
2078 every non-register symbol will need to be put into SSA form
2079 (the initial SSA form only operates on GIMPLE registers). */
2080 FOR_EACH_REFERENCED_VAR (var, rvi)
2081 if (!is_gimple_reg (var))
2082 mark_sym_for_renaming (var);
2085 /* Next time, we will need to reset alias information. */
2086 cfun->gimple_df->aliases_computed_p = true;
2087 if (alias_bitmap_obstack_initialized)
2088 bitmap_obstack_release (&alias_bitmap_obstack);
2089 bitmap_obstack_initialize (&alias_bitmap_obstack);
2090 alias_bitmap_obstack_initialized = true;
/* NOTE(review): sampled excerpt -- embedded line numbers jump; only
   comments were added below.  */
2096 /* Deallocate memory used by alias analysis. */
2099 delete_alias_info (struct alias_info *ai)
2103 sbitmap_free (ai->ssa_names_visited);
2105 VEC_free (tree, heap, ai->processed_ptrs);
2107 for (i = 0; i < ai->num_addressable_vars; i++)
2108 free (ai->addressable_vars[i]);
2109 free (ai->addressable_vars);
2111 for (i = 0; i < ai->num_pointers; i++)
2112 free (ai->pointers[i]);
2113 free (ai->pointers);
2115 pointer_set_destroy (ai->dereferenced_ptrs);
/* Also tear down the function-wide stats and points-to data; AI
   itself is presumably freed on a line missing from this excerpt.  */
2118 delete_mem_ref_stats (cfun);
2119 delete_points_to_sets ();
/* NOTE(review): sampled excerpt -- the return-type line and braces are
   missing (embedded numbering jumps).  */
2123 /* Used for hashing to identify pointer infos with identical
/* Equality callback: two ptr_info_def entries compare equal when their
   points-to bitmaps are equal.  */
2127 eq_ptr_info (const void *p1, const void *p2)
2129 const struct ptr_info_def *n1 = (const struct ptr_info_def *) p1;
2130 const struct ptr_info_def *n2 = (const struct ptr_info_def *) p2;
2131 return bitmap_equal_p (n1->pt_vars, n2->pt_vars);
/* Hash callback paired with eq_ptr_info: hashes the points-to bitmap.
   NOTE(review): return-type line and braces missing from this excerpt.  */
2135 ptr_info_hash (const void *p)
2137 const struct ptr_info_def *n = (const struct ptr_info_def *) p;
2138 return bitmap_hash (n->pt_vars);
/* NOTE(review): sampled excerpt -- embedded line numbers jump; only
   comments were added below.  */
2142 /* Create name tags for all the pointers that have been dereferenced.
2143 We only create a name tag for a pointer P if P is found to point to
2144 a set of variables (so that we can alias them to *P) or if it is
2145 the result of a call to malloc (which means that P cannot point to
2146 anything else nor alias any other variable).
2148 If two pointers P and Q point to the same set of variables, they
2149 are assigned the same name tag. */
2152 create_name_tags (void)
2155 VEC (tree, heap) *with_ptvars = NULL;
2159 /* Collect the list of pointers with a non-empty points to set. */
2160 for (i = 1; i < num_ssa_names; i++)
2162 tree ptr = ssa_name (i);
2163 struct ptr_info_def *pi;
2166 || !POINTER_TYPE_P (TREE_TYPE (ptr))
2167 || !SSA_NAME_PTR_INFO (ptr))
2170 pi = SSA_NAME_PTR_INFO (ptr);
2172 if (pi->pt_anything || !pi->memory_tag_needed)
2174 /* No name tags for pointers that have not been
2175 dereferenced or point to an arbitrary location. */
2176 pi->name_mem_tag = NULL_TREE;
2180 /* Set pt_anything on the pointers without pt_vars filled in so
2181 that they are assigned a symbol tag. */
2182 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
2183 VEC_safe_push (tree, heap, with_ptvars, ptr);
2185 set_pt_anything (ptr);
2188 /* If we didn't find any pointers with pt_vars set, we're done. */
2192 ptr_hash = htab_create (10, ptr_info_hash, eq_ptr_info, NULL);
2194 /* Now go through the pointers with pt_vars, and find a name tag
2195 with the same pt_vars as this pointer, or create one if one
2197 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
2199 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2200 tree old_name_tag = pi->name_mem_tag;
2201 struct ptr_info_def **slot;
2203 /* If PTR points to a set of variables, check if we don't
2204 have another pointer Q with the same points-to set before
2205 creating a tag. If so, use Q's tag instead of creating a
2208 This is important for not creating unnecessary symbols
2209 and also for copy propagation. If we ever need to
2210 propagate PTR into Q or vice-versa, we would run into
2211 problems if they both had different name tags because
2212 they would have different SSA version numbers (which
2213 would force us to take the name tags in and out of SSA). */
2214 slot = (struct ptr_info_def **) htab_find_slot (ptr_hash, pi, INSERT);
2216 pi->name_mem_tag = (*slot)->name_mem_tag;
2221 /* If we didn't find a pointer with the same points-to set
2222 as PTR, create a new name tag if needed. */
2223 if (pi->name_mem_tag == NULL_TREE)
2224 pi->name_mem_tag = get_nmt_for (ptr);
2227 /* If the new name tag computed for PTR is different than
2228 the old name tag that it used to have, then the old tag
2229 needs to be removed from the IL, so we mark it for
2231 if (old_name_tag && old_name_tag != pi->name_mem_tag)
2232 mark_sym_for_renaming (old_name_tag);
2234 /* Inherit volatility from the pointed-to type. */
2235 TREE_THIS_VOLATILE (pi->name_mem_tag)
2236 |= TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
2238 /* Mark the new name tag for renaming. */
2239 mark_sym_for_renaming (pi->name_mem_tag);
2242 htab_delete (ptr_hash);
2244 VEC_free (tree, heap, with_ptvars);
2248 /* Union the alias set SET into the may-aliases for TAG. */
2251 union_alias_set_into (tree tag, bitmap set)
2253 bitmap ma = MTAG_ALIASES (tag);
/* Nothing to add for an empty SET.  */
2255 if (bitmap_empty_p (set))
/* Allocate TAG's alias bitmap lazily, on the obstack shared by all
   alias bitmaps, before OR-ing SET into it.  */
2259 ma = MTAG_ALIASES (tag) = BITMAP_ALLOC (&alias_bitmap_obstack);
2260 bitmap_ior_into (ma, set);
2264 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
2265 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
2266 name tag and the variables it points-to are call-clobbered. Finally, if
2267 P_i escapes and we could not determine where it points to, then all the
2268 variables in the same alias set as *P_i are marked call-clobbered. This
2269 is necessary because we must assume that P_i may take the address of any
2270 variable in the same alias set. */
2273 compute_flow_sensitive_aliasing (struct alias_info *ai)
2278 timevar_push (TV_FLOW_SENSITIVE);
/* First pass: pointers whose points-to solution could not be
   retrieved are conservatively treated as pointing anywhere.  */
2280 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
2282 if (!find_what_p_points_to (ptr))
2283 set_pt_anything (ptr);
/* Assign name memory tags based on the points-to sets just found.  */
2286 create_name_tags ();
2288 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
2290 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2292 /* Set up aliasing information for PTR's name memory tag (if it has
2293 one). Note that only pointers that have been dereferenced will
2294 have a name memory tag. */
2295 if (pi->name_mem_tag && pi->pt_vars)
2297 if (!bitmap_empty_p (pi->pt_vars))
2298 union_alias_set_into (pi->name_mem_tag, pi->pt_vars);
2301 timevar_pop (TV_FLOW_SENSITIVE);
2305 /* Return TRUE if at least one symbol in TAG2's alias set is also
2306 present in TAG1's alias set. */
2309 have_common_aliases_p (bitmap tag1aliases, bitmap tag2aliases)
2312 /* This is the old behavior of have_common_aliases_p, which is to
2313 return false if both sets are empty, or one set is and the other
2315 if (tag1aliases == NULL || tag2aliases == NULL)
/* Both bitmaps exist: test for a non-empty intersection.  */
2318 return bitmap_intersect_p (tag1aliases, tag2aliases);
2321 /* Compute type-based alias sets. Traverse all the pointers and
2322 addressable variables found in setup_pointers_and_addressables.
2324 For every pointer P in AI->POINTERS and addressable variable V in
2325 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
2326 memory tag (SMT) if their alias sets conflict. V is then marked as
2327 an aliased symbol so that the operand scanner knows that statements
2328 containing V have aliased operands. */
2331 compute_flow_insensitive_aliasing (struct alias_info *ai)
2333 referenced_var_iterator rvi;
2337 timevar_push (TV_FLOW_INSENSITIVE);
2339 /* Since this analysis is based exclusively on symbols, it fails to
2340 handle cases where two pointers P and Q have different memory
2341 tags with conflicting alias set numbers but no aliased symbols in
2344 For example, suppose that we have two memory tags SMT.1 and SMT.2
2347 may-aliases (SMT.1) = { a }
2348 may-aliases (SMT.2) = { b }
2350 and the alias set number of SMT.1 conflicts with that of SMT.2.
2351 Since they don't have symbols in common, loads and stores from
2352 SMT.1 and SMT.2 will seem independent of each other, which will
2353 lead to the optimizers making invalid transformations (see
2354 testsuite/gcc.c-torture/execute/pr15262-[12].c).
2356 To avoid this problem, we do a final traversal of AI->POINTERS
2357 looking for pairs of pointers that have no aliased symbols in
2358 common and yet have conflicting alias set numbers.
2360 Note this has to be done first as we only can avoid adding
2361 aliases for common memory tag aliases, not for common symbol
2362 aliases as they might get pruned by the operand scanner later. */
/* Pass 1: SMT-vs-SMT.  Quadratic over AI->POINTERS; each pair is
   checked for a TBAA conflict with no shared alias symbols.  */
2363 for (i = 0; i < ai->num_pointers; i++)
2366 struct alias_map_d *p_map1 = ai->pointers[i];
2367 tree tag1 = symbol_mem_tag (p_map1->var);
2368 bitmap may_aliases1 = MTAG_ALIASES (tag1);
2370 for (j = 0; j < ai->num_pointers; j++)
2372 struct alias_map_d *p_map2 = ai->pointers[j];
2373 tree tag2 = symbol_mem_tag (p_map2->var);
2374 bitmap may_aliases2 = may_aliases (tag2);
2376 /* By convention tags don't alias themselves. */
2380 /* If the pointers may not point to each other, do nothing. */
2381 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
2384 /* The two pointers may alias each other. If they already have
2385 symbols in common, do nothing. */
2386 if (have_common_aliases_p (may_aliases1, may_aliases2))
/* Conflicting alias sets, nothing in common: link the tags.  */
2389 add_may_alias (tag1, tag2);
2393 /* For every pointer P, determine which addressable variables may alias
2394 with P's symbol memory tag. */
/* Pass 2: SMT-vs-addressable-variable.  */
2395 for (i = 0; i < ai->num_pointers; i++)
2398 struct alias_map_d *p_map = ai->pointers[i];
2399 tree tag = symbol_mem_tag (p_map->var);
2402 for (j = 0; j < ai->num_addressable_vars; j++)
2404 struct alias_map_d *v_map;
2407 v_map = ai->addressable_vars[j];
2409 v_ann = var_ann (var);
2411 /* We used to skip variables that have never been written to
2412 if the memory tag has been never written to directly (or
2413 either of them were call clobbered). This is not enough
2414 though, as this misses writes through the tags aliases.
2415 So, for correctness we need to include any aliased
2418 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
2420 /* Add VAR to TAG's may-aliases set. */
2421 add_may_alias (tag, var);
2426 /* We have to add all HEAP variables to all SMTs aliases bitmaps.
2427 As we don't know which effective type the HEAP will have we cannot
2428 do better here and we need the conflicts with obfuscated pointers
2429 (a simple (*(int[n] *)ptr)[i] will do, with ptr from a VLA array
/* Pass 3: every heap variable aliases every SMT.  */
2431 for (i = 0; i < ai->num_pointers; i++)
2433 struct alias_map_d *p_map = ai->pointers[i];
2434 tree tag = symbol_mem_tag (p_map->var);
2436 FOR_EACH_REFERENCED_VAR (var, rvi)
2438 if (var_ann (var)->is_heapvar)
2439 add_may_alias (tag, var);
2443 timevar_pop (TV_FLOW_INSENSITIVE);
2447 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
2450 create_alias_map_for (tree var, struct alias_info *ai)
2452 struct alias_map_d *alias_map;
2453 alias_map = XCNEW (struct alias_map_d);
2454 alias_map->var = var;
/* Record VAR's TBAA alias set once, so later conflict queries don't
   recompute it.  */
2455 alias_map->set = get_alias_set (var);
/* The array was pre-sized in setup_pointers_and_addressables, so no
   bounds check is done here.  */
2456 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
2460 /* Update related alias information kept in AI. This is used when
2461 building name tags, alias sets and deciding grouping heuristics.
2462 STMT is the statement to process. This function also updates
2463 ADDRESSABLE_VARS. */
2466 update_alias_info_1 (gimple stmt, struct alias_info *ai)
2469 use_operand_p use_p;
2471 bool stmt_dereferences_ptr_p;
2472 enum escape_type stmt_escape_type = is_escape_site (stmt);
2473 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
2475 stmt_dereferences_ptr_p = false;
/* Account call/asm sites in the function-wide memory reference
   statistics.  */
2477 if (stmt_escape_type == ESCAPE_TO_CALL
2478 || stmt_escape_type == ESCAPE_TO_PURE_CONST)
2480 mem_ref_stats->num_call_sites++;
2481 if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
2482 mem_ref_stats->num_pure_const_call_sites++;
2484 else if (stmt_escape_type == ESCAPE_TO_ASM)
2485 mem_ref_stats->num_asm_sites++;
2487 /* Mark all the variables whose address are taken by the statement. */
2488 addr_taken = gimple_addresses_taken (stmt);
2490 bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);
2492 /* If we have a call or an assignment, see if the lhs contains
2493 a local decl that requires not to be a gimple register. */
2494 if (gimple_code (stmt) == GIMPLE_ASSIGN
2495 || gimple_code (stmt) == GIMPLE_CALL)
2497 tree lhs = gimple_get_lhs (stmt);
2498 /* A plain decl does not need it set. */
2499 if (lhs && handled_component_p (lhs))
2501 tree var = get_base_address (lhs);
2503 /* We are not going to mess with RESULT_DECL anyway. */
2504 && TREE_CODE (var) != RESULT_DECL
2505 && is_gimple_reg_type (TREE_TYPE (var)))
2506 bitmap_set_bit (gimple_addressable_vars (cfun), DECL_UID (var));
2510 /* Process each operand use. For pointers, determine whether they
2511 are dereferenced by the statement, or whether their value
2513 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
2517 struct ptr_info_def *pi;
2518 unsigned num_uses, num_loads, num_stores;
2520 op = USE_FROM_PTR (use_p);
2522 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
2523 to the set of addressable variables. */
2524 if (TREE_CODE (op) == ADDR_EXPR)
2526 bitmap addressable_vars = gimple_addressable_vars (cfun);
2528 gcc_assert (gimple_code (stmt) == GIMPLE_PHI);
2529 gcc_assert (addressable_vars);
2531 /* PHI nodes don't have annotations for pinning the set
2532 of addresses taken, so we collect them here.
2534 FIXME, should we allow PHI nodes to have annotations
2535 so that they can be treated like regular statements?
2536 Currently, they are treated as second-class
2538 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
2542 /* Ignore constants (they may occur in PHI node arguments). */
2543 if (TREE_CODE (op) != SSA_NAME)
2546 var = SSA_NAME_VAR (op);
2547 v_ann = var_ann (var);
2549 /* The base variable of an SSA name must be a GIMPLE register, and thus
2550 it cannot be aliased. */
2551 gcc_assert (!may_be_aliased (var));
2553 /* We are only interested in pointers. */
2554 if (!POINTER_TYPE_P (TREE_TYPE (op)))
2557 pi = get_ptr_info (op);
2559 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
2560 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
2562 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
2563 VEC_safe_push (tree, heap, ai->processed_ptrs, op);
2566 /* If STMT is a PHI node, then it will not have pointer
2567 dereferences and it will not be an escape point. */
2568 if (gimple_code (stmt) == GIMPLE_PHI)
2571 /* Determine whether OP is a dereferenced pointer, and if STMT
2572 is an escape point, whether OP escapes. */
2573 count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);
2575 /* For directly dereferenced pointers we can apply
2576 TBAA-pruning to their points-to set. We may not count the
2577 implicit dereferences &PTR->FLD here. */
2578 if (num_loads + num_stores > 0)
2579 pi->is_dereferenced = 1;
2581 /* Handle a corner case involving address expressions of the
2582 form '&PTR->FLD'. The problem with these expressions is that
2583 they do not represent a dereference of PTR. However, if some
2584 other transformation propagates them into an INDIRECT_REF
2585 expression, we end up with '*(&PTR->FLD)' which is folded
2588 So, if the original code had no other dereferences of PTR,
2589 the aliaser will not create memory tags for it, and when
2590 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
2591 memory operations will receive no VDEF/VUSE operands.
2593 One solution would be to have count_uses_and_derefs consider
2594 &PTR->FLD a dereference of PTR. But that is wrong, since it
2595 is not really a dereference but an offset calculation.
2597 What we do here is to recognize these special ADDR_EXPR
2598 nodes. Since these expressions are never GIMPLE values (they
2599 are not GIMPLE invariants), they can only appear on the RHS
2600 of an assignment and their base address is always an
2601 INDIRECT_REF expression. */
2602 if (is_gimple_assign (stmt)
2603 && gimple_assign_rhs_code (stmt) == ADDR_EXPR
2604 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2606 /* If the RHS if of the form &PTR->FLD and PTR == OP, then
2607 this represents a potential dereference of PTR. */
2608 tree rhs = gimple_assign_rhs1 (stmt);
2609 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2610 if (TREE_CODE (base) == INDIRECT_REF
2611 && TREE_OPERAND (base, 0) == op)
2615 if (num_loads + num_stores > 0)
2617 /* Mark OP as dereferenced. In a subsequent pass,
2618 dereferenced pointers that point to a set of
2619 variables will be assigned a name tag to alias
2620 all the variables OP points to. */
2621 pi->memory_tag_needed = 1;
2623 /* ??? For always executed direct dereferences we can
2624 apply TBAA-pruning to their escape set. */
2626 /* Mark OP as being dereferenced. */
2627 pointer_set_insert (ai->dereferenced_ptrs, var);
2629 /* Update the frequency estimate for all the dereferences of
2631 update_mem_sym_stats_from_stmt (op, stmt, num_loads, num_stores);
2633 /* Indicate that STMT contains pointer dereferences. */
2634 stmt_dereferences_ptr_p = true;
2637 if (stmt_escape_type != NO_ESCAPE && num_loads + num_stores < num_uses)
2639 /* If STMT is an escape point and STMT contains at
2640 least one direct use of OP, then the value of OP
2641 escapes and so the pointed-to variables need to
2642 be marked call-clobbered. */
2643 pi->value_escapes_p = 1;
2644 pi->escape_mask |= stmt_escape_type;
2646 /* If the statement makes a function call, assume
2647 that pointer OP will be dereferenced in a store
2648 operation inside the called function. */
2649 if (is_gimple_call (stmt)
2650 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
2652 pointer_set_insert (ai->dereferenced_ptrs, var);
2653 pi->memory_tag_needed = 1;
/* PHI nodes never reference memory directly; the per-statement
   memory accounting below does not apply to them.  */
2658 if (gimple_code (stmt) == GIMPLE_PHI)
2661 /* Mark stored variables in STMT as being written to and update the
2662 memory reference stats for all memory symbols referenced by STMT. */
2663 if (gimple_references_memory_p (stmt))
2668 mem_ref_stats->num_mem_stmts++;
2670 /* Notice that we only update memory reference stats for symbols
2671 loaded and stored by the statement if the statement does not
2672 contain pointer dereferences and it is not a call/asm site.
2673 This is to avoid double accounting problems when creating
2674 memory partitions. After computing points-to information,
2675 pointer dereference statistics are used to update the
2676 reference stats of the pointed-to variables, so here we
2677 should only update direct references to symbols.
2679 Indirect references are not updated here for two reasons: (1)
2680 The first time we compute alias information, the sets
2681 LOADED/STORED are empty for pointer dereferences, (2) After
2682 partitioning, LOADED/STORED may have references to
2683 partitions, not the original pointed-to variables. So, if we
2684 always counted LOADED/STORED here and during partitioning, we
2685 would count many symbols more than once.
2687 This does cause some imprecision when a statement has a
2688 combination of direct symbol references and pointer
2689 dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
2690 memory symbols in its argument list, but these cases do not
2691 occur so frequently as to constitute a serious problem. */
2692 if (!stmt_dereferences_ptr_p
2693 && stmt_escape_type != ESCAPE_TO_CALL
2694 && stmt_escape_type != ESCAPE_TO_PURE_CONST
2695 && stmt_escape_type != ESCAPE_TO_ASM)
2697 if (gimple_stored_syms (stmt))
2698 EXECUTE_IF_SET_IN_BITMAP (gimple_stored_syms (stmt), 0, i, bi)
2699 update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 0, 1)
2701 if (gimple_loaded_syms (stmt))
2702 EXECUTE_IF_SET_IN_BITMAP (gimple_loaded_syms (stmt), 0, i, bi)
2703 update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
2708 /* Update various related attributes like escaped addresses,
2709 pointer dereferences for loads and stores. This is used
2710 when creating name tags and alias sets. */
2713 update_alias_info (struct alias_info *ai)
2719 gimple_stmt_iterator gsi;
/* Walk the PHI nodes of each block; only PHIs whose result is a
   GIMPLE register are processed (virtual PHIs are skipped).  */
2722 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2724 phi = gsi_stmt (gsi);
2725 if (is_gimple_reg (PHI_RESULT (phi)))
2726 update_alias_info_1 (phi, ai);
/* Then walk every real statement in the block.  */
2729 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2730 update_alias_info_1 (gsi_stmt (gsi), ai);
2734 /* Create memory tags for all the dereferenced pointers and build the
2735 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
2736 sets. Based on the address escape and points-to information collected
2737 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
2738 variables whose address is not needed anymore. */
2741 setup_pointers_and_addressables (struct alias_info *ai)
2743 size_t num_addressable_vars, num_pointers;
2744 referenced_var_iterator rvi;
2746 VEC (tree, heap) *varvec = NULL;
2747 safe_referenced_var_iterator srvi;
2749 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
2750 num_addressable_vars = num_pointers = 0;
2752 FOR_EACH_REFERENCED_VAR (var, rvi)
2754 if (may_be_aliased (var))
2755 num_addressable_vars++;
2757 if (POINTER_TYPE_P (TREE_TYPE (var)))
2759 /* Since we don't keep track of volatile variables, assume that
2760 these pointers are used in indirect store operations. */
2761 if (TREE_THIS_VOLATILE (var))
2762 pointer_set_insert (ai->dereferenced_ptrs, var);
2768 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
2769 always going to be slightly bigger than we actually need them
2770 because some TREE_ADDRESSABLE variables will be marked
2771 non-addressable below and only pointers with unique symbol tags are
2772 going to be added to POINTERS. */
2773 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
2774 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
2775 ai->num_addressable_vars = 0;
2776 ai->num_pointers = 0;
/* The "safe" iterator is used because variables may be added to the
   referenced-var list (e.g. new memory tags) during this walk.  */
2778 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
2780 /* Name memory tags already have flow-sensitive aliasing
2781 information, so they need not be processed by
2782 compute_flow_insensitive_aliasing. Similarly, symbol memory
2783 tags are already accounted for when we process their
2786 Structure fields, on the other hand, have to have some of this
2787 information processed for them, but it's pointless to mark them
2788 non-addressable (since they are fake variables anyway). */
2792 /* Remove the ADDRESSABLE flag from every addressable variable whose
2793 address is not needed anymore. This is caused by the propagation
2794 of ADDR_EXPR constants into INDIRECT_REF expressions and the
2795 removal of dead pointer assignments done by the early scalar
2797 if (TREE_ADDRESSABLE (var))
2799 if (!bitmap_bit_p (gimple_addressable_vars (cfun), DECL_UID (var))
2800 && TREE_CODE (var) != RESULT_DECL
2801 && !is_global_var (var))
2803 bool okay_to_mark = true;
2805 /* Since VAR is now a regular GIMPLE register, we will need
2806 to rename VAR into SSA afterwards. */
2807 mark_sym_for_renaming (var);
2809 /* The address of VAR is not needed, remove the
2810 addressable bit, so that it can be optimized as a
2811 regular variable. */
2814 /* The memory partition holding VAR will no longer
2815 contain VAR, and statements referencing it will need
2817 if (memory_partition (var))
2818 mark_sym_for_renaming (memory_partition (var));
2820 mark_non_addressable (var);
2825 /* Global variables and addressable locals may be aliased. Create an
2826 entry in ADDRESSABLE_VARS for VAR. */
2827 if (may_be_aliased (var))
2829 create_alias_map_for (var, ai);
2830 mark_sym_for_renaming (var);
2833 /* Add pointer variables that have been dereferenced to the POINTERS
2834 array and create a symbol memory tag for them. */
2835 if (POINTER_TYPE_P (TREE_TYPE (var)))
2837 if (pointer_set_contains (ai->dereferenced_ptrs, var))
2842 /* If pointer VAR still doesn't have a memory tag
2843 associated with it, create it now or re-use an
2845 tag = get_smt_for (var, ai);
2846 t_ann = var_ann (tag);
2848 /* The symbol tag will need to be renamed into SSA
2849 afterwards. Note that we cannot do this inside
2850 get_smt_for because aliasing may run multiple times
2851 and we only create symbol tags the first time. */
2852 mark_sym_for_renaming (tag);
2854 /* Similarly, if pointer VAR used to have another type
2855 tag, we will need to process it in the renamer to
2856 remove the stale virtual operands. */
2857 old_tag = symbol_mem_tag (var);
2859 mark_sym_for_renaming (old_tag);
2861 /* Associate the tag with pointer VAR. */
2862 set_symbol_mem_tag (var, tag);
2866 /* The pointer has not been dereferenced. If it had a
2867 symbol memory tag, remove it and mark the old tag for
2868 renaming to remove it out of the IL. */
2869 tree tag = symbol_mem_tag (var);
2872 mark_sym_for_renaming (tag);
2873 set_symbol_mem_tag (var, NULL_TREE);
2879 VEC_free (tree, heap, varvec);
2883 /* Determine whether to use .GLOBAL_VAR to model call clobbering
2884 semantics. If the function makes no references to global
2885 variables and contains at least one call to a non-pure function,
2886 then we need to mark the side-effects of the call using .GLOBAL_VAR
2887 to represent all possible global memory referenced by the callee. */
2890 maybe_create_global_var (void)
2892 /* No need to create it, if we have one already. */
2893 if (gimple_global_var (cfun) == NULL_TREE)
2895 struct mem_ref_stats_d *stats = gimple_mem_ref_stats (cfun);
2897 /* Create .GLOBAL_VAR if there are no call-clobbered
2898 variables and the program contains a mixture of pure/const
2899 and regular function calls. This is to avoid the problem
2900 described in PR 20115:
2903 int func_pure (void) { return X; }
2904 int func_non_pure (int a) { X += a; }
2907 int a = func_pure ();
2913 Since foo() has no call-clobbered variables, there is
2914 no relationship between the calls to func_pure and
2915 func_non_pure. Since func_pure has no side-effects, value
2916 numbering optimizations elide the second call to func_pure.
2917 So, if we have some pure/const and some regular calls in the
2918 program we create .GLOBAL_VAR to avoid missing these
/* All four conditions must hold: no call-clobbered vars, at least
   one call, at least one pure/const call, and at least one call
   that is NOT pure/const (the strict inequality).  */
2920 if (bitmap_empty_p (gimple_call_clobbered_vars (cfun))
2921 && stats->num_call_sites > 0
2922 && stats->num_pure_const_call_sites > 0
2923 && stats->num_call_sites > stats->num_pure_const_call_sites)
2924 create_global_var ();
2929 /* Return TRUE if pointer PTR may point to variable VAR.
2931 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
2932 This is needed because when checking for type conflicts we are
2933 interested in the alias set of the memory location pointed-to by
2934 PTR. The alias set of PTR itself is irrelevant.
2936 VAR_ALIAS_SET is the alias set for VAR. */
2939 may_alias_p (tree ptr, alias_set_type mem_alias_set,
2940 tree var, alias_set_type var_alias_set,
2941 bool alias_set_only)
2945 alias_stats.alias_queries++;
2946 alias_stats.simple_queries++;
2948 /* By convention, a variable cannot alias itself. */
2949 mem = symbol_mem_tag (ptr);
2952 alias_stats.alias_noalias++;
2953 alias_stats.simple_resolved++;
2957 /* If -fargument-noalias-global is > 2, pointer arguments may
2958 not point to anything else. */
2959 if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
2961 alias_stats.alias_noalias++;
2962 alias_stats.simple_resolved++;
2966 /* If -fargument-noalias-global is > 1, pointer arguments may
2967 not point to global variables. */
2968 if (flag_argument_noalias > 1 && is_global_var (var)
2969 && TREE_CODE (ptr) == PARM_DECL)
2971 alias_stats.alias_noalias++;
2972 alias_stats.simple_resolved++;
2976 /* If the pointed to memory has alias set zero, or the pointer
2977 is ref-all, or the pointer decl is marked that no TBAA is to
2978 be applied, the MEM can alias VAR. */
2979 if (mem_alias_set == 0
2980 || DECL_POINTER_ALIAS_SET (ptr) == 0
2981 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (ptr))
2982 || DECL_NO_TBAA_P (ptr))
2984 alias_stats.alias_mayalias++;
2985 alias_stats.simple_resolved++;
2989 gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);
2991 alias_stats.tbaa_queries++;
2993 /* If the alias sets don't conflict then MEM cannot alias VAR. */
2994 if (mem_alias_set != var_alias_set
2995 && !alias_set_subset_of (mem_alias_set, var_alias_set))
2997 alias_stats.alias_noalias++;
2998 alias_stats.tbaa_resolved++;
3002 /* If VAR is a record or union type, PTR cannot point into VAR
3003 unless there is some explicit address operation in the
3004 program that can reference a field of the type pointed-to by
3005 PTR. This also assumes that the types of both VAR and PTR
3006 are contained within the compilation unit, and that there is
3007 no fancy addressing arithmetic associated with any of the
3009 if (mem_alias_set != 0 && var_alias_set != 0)
3011 tree ptr_type = TREE_TYPE (ptr);
3012 tree var_type = TREE_TYPE (var);
3014 /* The star count is -1 if the type at the end of the
3015 pointer_to chain is not a record or union type. */
/* NOTE: the ipa-type-escape check below is disabled (the "0 &&"
   was left in during the tuples conversion), so this branch is
   currently dead code.  */
3016 if (!alias_set_only &&
3017 0 /* FIXME tuples ipa_type_escape_star_count_of_interesting_type (var_type) >= 0*/)
3019 int ptr_star_count = 0;
3021 /* ipa_type_escape_star_count_of_interesting_type is a
3022 little too restrictive for the pointer type, need to
3023 allow pointers to primitive types as long as those
3024 types cannot be pointers to everything. */
3025 while (POINTER_TYPE_P (ptr_type))
3027 /* Strip the *s off. */
3028 ptr_type = TREE_TYPE (ptr_type);
3032 /* There does not appear to be a better test to see if
3033 the pointer type was one of the pointer to everything
3035 if (ptr_star_count > 0)
3037 alias_stats.structnoaddress_queries++;
3038 if (ipa_type_escape_field_does_not_clobber_p (var_type,
3041 alias_stats.structnoaddress_resolved++;
3042 alias_stats.alias_noalias++;
3046 else if (ptr_star_count == 0)
3048 /* If PTR_TYPE was not really a pointer to type, it cannot
3050 alias_stats.structnoaddress_queries++;
3051 alias_stats.structnoaddress_resolved++;
3052 alias_stats.alias_noalias++;
/* All disambiguation attempts failed: conservatively answer
   "may alias".  */
3058 alias_stats.alias_mayalias++;
3062 /* Return true, if PTR may point to a global variable. */
3065 may_point_to_global_var (tree ptr)
3067 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3069 /* If we do not have points-to information for this variable,
3072 || !pi->name_mem_tag)
3075 /* The name memory tag is marked as global variable if the points-to
3076 set contains a global variable. */
3077 return is_global_var (pi->name_mem_tag);
3080 /* Add ALIAS to the set of variables that may alias VAR. */
3083 add_may_alias (tree var, tree alias)
3085 /* Don't allow self-referential aliases. */
3086 gcc_assert (var != alias)
3088 /* ALIAS must be addressable if it's being added to an alias set. */
3090 TREE_ADDRESSABLE (alias) = 1;
3092 gcc_assert (may_be_aliased (alias));
3095 /* VAR must be a symbol or a name tag. */
3096 gcc_assert (TREE_CODE (var) == SYMBOL_MEMORY_TAG
3097 || TREE_CODE (var) == NAME_MEMORY_TAG);
/* Lazily create VAR's alias bitmap, then record ALIAS by its
   DECL_UID.  */
3099 if (MTAG_ALIASES (var) == NULL)
3100 MTAG_ALIASES (var) = BITMAP_ALLOC (&alias_bitmap_obstack);
3102 bitmap_set_bit (MTAG_ALIASES (var), DECL_UID (alias));
3106 /* Mark pointer PTR as pointing to an arbitrary memory location. */
3109 set_pt_anything (tree ptr)
3111 struct ptr_info_def *pi = get_ptr_info (ptr);
3113 pi->pt_anything = 1;
3114 /* Anything includes global memory. */
3115 pi->pt_global_mem = 1;
3118 /* The pointer used to have a name tag, but we now found it pointing
3119 to an arbitrary location. The name tag needs to be renamed and
3120 disassociated from PTR. */
3121 if (pi->name_mem_tag)
3123 mark_sym_for_renaming (pi->name_mem_tag);
3124 pi->name_mem_tag = NULL_TREE;
3129 /* Return true if STMT is an "escape" site from the current function. Escape
3130 sites those statements which might expose the address of a variable
3131 outside the current function. STMT is an escape site iff:
3133 1- STMT is a function call, or
3134 2- STMT is an __asm__ expression, or
3135 3- STMT is an assignment to a non-local variable, or
3136 4- STMT is a return statement.
3138 Return the type of escape site found, if we found one, or NO_ESCAPE
3142 is_escape_site (gimple stmt)
3144 if (is_gimple_call (stmt))
/* Pure/const calls are distinguished so callers can account them
   separately (see maybe_create_global_var).  */
3146 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3147 return ESCAPE_TO_PURE_CONST;
3149 return ESCAPE_TO_CALL;
3151 else if (gimple_code (stmt) == GIMPLE_ASM)
3152 return ESCAPE_TO_ASM;
3153 else if (is_gimple_assign (stmt))
3155 tree lhs = gimple_assign_lhs (stmt);
3157 /* Get to the base of _REF nodes. */
3158 if (TREE_CODE (lhs) != SSA_NAME)
3159 lhs = get_base_address (lhs);
3161 /* If we couldn't recognize the LHS of the assignment, assume that it
3162 is a non-local store. */
3163 if (lhs == NULL_TREE)
3164 return ESCAPE_UNKNOWN;
3166 if (gimple_assign_cast_p (stmt))
3168 tree from = TREE_TYPE (gimple_assign_rhs1 (stmt));
3169 tree to = TREE_TYPE (lhs);
3171 /* If the RHS is a conversion between a pointer and an integer, the
3172 pointer escapes since we can't track the integer. */
3173 if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
3174 return ESCAPE_BAD_CAST;
3177 /* If the LHS is an SSA name, it can't possibly represent a non-local
3179 if (TREE_CODE (lhs) == SSA_NAME)
3182 /* If the LHS is a non-global decl, it isn't a non-local memory store.
3183 If the LHS escapes, the RHS escape is dealt with in the PTA solver. */
3185 && !is_global_var (lhs))
3188 /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
3189 local variables we cannot be sure if it will escape, because we
3190 don't have information about objects not in SSA form. Need to
3191 implement something along the lines of
3193 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
3194 Midkiff, ``Escape analysis for java,'' in Proceedings of the
3195 Conference on Object-Oriented Programming Systems, Languages, and
3196 Applications (OOPSLA), pp. 1-19, 1999. */
3197 return ESCAPE_STORED_IN_GLOBAL;
3199 else if (gimple_code (stmt) == GIMPLE_RETURN)
3200 return ESCAPE_TO_RETURN;
3205 /* Create a new memory tag of type TYPE.
3206 Does NOT push it into the current binding. */
3209 create_tag_raw (enum tree_code code, tree type, const char *prefix)
/* PREFIX seeds the generated temporary name (e.g. "SMT"/"NMT").  */
3213 tmp_var = build_decl (code, create_tmp_var_name (prefix), type);
3215 /* Memory tags are always writable and non-static. */
3216 TREE_READONLY (tmp_var) = 0;
3217 TREE_STATIC (tmp_var) = 0;
3219 /* It doesn't start out global. */
3220 MTAG_GLOBAL (tmp_var) = 0;
3221 TREE_USED (tmp_var) = 1;
3226 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
3227 is considered to represent all the pointers whose pointed-to types are
3228 in the same alias set class. Otherwise, the tag represents a single
3229 SSA_NAME pointer variable. */
3232 create_memory_tag (tree type, bool is_type_tag)
3234 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
3235 type, (is_type_tag) ? "SMT" : "NMT");
3237 /* By default, memory tags are local variables. Alias analysis will
3238 determine whether they should be considered globals. */
3239 DECL_CONTEXT (tag) = current_function_decl;
3241 /* Memory tags are by definition addressable. */
3242 TREE_ADDRESSABLE (tag) = 1;
/* A new tag starts with no associated symbol memory tag.  */
3244 set_symbol_mem_tag (tag, NULL_TREE);
3246 /* Add the tag to the symbol table. */
3247 add_referenced_var (tag);
3253 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
3254 This is used if P_i has been found to point to a specific set of
3255 variables or to a non-aliased memory location like the address returned
3256 by malloc functions. */
3259 get_nmt_for (tree ptr)
3261 struct ptr_info_def *pi = get_ptr_info (ptr);
3262 tree tag = pi->name_mem_tag;
/* Reuse an existing tag; otherwise create one typed after what PTR
   points to (TREE_TYPE of PTR's pointed-to type).  */
3264 if (tag == NULL_TREE)
3265 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
3270 /* Return the symbol memory tag associated to pointer PTR. A memory
3271 tag is an artificial variable that represents the memory location
3272 pointed-to by PTR. It is used to model the effects of pointer
3273 de-references on addressable variables.
3275 AI points to the data gathered during alias analysis. This
3276 function populates the array AI->POINTERS. */
3279 get_smt_for (tree ptr, struct alias_info *ai)
3283 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
3284 alias_set_type tag_set;
3286 /* Get the alias set to be used for the pointed-to memory. If that
3287 differs from what we would get from looking at the type adjust
3288 the tag_type to void to make sure we get a proper alias set from
3289 just looking at the SMT we create. */
3290 tag_set = get_alias_set (tag_type);
3291 if (TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (ptr))
3292 /* This is overly conservative but we do not want to assign
3293 restrict alias sets here (which if they are not assigned
3294 are -2 but still "known"). */
3295 || DECL_POINTER_ALIAS_SET_KNOWN_P (ptr))
3298 tag_type = void_type_node;
3301 /* To avoid creating unnecessary memory tags, only create one memory tag
3302 per alias set class. Note that it may be tempting to group
3303 memory tags based on conflicting alias sets instead of
3304 equivalence. That would be wrong because alias sets are not
3305 necessarily transitive (as demonstrated by the libstdc++ test
3306 23_containers/vector/cons/4.cc). Given three alias sets A, B, C
3307 such that conflicts (A, B) == true and conflicts (A, C) == true,
3308 it does not necessarily follow that conflicts (B, C) == true. */
3309 for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
3311 struct alias_map_d *curr = ai->pointers[i];
3312 tree curr_tag = symbol_mem_tag (curr->var);
/* A previously-seen pointer whose cached alias set equals TAG_SET
   shares its symbol tag with PTR. */
3313 if (tag_set == curr->set)
3320 /* If VAR cannot alias with any of the existing memory tags, create a new
3321 tag for PTR and add it to the POINTERS array. */
3322 if (tag == NULL_TREE)
3324 struct alias_map_d *alias_map;
3326 /* If PTR did not have a symbol tag already, create a new SMT.*
3327 artificial variable representing the memory location
3328 pointed-to by PTR. */
3329 tag = symbol_mem_tag (ptr);
/* Reuse PTR's existing tag only if its alias set still matches;
   otherwise build a fresh SMT of TAG_TYPE. */
3330 if (tag == NULL_TREE
3331 || tag_set != get_alias_set (tag))
3332 tag = create_memory_tag (tag_type, true);
3334 /* Add PTR to the POINTERS array. Note that we are not interested in
3335 PTR's alias set. Instead, we cache the alias set for the memory that
3337 alias_map = XCNEW (struct alias_map_d);
3338 alias_map->var = ptr;
3339 alias_map->set = tag_set;
3340 ai->pointers[ai->num_pointers++] = alias_map;
3343 /* If the pointed-to type is volatile, so is the tag. */
3344 TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
3346 /* Make sure that the symbol tag has the same alias set as the
3347 pointed-to type or at least accesses through the pointer will
3348 alias that set. The latter can happen after the vectorizer
3349 created pointers of vector type. */
3350 gcc_assert (tag_set == get_alias_set (tag)
3351 || alias_set_subset_of (tag_set, get_alias_set (tag)));
3357 /* Create GLOBAL_VAR, an artificial global variable to act as a
3358 representative of all the variables that may be clobbered by function
3362 create_global_var (void)
3364 tree global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
/* Compiler-generated, writable, externally visible, static storage,
   not owned by any function. */
3366 DECL_ARTIFICIAL (global_var) = 1;
3367 TREE_READONLY (global_var) = 0;
3368 DECL_EXTERNAL (global_var) = 1;
3369 TREE_STATIC (global_var) = 1;
3370 TREE_USED (global_var) = 1;
3371 DECL_CONTEXT (global_var) = NULL_TREE;
3372 TREE_THIS_VOLATILE (global_var) = 0;
3373 TREE_ADDRESSABLE (global_var) = 0;
/* Register the variable with the SSA machinery: it is call-clobbered
   (ESCAPE_UNKNOWN), referenced, and must be renamed into SSA form. */
3375 create_var_ann (global_var);
3376 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
3377 add_referenced_var (global_var);
3378 mark_sym_for_renaming (global_var);
/* Cache it in the per-function gimple dataflow structure. */
3379 cfun->gimple_df->global_var = global_var;
3383 /* Dump alias statistics on FILE. */
3386 dump_alias_stats (FILE *file)
3388 const char *funcname
3389 = lang_hooks.decl_printable_name (current_function_decl, 2);
3390 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
/* Each counter printed below is a field of the file-scope alias_stats
   structure, updated as alias queries are answered. */
3391 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
3392 fprintf (file, "Total alias mayalias results:\t%u\n",
3393 alias_stats.alias_mayalias);
3394 fprintf (file, "Total alias noalias results:\t%u\n",
3395 alias_stats.alias_noalias);
3396 fprintf (file, "Total simple queries:\t%u\n",
3397 alias_stats.simple_queries);
3398 fprintf (file, "Total simple resolved:\t%u\n",
3399 alias_stats.simple_resolved);
3400 fprintf (file, "Total TBAA queries:\t%u\n",
3401 alias_stats.tbaa_queries);
3402 fprintf (file, "Total TBAA resolved:\t%u\n",
3403 alias_stats.tbaa_resolved);
3404 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
3405 alias_stats.structnoaddress_queries);
3406 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
3407 alias_stats.structnoaddress_resolved);
3411 /* Dump alias information on FILE. */
3414 dump_alias_info (FILE *file)
3417 const char *funcname
3418 = lang_hooks.decl_printable_name (current_function_decl, 2);
3419 referenced_var_iterator rvi;
3422 fprintf (file, "\nAlias information for %s\n\n", funcname);
3424 dump_memory_partitions (file);
/* Flow-insensitive section: aliased symbols, dereferenced pointers
   (those with a symbol memory tag), and the SMTs themselves. */
3426 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
3428 fprintf (file, "Aliased symbols\n\n");
3430 FOR_EACH_REFERENCED_VAR (var, rvi)
3432 if (may_be_aliased (var))
3433 dump_variable (file, var);
3436 fprintf (file, "\nDereferenced pointers\n\n");
3438 FOR_EACH_REFERENCED_VAR (var, rvi)
3439 if (symbol_mem_tag (var))
3440 dump_variable (file, var);
3442 fprintf (file, "\nSymbol memory tags\n\n");
3444 FOR_EACH_REFERENCED_VAR (var, rvi)
3446 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
3447 dump_variable (file, var);
/* Flow-sensitive section: per-SSA_NAME points-to data and NMTs. */
3450 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
3452 fprintf (file, "SSA_NAME pointers\n\n");
/* SSA name slot 0 is not used, so start iterating at 1. */
3453 for (i = 1; i < num_ssa_names; i++)
3455 tree ptr = ssa_name (i);
3456 struct ptr_info_def *pi;
3458 if (ptr == NULL_TREE)
3461 pi = SSA_NAME_PTR_INFO (ptr);
/* Only dump live names that actually carry a name memory tag. */
3462 if (!SSA_NAME_IN_FREE_LIST (ptr)
3464 && pi->name_mem_tag)
3465 dump_points_to_info_for (file, ptr);
3468 fprintf (file, "\nName memory tags\n\n");
3470 FOR_EACH_REFERENCED_VAR (var, rvi)
3472 if (TREE_CODE (var) == NAME_MEMORY_TAG)
3473 dump_variable (file, var);
3476 fprintf (file, "\n");
3480 /* Dump alias information on stderr. */
/* Convenience wrapper intended for interactive use from a debugger. */
3483 debug_alias_info (void)
3485 dump_alias_info (stderr);
3489 /* Return the alias information associated with pointer T. It creates a
3490 new instance if none existed. */
3492 struct ptr_info_def *
3493 get_ptr_info (tree t)
3495 struct ptr_info_def *pi;
/* Points-to information only makes sense for pointer-typed names. */
3497 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
3499 pi = SSA_NAME_PTR_INFO (t);
/* Allocate a cleared (zero-initialized) ptr_info_def in GC-managed
   memory and cache it on T so subsequent calls return the same one. */
3502 pi = GGC_CNEW (struct ptr_info_def);
3503 SSA_NAME_PTR_INFO (t) = pi;
3509 /* Dump points-to information for SSA_NAME PTR into FILE. */
3512 dump_points_to_info_for (FILE *file, tree ptr)
3514 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3516 print_generic_expr (file, ptr, dump_flags);
/* Each attribute of PI is printed as a comma-separated annotation. */
3520 if (pi->name_mem_tag)
3522 fprintf (file, ", name memory tag: ");
3523 print_generic_expr (file, pi->name_mem_tag, dump_flags);
3526 if (pi->is_dereferenced)
3527 fprintf (file, ", is dereferenced");
3528 else if (pi->memory_tag_needed)
3529 fprintf (file, ", is dereferenced in call");
3531 if (pi->value_escapes_p)
3532 fprintf (file, ", its value escapes");
3534 if (pi->pt_anything)
3535 fprintf (file, ", points-to anything");
3538 fprintf (file, ", points-to NULL");
/* Finally the concrete points-to set, if one was computed. */
3542 fprintf (file, ", points-to vars: ");
3543 dump_decl_set (file, pi->pt_vars);
3547 fprintf (file, "\n");
3551 /* Dump points-to information for VAR into stderr. */
/* Convenience wrapper intended for interactive use from a debugger. */
3554 debug_points_to_info_for (tree var)
3556 dump_points_to_info_for (stderr, var);
3560 /* Dump points-to information into FILE. NOTE: This function is slow, as
3561 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
3564 dump_points_to_info (FILE *file ATTRIBUTE_UNUSED)
3567 gimple_stmt_iterator si;
3570 lang_hooks.decl_printable_name (current_function_decl, 2);
3571 referenced_var_iterator rvi;
3574 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
3576 /* First dump points-to information for the default definitions of
3577 pointer variables. This is necessary because default definitions are
3578 not part of the code. */
3579 FOR_EACH_REFERENCED_VAR (var, rvi)
3581 if (POINTER_TYPE_P (TREE_TYPE (var)))
3583 tree def = gimple_default_def (cfun, var);
3585 dump_points_to_info_for (file, def);
3589 /* Dump points-to information for every pointer defined in the program. */
/* PHI results of pointer type first ... */
3592 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
3594 gimple phi = gsi_stmt (si);
3595 tree ptr = PHI_RESULT (phi);
3596 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
3597 dump_points_to_info_for (file, ptr);
/* ... then every pointer-typed SSA definition in the block's
   statements. */
3600 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
3602 gimple stmt = gsi_stmt (si);
3604 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
3605 if (TREE_CODE (def) == SSA_NAME
3606 && POINTER_TYPE_P (TREE_TYPE (def)))
3607 dump_points_to_info_for (file, def);
3611 fprintf (file, "\n");
3615 /* Dump points-to info pointed to by PTO into STDERR. */
/* Convenience wrapper intended for interactive use from a debugger. */
3618 debug_points_to_info (void)
3620 dump_points_to_info (stderr);
3623 /* Dump to FILE the list of variables that may be aliasing VAR. */
3626 dump_may_aliases_for (FILE *file, tree var)
/* VAR is expected to be a memory tag; its may-alias set is a bitmap. */
3630 aliases = MTAG_ALIASES (var);
3637 fprintf (file, "{ ");
/* Each set bit is looked up as a referenced-variable index. */
3638 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
3640 al = referenced_var (i);
3641 print_generic_expr (file, al, dump_flags);
3642 fprintf (file, " ");
3644 fprintf (file, "}");
3649 /* Dump to stderr the list of variables that may be aliasing VAR. */
/* Convenience wrapper intended for interactive use from a debugger. */
3652 debug_may_aliases_for (tree var)
3654 dump_may_aliases_for (stderr, var);
3657 /* Return true if VAR may be aliased. */
3660 may_be_aliased (tree var)
/* Anything whose address has been taken may be aliased. */
3663 if (TREE_ADDRESSABLE (var))
3666 /* Globally visible variables can have their addresses taken by other
3667 translation units. */
3669 && MTAG_GLOBAL (var))
/* Non-tag declarations that are extern or public are visible to
   other translation units as well. */
3671 else if (!MTAG_P (var)
3672 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
3675 /* Automatic variables can't have their addresses escape any other
3676 way. This must be after the check for global variables, as
3677 extern declarations do not have TREE_STATIC set. */
3678 if (!TREE_STATIC (var))
3684 /* The following is based on code in add_stmt_operand to ensure that the
3685 same defs/uses/vdefs/vuses will be found after replacing a reference
3686 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
3687 is the address of var. Return a memtag for the ptr, after adding the
3688 proper may_aliases to it (which are the aliases of var, if it has any,
3692 add_may_alias_for_new_tag (tree tag, tree var)
3694 bitmap aliases = NULL;
3697 aliases = may_aliases (var);
3699 /* Case 1: |aliases| == 1 */
/* With exactly one alias that is itself an SMT, that tag can stand
   in directly. */
3701 && bitmap_single_bit_set_p (aliases))
3703 tree ali = referenced_var (bitmap_first_set_bit (aliases));
3704 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
3708 /* Case 2: |aliases| == 0 */
/* VAR has no aliases of its own, so TAG aliases just VAR. */
3709 if (aliases == NULL)
3710 add_may_alias (tag, var);
3713 /* Case 3: |aliases| > 1 */
/* Otherwise fold VAR's whole alias set into TAG. */
3714 union_alias_set_into (tag, aliases);
3719 /* Create a new symbol tag for PTR. Construct the may-alias list of
3720 this type tag so that it has the aliasing of VAR according to the
3721 location accessed by EXPR.
3723 Note, the set of aliases represented by the new symbol tag are not
3724 marked for renaming. */
3727 new_type_alias (tree ptr, tree var, tree expr)
3729 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
3731 tree ali = NULL_TREE;
3732 HOST_WIDE_INT offset, size, maxsize;
/* PTR must not already have a symbol tag, and VAR must be a real
   declaration, not a memory tag. */
3735 gcc_assert (symbol_mem_tag (ptr) == NULL_TREE);
3736 gcc_assert (!MTAG_P (var));
/* Decompose EXPR into its base and accessed extent. */
3738 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
3741 tag = create_memory_tag (tag_type, true);
3742 set_symbol_mem_tag (ptr, tag);
/* Populate the tag's may-alias set from VAR, then record the tag
   (possibly replaced by an existing one) on PTR. */
3744 ali = add_may_alias_for_new_tag (tag, var);
3746 set_symbol_mem_tag (ptr, ali);
/* The tag is global iff VAR is globally visible. */
3747 MTAG_GLOBAL (tag) = is_global_var (var);
3751 /* Reset the call_clobbered flags on our referenced vars. In
3752 theory, this only needs to be done for globals. */
3755 reset_cc_flags (void)
3758 referenced_var_iterator rvi;
/* Clear the flag on every referenced variable's annotation. */
3760 FOR_EACH_REFERENCED_VAR (var, rvi)
3761 var_ann (var)->call_clobbered = false;
/* Pass descriptor that runs reset_cc_flags: clears the call-clobbered
   flag on all referenced variables.  Requires referenced vars and the
   CFG; provides and destroys nothing. */
3765 struct gimple_opt_pass pass_reset_cc_flags =
3771 reset_cc_flags, /* execute */
3774 0, /* static_pass_number */
3776 PROP_referenced_vars |PROP_cfg, /* properties_required */
3777 0, /* properties_provided */
3778 0, /* properties_destroyed */
3779 0, /* todo_flags_start */
3780 0 /* todo_flags_finish */
3785 /* A dummy pass to cause aliases to be computed via TODO_rebuild_alias. */
3787 struct gimple_opt_pass pass_build_alias =
3796 0, /* static_pass_number */
3798 PROP_cfg | PROP_ssa, /* properties_required */
3799 PROP_alias, /* properties_provided */
3800 0, /* properties_destroyed */
3801 0, /* todo_flags_start */
3802 TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */