Import pre-release gcc-5.0 to new vendor branch
contrib/gcc-5.0/gcc/tree-object-size.c (dragonfly.git)
1 /* __builtin_object_size (ptr, object_size_type) computation
2    Copyright (C) 2004-2015 Free Software Foundation, Inc.
3    Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "hash-set.h"
26 #include "machmode.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "tree-object-size.h"
37 #include "diagnostic-core.h"
38 #include "gimple-pretty-print.h"
39 #include "bitmap.h"
40 #include "predict.h"
41 #include "hard-reg-set.h"
42 #include "input.h"
43 #include "function.h"
44 #include "dominance.h"
45 #include "cfg.h"
46 #include "basic-block.h"
47 #include "tree-ssa-alias.h"
48 #include "internal-fn.h"
49 #include "gimple-fold.h"
50 #include "gimple-expr.h"
51 #include "is-a.h"
52 #include "gimple.h"
53 #include "gimple-iterator.h"
54 #include "gimple-ssa.h"
55 #include "stringpool.h"
56 #include "tree-ssanames.h"
57 #include "tree-pass.h"
58 #include "tree-ssa-propagate.h"
59 #include "tree-phinodes.h"
60 #include "ssa-iterators.h"
61 #include "builtins.h"
62
63 struct object_size_info
64 {
65   int object_size_type;
66   bitmap visited, reexamine;
67   int pass;
68   bool changed;
69   unsigned int *depths;
70   unsigned int *stack, *tos;
71 };
72
73 static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
74
75 static tree compute_object_offset (const_tree, const_tree);
76 static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
77                                                 const_tree, int);
78 static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
79 static tree pass_through_call (const gcall *);
80 static void collect_object_sizes_for (struct object_size_info *, tree);
81 static void expr_object_size (struct object_size_info *, tree, tree);
82 static bool merge_object_sizes (struct object_size_info *, tree, tree,
83                                 unsigned HOST_WIDE_INT);
84 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
85 static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
86 static void init_offset_limit (void);
87 static void check_for_plus_in_loops (struct object_size_info *, tree);
88 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
89                                        unsigned int);
90
91 /* object_sizes[0] is the upper bound on the number of bytes till the end of
92    the object.
93    object_sizes[1] is the upper bound on the number of bytes till the end of
94    the subobject (innermost array or field with address taken).
95    object_sizes[2] is the lower bound on the number of bytes till the end of
96    the object, and object_sizes[3] is the lower bound for the subobject.  */
97 static vec<unsigned HOST_WIDE_INT> object_sizes[4];
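/* A minimal worked example of the four object_size_type variants, assuming
   a hypothetical struct S and a byte-addressed target:

     struct S { char buf[10]; char tail[6]; } s;   -- sizeof (s) == 16
     char *p = &s.buf[4];

     __builtin_object_size (p, 0) == 12   -- max bytes to end of s
     __builtin_object_size (p, 1) == 6    -- max bytes to end of buf
     __builtin_object_size (p, 2) == 12   -- min bytes to end of s
     __builtin_object_size (p, 3) == 6    -- min bytes to end of buf

   When nothing is known about the pointed-to object, types 0 and 1 yield
   (size_t) -1 and types 2 and 3 yield 0, matching the unknown[] initializer
   above.  */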
98
99 /* Bitmaps of which object sizes have been computed already.  */
100 static bitmap computed[4];
101
102 /* Maximum value of offset we still consider to be an addition.  */
103 static unsigned HOST_WIDE_INT offset_limit;
104
105
106 /* Initialize OFFSET_LIMIT variable.  */
107 static void
108 init_offset_limit (void)
109 {
110   if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
111     offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
112   else
113     offset_limit = -1;
114   offset_limit /= 2;
115 }
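/* A brief illustration, assuming a common 64-bit target where sizetype is
   64 bits wide: TYPE_MAX_VALUE (sizetype) is 0xffffffffffffffff, so
   offset_limit becomes 0x7fffffffffffffff, i.e. half of the address space.
   Constant offsets at or above this limit are no longer treated as
   additions (see the comment before collect_object_sizes_for).  */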
116
117
118 /* Compute offset of EXPR within VAR.  Return error_mark_node
119    if unknown.  */
120
121 static tree
122 compute_object_offset (const_tree expr, const_tree var)
123 {
124   enum tree_code code = PLUS_EXPR;
125   tree base, off, t;
126
127   if (expr == var)
128     return size_zero_node;
129
130   switch (TREE_CODE (expr))
131     {
132     case COMPONENT_REF:
133       base = compute_object_offset (TREE_OPERAND (expr, 0), var);
134       if (base == error_mark_node)
135         return base;
136
137       t = TREE_OPERAND (expr, 1);
138       off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
139                         size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
140                                   / BITS_PER_UNIT));
141       break;
142
143     case REALPART_EXPR:
144     CASE_CONVERT:
145     case VIEW_CONVERT_EXPR:
146     case NON_LVALUE_EXPR:
147       return compute_object_offset (TREE_OPERAND (expr, 0), var);
148
149     case IMAGPART_EXPR:
150       base = compute_object_offset (TREE_OPERAND (expr, 0), var);
151       if (base == error_mark_node)
152         return base;
153
154       off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
155       break;
156
157     case ARRAY_REF:
158       base = compute_object_offset (TREE_OPERAND (expr, 0), var);
159       if (base == error_mark_node)
160         return base;
161
162       t = TREE_OPERAND (expr, 1);
163       if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
164         {
165           code = MINUS_EXPR;
166           t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
167         }
168       t = fold_convert (sizetype, t);
169       off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
170       break;
171
172     case MEM_REF:
173       gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
174       return wide_int_to_tree (sizetype, mem_ref_offset (expr));
175
176     default:
177       return error_mark_node;
178     }
179
180   return size_binop (code, base, off);
181 }
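/* A small worked example, assuming the hypothetical declaration
   struct S { char buf[10]; char tail[6]; } s:

     compute_object_offset (s.tail[2], s)
       ARRAY_REF:     2 * TYPE_SIZE_UNIT (char)                ->  2
       COMPONENT_REF: byte offset of tail (DECL_FIELD_OFFSET
                      plus DECL_FIELD_BIT_OFFSET / BITS_PER_UNIT) -> 10
       result:        size_binop (PLUS_EXPR, 10, 2)            -> 12

   A negative constant array index is folded with MINUS_EXPR instead, and
   any expression not handled above yields error_mark_node.  */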
182
183
184 /* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
185    OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
186    If unknown, return unknown[object_size_type].  */
187
188 static unsigned HOST_WIDE_INT
189 addr_object_size (struct object_size_info *osi, const_tree ptr,
190                   int object_size_type)
191 {
192   tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
193
194   gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
195
196   pt_var = TREE_OPERAND (ptr, 0);
197   while (handled_component_p (pt_var))
198     pt_var = TREE_OPERAND (pt_var, 0);
199
200   if (pt_var
201       && TREE_CODE (pt_var) == MEM_REF)
202     {
203       unsigned HOST_WIDE_INT sz;
204
205       if (!osi || (object_size_type & 1) != 0
206           || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
207         {
208           sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
209                                             object_size_type & ~1);
210         }
211       else
212         {
213           tree var = TREE_OPERAND (pt_var, 0);
214           if (osi->pass == 0)
215             collect_object_sizes_for (osi, var);
216           if (bitmap_bit_p (computed[object_size_type],
217                             SSA_NAME_VERSION (var)))
218             sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
219           else
220             sz = unknown[object_size_type];
221         }
222       if (sz != unknown[object_size_type])
223         {
224           offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
225           if (wi::neg_p (dsz))
226             sz = 0;
227           else if (wi::fits_uhwi_p (dsz))
228             sz = dsz.to_uhwi ();
229           else
230             sz = unknown[object_size_type];
231         }
232
233       if (sz != unknown[object_size_type] && sz < offset_limit)
234         pt_var_size = size_int (sz);
235     }
236   else if (pt_var
237            && DECL_P (pt_var)
238            && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
239            && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
240     pt_var_size = DECL_SIZE_UNIT (pt_var);
241   else if (pt_var
242            && TREE_CODE (pt_var) == STRING_CST
243            && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
244            && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
245            && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
246               < offset_limit)
247     pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
248   else
249     return unknown[object_size_type];
250
251   if (pt_var != TREE_OPERAND (ptr, 0))
252     {
253       tree var;
254
255       if (object_size_type & 1)
256         {
257           var = TREE_OPERAND (ptr, 0);
258
259           while (var != pt_var
260                  && TREE_CODE (var) != BIT_FIELD_REF
261                  && TREE_CODE (var) != COMPONENT_REF
262                  && TREE_CODE (var) != ARRAY_REF
263                  && TREE_CODE (var) != ARRAY_RANGE_REF
264                  && TREE_CODE (var) != REALPART_EXPR
265                  && TREE_CODE (var) != IMAGPART_EXPR)
266             var = TREE_OPERAND (var, 0);
267           if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
268             var = TREE_OPERAND (var, 0);
269           if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
270               || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
271               || (pt_var_size
272                   && tree_int_cst_lt (pt_var_size,
273                                       TYPE_SIZE_UNIT (TREE_TYPE (var)))))
274             var = pt_var;
275           else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
276             {
277               tree v = var;
278               /* For &X->fld, compute object size only if fld isn't the last
279                  field, as struct { int i; char c[1]; } is often used instead
280                    of a flexible array member.  */
281               while (v && v != pt_var)
282                 switch (TREE_CODE (v))
283                   {
284                   case ARRAY_REF:
285                     if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
286                         && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
287                       {
288                         tree domain
289                           = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
290                         if (domain
291                             && TYPE_MAX_VALUE (domain)
292                             && TREE_CODE (TYPE_MAX_VALUE (domain))
293                                == INTEGER_CST
294                             && tree_int_cst_lt (TREE_OPERAND (v, 1),
295                                                 TYPE_MAX_VALUE (domain)))
296                           {
297                             v = NULL_TREE;
298                             break;
299                           }
300                       }
301                     v = TREE_OPERAND (v, 0);
302                     break;
303                   case REALPART_EXPR:
304                   case IMAGPART_EXPR:
305                     v = NULL_TREE;
306                     break;
307                   case COMPONENT_REF:
308                     if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
309                       {
310                         v = NULL_TREE;
311                         break;
312                       }
313                     while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
314                       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
315                           != UNION_TYPE
316                           && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
317                           != QUAL_UNION_TYPE)
318                         break;
319                       else
320                         v = TREE_OPERAND (v, 0);
321                     if (TREE_CODE (v) == COMPONENT_REF
322                         && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
323                            == RECORD_TYPE)
324                       {
325                         tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
326                         for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
327                           if (TREE_CODE (fld_chain) == FIELD_DECL)
328                             break;
329
330                         if (fld_chain)
331                           {
332                             v = NULL_TREE;
333                             break;
334                           }
335                         v = TREE_OPERAND (v, 0);
336                       }
337                     while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
338                       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
339                           != UNION_TYPE
340                           && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
341                           != QUAL_UNION_TYPE)
342                         break;
343                       else
344                         v = TREE_OPERAND (v, 0);
345                     if (v != pt_var)
346                       v = NULL_TREE;
347                     else
348                       v = pt_var;
349                     break;
350                   default:
351                     v = pt_var;
352                     break;
353                   }
354               if (v == pt_var)
355                 var = pt_var;
356             }
357         }
358       else
359         var = pt_var;
360
361       if (var != pt_var)
362         var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
363       else if (!pt_var_size)
364         return unknown[object_size_type];
365       else
366         var_size = pt_var_size;
367       bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
368       if (bytes != error_mark_node)
369         {
370           if (TREE_CODE (bytes) == INTEGER_CST
371               && tree_int_cst_lt (var_size, bytes))
372             bytes = size_zero_node;
373           else
374             bytes = size_binop (MINUS_EXPR, var_size, bytes);
375         }
376       if (var != pt_var
377           && pt_var_size
378           && TREE_CODE (pt_var) == MEM_REF
379           && bytes != error_mark_node)
380         {
381           tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
382           if (bytes2 != error_mark_node)
383             {
384               if (TREE_CODE (bytes2) == INTEGER_CST
385                   && tree_int_cst_lt (pt_var_size, bytes2))
386                 bytes2 = size_zero_node;
387               else
388                 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
389               bytes = size_binop (MIN_EXPR, bytes, bytes2);
390             }
391         }
392     }
393   else if (!pt_var_size)
394     return unknown[object_size_type];
395   else
396     bytes = pt_var_size;
397
398   if (tree_fits_uhwi_p (bytes))
399     return tree_to_uhwi (bytes);
400
401   return unknown[object_size_type];
402 }
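/* A sketch of the last-field heuristic above, assuming hypothetical user
   code.  Given

     struct T { int len; char data[1]; } *t = malloc (100);

   the subobject size __builtin_object_size (&t->data[0], 1) is not clamped
   to the declared 1-byte array, because data is the last field and such
   structs are commonly used in place of flexible array members; the size
   of the enclosing object (when known) is used instead.  For a field that
   is not last, e.g. buf in struct S { char buf[10]; char tail[6]; }, the
   type-1 size is limited to the field itself.  */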
403
404
405 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
406    Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
407    argument from __builtin_object_size.  If unknown, return
408    unknown[object_size_type].  */
409
410 static unsigned HOST_WIDE_INT
411 alloc_object_size (const gcall *call, int object_size_type)
412 {
413   tree callee, bytes = NULL_TREE;
414   tree alloc_size;
415   int arg1 = -1, arg2 = -1;
416
417   gcc_assert (is_gimple_call (call));
418
419   callee = gimple_call_fndecl (call);
420   if (!callee)
421     return unknown[object_size_type];
422
423   alloc_size = lookup_attribute ("alloc_size",
424                                  TYPE_ATTRIBUTES (TREE_TYPE (callee)));
425   if (alloc_size && TREE_VALUE (alloc_size))
426     {
427       tree p = TREE_VALUE (alloc_size);
428
429       arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
430       if (TREE_CHAIN (p))
431         arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
432     }
433
434   if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
435     switch (DECL_FUNCTION_CODE (callee))
436       {
437       case BUILT_IN_CALLOC:
438         arg2 = 1;
439         /* fall through */
440       case BUILT_IN_MALLOC:
441       case BUILT_IN_ALLOCA:
442       case BUILT_IN_ALLOCA_WITH_ALIGN:
443         arg1 = 0;
444       default:
445         break;
446       }
447
448   if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
449       || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
450       || (arg2 >= 0
451           && (arg2 >= (int)gimple_call_num_args (call)
452               || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
453     return unknown[object_size_type];
454
455   if (arg2 >= 0)
456     bytes = size_binop (MULT_EXPR,
457         fold_convert (sizetype, gimple_call_arg (call, arg1)),
458         fold_convert (sizetype, gimple_call_arg (call, arg2)));
459   else if (arg1 >= 0)
460     bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
461
462   if (bytes && tree_fits_uhwi_p (bytes))
463     return tree_to_uhwi (bytes);
464
465   return unknown[object_size_type];
466 }
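/* A few illustrative cases, assuming the allocation sizes are visible as
   INTEGER_CST arguments (my_alloc is a hypothetical function):

     void *my_alloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));

     p = malloc (40);        -> 40
     q = calloc (10, 8);     -> 10 * 8 == 80
     r = my_alloc (16, 4);   -> 16 * 4 == 64
     s = malloc (n);         -> unknown[object_size_type] when n is not a
                                compile-time constant.  */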
467
468
469 /* If object size is propagated from one of the function's arguments directly
470    to its return value, return that argument for GIMPLE_CALL statement CALL.
471    Otherwise return NULL.  */
472
473 static tree
474 pass_through_call (const gcall *call)
475 {
476   tree callee = gimple_call_fndecl (call);
477
478   if (callee
479       && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
480     switch (DECL_FUNCTION_CODE (callee))
481       {
482       case BUILT_IN_MEMCPY:
483       case BUILT_IN_MEMMOVE:
484       case BUILT_IN_MEMSET:
485       case BUILT_IN_STRCPY:
486       case BUILT_IN_STRNCPY:
487       case BUILT_IN_STRCAT:
488       case BUILT_IN_STRNCAT:
489       case BUILT_IN_MEMCPY_CHK:
490       case BUILT_IN_MEMMOVE_CHK:
491       case BUILT_IN_MEMSET_CHK:
492       case BUILT_IN_STRCPY_CHK:
493       case BUILT_IN_STRNCPY_CHK:
494       case BUILT_IN_STPNCPY_CHK:
495       case BUILT_IN_STRCAT_CHK:
496       case BUILT_IN_STRNCAT_CHK:
497       case BUILT_IN_ASSUME_ALIGNED:
498         if (gimple_call_num_args (call) >= 1)
499           return gimple_call_arg (call, 0);
500         break;
501       default:
502         break;
503       }
504
505   return NULL_TREE;
506 }
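/* For example, since memcpy returns its first argument, in

     q = memcpy (dst, src, n);

   the object size recorded for q is simply the object size of dst; the
   call itself neither creates nor shrinks an object.  */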
507
508
509 /* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
510    second argument from __builtin_object_size.  */
511
512 unsigned HOST_WIDE_INT
513 compute_builtin_object_size (tree ptr, int object_size_type)
514 {
515   gcc_assert (object_size_type >= 0 && object_size_type <= 3);
516
517   if (! offset_limit)
518     init_offset_limit ();
519
520   if (TREE_CODE (ptr) == ADDR_EXPR)
521     return addr_object_size (NULL, ptr, object_size_type);
522
523   if (TREE_CODE (ptr) == SSA_NAME
524       && POINTER_TYPE_P (TREE_TYPE (ptr))
525       && computed[object_size_type] != NULL)
526     {
527       if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
528         {
529           struct object_size_info osi;
530           bitmap_iterator bi;
531           unsigned int i;
532
533           if (num_ssa_names > object_sizes[object_size_type].length ())
534             object_sizes[object_size_type].safe_grow (num_ssa_names);
535           if (dump_file)
536             {
537               fprintf (dump_file, "Computing %s %sobject size for ",
538                        (object_size_type & 2) ? "minimum" : "maximum",
539                        (object_size_type & 1) ? "sub" : "");
540               print_generic_expr (dump_file, ptr, dump_flags);
541               fprintf (dump_file, ":\n");
542             }
543
544           osi.visited = BITMAP_ALLOC (NULL);
545           osi.reexamine = BITMAP_ALLOC (NULL);
546           osi.object_size_type = object_size_type;
547           osi.depths = NULL;
548           osi.stack = NULL;
549           osi.tos = NULL;
550
551           /* First pass: walk UD chains, compute object sizes that
552              can be computed.  osi.reexamine bitmap at the end will
553              contain what variables were found in dependency cycles
554              and therefore need to be reexamined.  */
555           osi.pass = 0;
556           osi.changed = false;
557           collect_object_sizes_for (&osi, ptr);
558
559           /* Second pass: keep recomputing object sizes of variables
560              that need reexamination, until no object sizes are
561              increased or all object sizes are computed.  */
562           if (! bitmap_empty_p (osi.reexamine))
563             {
564               bitmap reexamine = BITMAP_ALLOC (NULL);
565
566               /* If looking for minimum instead of maximum object size,
567                  detect cases where a pointer is increased in a loop.
568                  Although even without this detection pass 2 would eventually
569                  terminate, it could take a long time.  If a pointer is
570                  increasing this way, we need to assume 0 object size.
571                  E.g. p = &buf[0]; while (cond) p = p + 4;  */
572               if (object_size_type & 2)
573                 {
574                   osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
575                   osi.stack = XNEWVEC (unsigned int, num_ssa_names);
576                   osi.tos = osi.stack;
577                   osi.pass = 1;
578                   /* collect_object_sizes_for is changing
579                      osi.reexamine bitmap, so iterate over a copy.  */
580                   bitmap_copy (reexamine, osi.reexamine);
581                   EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
582                     if (bitmap_bit_p (osi.reexamine, i))
583                       check_for_plus_in_loops (&osi, ssa_name (i));
584
585                   free (osi.depths);
586                   osi.depths = NULL;
587                   free (osi.stack);
588                   osi.stack = NULL;
589                   osi.tos = NULL;
590                 }
591
592               do
593                 {
594                   osi.pass = 2;
595                   osi.changed = false;
596                   /* collect_object_sizes_for is changing
597                      osi.reexamine bitmap, so iterate over a copy.  */
598                   bitmap_copy (reexamine, osi.reexamine);
599                   EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
600                     if (bitmap_bit_p (osi.reexamine, i))
601                       {
602                         collect_object_sizes_for (&osi, ssa_name (i));
603                         if (dump_file && (dump_flags & TDF_DETAILS))
604                           {
605                             fprintf (dump_file, "Reexamining ");
606                             print_generic_expr (dump_file, ssa_name (i),
607                                                 dump_flags);
608                             fprintf (dump_file, "\n");
609                           }
610                       }
611                 }
612               while (osi.changed);
613
614               BITMAP_FREE (reexamine);
615             }
616           EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
617             bitmap_set_bit (computed[object_size_type], i);
618
619           /* Debugging dumps.  */
620           if (dump_file)
621             {
622               EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
623                 if (object_sizes[object_size_type][i]
624                     != unknown[object_size_type])
625                   {
626                     print_generic_expr (dump_file, ssa_name (i),
627                                         dump_flags);
628                     fprintf (dump_file,
629                              ": %s %sobject size "
630                              HOST_WIDE_INT_PRINT_UNSIGNED "\n",
631                              (object_size_type & 2) ? "minimum" : "maximum",
632                              (object_size_type & 1) ? "sub" : "",
633                              object_sizes[object_size_type][i]);
634                   }
635             }
636
637           BITMAP_FREE (osi.reexamine);
638           BITMAP_FREE (osi.visited);
639         }
640
641       return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
642     }
643
644   return unknown[object_size_type];
645 }
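/* A small example of the two modes on a PHI node, assuming hypothetical
   user code compiled to GIMPLE SSA:

     char buf[16];
     char *p = cond ? &buf[4] : &buf[8];    -- p_1 = PHI <&buf[4], &buf[8]>

     __builtin_object_size (p, 0) == 12     -- maximum over the PHI args
     __builtin_object_size (p, 2) == 8      -- minimum over the PHI args

   Dependency cycles (e.g. pointers advanced inside loops) are what the
   reexamine bitmap and the second pass above deal with.  */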
646
647 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */
648
649 static void
650 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
651 {
652   int object_size_type = osi->object_size_type;
653   unsigned int varno = SSA_NAME_VERSION (ptr);
654   unsigned HOST_WIDE_INT bytes;
655
656   gcc_assert (object_sizes[object_size_type][varno]
657               != unknown[object_size_type]);
658   gcc_assert (osi->pass == 0);
659
660   if (TREE_CODE (value) == WITH_SIZE_EXPR)
661     value = TREE_OPERAND (value, 0);
662
663   /* Pointer variables should have been handled by merge_object_sizes.  */
664   gcc_assert (TREE_CODE (value) != SSA_NAME
665               || !POINTER_TYPE_P (TREE_TYPE (value)));
666
667   if (TREE_CODE (value) == ADDR_EXPR)
668     bytes = addr_object_size (osi, value, object_size_type);
669   else
670     bytes = unknown[object_size_type];
671
672   if ((object_size_type & 2) == 0)
673     {
674       if (object_sizes[object_size_type][varno] < bytes)
675         object_sizes[object_size_type][varno] = bytes;
676     }
677   else
678     {
679       if (object_sizes[object_size_type][varno] > bytes)
680         object_sizes[object_size_type][varno] = bytes;
681     }
682 }
683
684
685 /* Compute object_sizes for PTR, defined to the result of a call.  */
686
687 static void
688 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
689 {
690   int object_size_type = osi->object_size_type;
691   unsigned int varno = SSA_NAME_VERSION (ptr);
692   unsigned HOST_WIDE_INT bytes;
693
694   gcc_assert (is_gimple_call (call));
695
696   gcc_assert (object_sizes[object_size_type][varno]
697               != unknown[object_size_type]);
698   gcc_assert (osi->pass == 0);
699
700   bytes = alloc_object_size (call, object_size_type);
701
702   if ((object_size_type & 2) == 0)
703     {
704       if (object_sizes[object_size_type][varno] < bytes)
705         object_sizes[object_size_type][varno] = bytes;
706     }
707   else
708     {
709       if (object_sizes[object_size_type][varno] > bytes)
710         object_sizes[object_size_type][varno] = bytes;
711     }
712 }
713
714
715 /* Compute object_sizes for PTR, defined to an unknown value.  */
716
717 static void
718 unknown_object_size (struct object_size_info *osi, tree ptr)
719 {
720   int object_size_type = osi->object_size_type;
721   unsigned int varno = SSA_NAME_VERSION (ptr);
722   unsigned HOST_WIDE_INT bytes;
723
724   gcc_assert (object_sizes[object_size_type][varno]
725               != unknown[object_size_type]);
726   gcc_assert (osi->pass == 0);
727
728   bytes = unknown[object_size_type];
729
730   if ((object_size_type & 2) == 0)
731     {
732       if (object_sizes[object_size_type][varno] < bytes)
733         object_sizes[object_size_type][varno] = bytes;
734     }
735   else
736     {
737       if (object_sizes[object_size_type][varno] > bytes)
738         object_sizes[object_size_type][varno] = bytes;
739     }
740 }
741
742
743 /* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
744    the object size might need reexamination later.  */
745
746 static bool
747 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
748                     unsigned HOST_WIDE_INT offset)
749 {
750   int object_size_type = osi->object_size_type;
751   unsigned int varno = SSA_NAME_VERSION (dest);
752   unsigned HOST_WIDE_INT orig_bytes;
753
754   if (object_sizes[object_size_type][varno] == unknown[object_size_type])
755     return false;
756   if (offset >= offset_limit)
757     {
758       object_sizes[object_size_type][varno] = unknown[object_size_type];
759       return false;
760     }
761
762   if (osi->pass == 0)
763     collect_object_sizes_for (osi, orig);
764
765   orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
766   if (orig_bytes != unknown[object_size_type])
767     orig_bytes = (offset > orig_bytes)
768                  ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;
769
770   if ((object_size_type & 2) == 0)
771     {
772       if (object_sizes[object_size_type][varno] < orig_bytes)
773         {
774           object_sizes[object_size_type][varno] = orig_bytes;
775           osi->changed = true;
776         }
777     }
778   else
779     {
780       if (object_sizes[object_size_type][varno] > orig_bytes)
781         {
782           object_sizes[object_size_type][varno] = orig_bytes;
783           osi->changed = true;
784         }
785     }
786   return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
787 }
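/* For instance, when merging q = p + 4 into q's entry and p's object size
   is known to be 16, the candidate value is 16 - 4 == 12; in maximum mode
   DEST only grows towards that value, in minimum mode it only shrinks.
   An OFFSET at or beyond offset_limit makes DEST unknown immediately.  */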
788
789
790 /* Compute object_sizes for VAR, defined to the result of an assignment
791    with operator POINTER_PLUS_EXPR.  Return true if the object size might
792    need reexamination later.  */
793
794 static bool
795 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
796 {
797   int object_size_type = osi->object_size_type;
798   unsigned int varno = SSA_NAME_VERSION (var);
799   unsigned HOST_WIDE_INT bytes;
800   tree op0, op1;
801
802   if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
803     {
804       op0 = gimple_assign_rhs1 (stmt);
805       op1 = gimple_assign_rhs2 (stmt);
806     }
807   else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
808     {
809       tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
810       gcc_assert (TREE_CODE (rhs) == MEM_REF);
811       op0 = TREE_OPERAND (rhs, 0);
812       op1 = TREE_OPERAND (rhs, 1);
813     }
814   else
815     gcc_unreachable ();
816
817   if (object_sizes[object_size_type][varno] == unknown[object_size_type])
818     return false;
819
820   /* Handle PTR + OFFSET here.  */
821   if (TREE_CODE (op1) == INTEGER_CST
822       && (TREE_CODE (op0) == SSA_NAME
823           || TREE_CODE (op0) == ADDR_EXPR))
824     {
825       if (! tree_fits_uhwi_p (op1))
826         bytes = unknown[object_size_type];
827       else if (TREE_CODE (op0) == SSA_NAME)
828         return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
829       else
830         {
831           unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);
832
833           /* op0 will be ADDR_EXPR here.  */
834           bytes = addr_object_size (osi, op0, object_size_type);
835           if (bytes == unknown[object_size_type])
836             ;
837           else if (off > offset_limit)
838             bytes = unknown[object_size_type];
839           else if (off > bytes)
840             bytes = 0;
841           else
842             bytes -= off;
843         }
844     }
845   else
846     bytes = unknown[object_size_type];
847
848   if ((object_size_type & 2) == 0)
849     {
850       if (object_sizes[object_size_type][varno] < bytes)
851         object_sizes[object_size_type][varno] = bytes;
852     }
853   else
854     {
855       if (object_sizes[object_size_type][varno] > bytes)
856         object_sizes[object_size_type][varno] = bytes;
857     }
858   return false;
859 }
860
861
862 /* Compute object_sizes for VAR, defined at STMT, which is
863    a COND_EXPR.  Return true if the object size might need reexamination
864    later.  */
865
866 static bool
867 cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
868 {
869   tree then_, else_;
870   int object_size_type = osi->object_size_type;
871   unsigned int varno = SSA_NAME_VERSION (var);
872   bool reexamine = false;
873
874   gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
875
876   if (object_sizes[object_size_type][varno] == unknown[object_size_type])
877     return false;
878
879   then_ = gimple_assign_rhs2 (stmt);
880   else_ = gimple_assign_rhs3 (stmt);
881
882   if (TREE_CODE (then_) == SSA_NAME)
883     reexamine |= merge_object_sizes (osi, var, then_, 0);
884   else
885     expr_object_size (osi, var, then_);
886
887   if (TREE_CODE (else_) == SSA_NAME)
888     reexamine |= merge_object_sizes (osi, var, else_, 0);
889   else
890     expr_object_size (osi, var, else_);
891
892   return reexamine;
893 }
894
895 /* Compute object sizes for VAR.
896    For ADDR_EXPR an object size is the number of remaining bytes
897    to the end of the object (where what is considered an object depends on
898    OSI->object_size_type).
899    For an allocation GIMPLE_CALL like malloc or calloc, the object size is
900    the size of the allocation.
901    For POINTER_PLUS_EXPR where the second operand is a constant integer,
902    the object size is the object size of the first operand minus the constant.
903    If the constant is bigger than the number of remaining bytes until the
904    end of the object, the object size is 0, but if it is instead a pointer
905    subtraction, the object size is unknown[object_size_type].
906    To differentiate addition from subtraction, ADDR_EXPR returns
907    unknown[object_size_type] for all objects bigger than half of the address
908    space, and constants less than half of the address space are considered
909    addition, while bigger constants are considered subtraction.
910    For a memcpy-like GIMPLE_CALL that always returns one of its arguments, the
911    object size is the object size of that argument.
912    Otherwise, object size is the maximum of object sizes of variables
913    that it might be set to.  */
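/* For example (assuming a 64-bit sizetype), a pointer subtraction such as
   q = p - 4 reaches here as POINTER_PLUS_EXPR with the unsigned offset
   0xfffffffffffffffc.  That constant is above offset_limit (half of the
   address space), so it is treated as a subtraction and the resulting
   object size is unknown[object_size_type] rather than a bogus small
   value.  */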
914
915 static void
916 collect_object_sizes_for (struct object_size_info *osi, tree var)
917 {
918   int object_size_type = osi->object_size_type;
919   unsigned int varno = SSA_NAME_VERSION (var);
920   gimple stmt;
921   bool reexamine;
922
923   if (bitmap_bit_p (computed[object_size_type], varno))
924     return;
925
926   if (osi->pass == 0)
927     {
928       if (bitmap_set_bit (osi->visited, varno))
929         {
930           object_sizes[object_size_type][varno]
931             = (object_size_type & 2) ? -1 : 0;
932         }
933       else
934         {
935           /* Found a dependency loop.  Mark the variable for later
936              re-examination.  */
937           bitmap_set_bit (osi->reexamine, varno);
938           if (dump_file && (dump_flags & TDF_DETAILS))
939             {
940               fprintf (dump_file, "Found a dependency loop at ");
941               print_generic_expr (dump_file, var, dump_flags);
942               fprintf (dump_file, "\n");
943             }
944           return;
945         }
946     }
947
948   if (dump_file && (dump_flags & TDF_DETAILS))
949     {
950       fprintf (dump_file, "Visiting use-def links for ");
951       print_generic_expr (dump_file, var, dump_flags);
952       fprintf (dump_file, "\n");
953     }
954
955   stmt = SSA_NAME_DEF_STMT (var);
956   reexamine = false;
957
958   switch (gimple_code (stmt))
959     {
960     case GIMPLE_ASSIGN:
961       {
962         tree rhs = gimple_assign_rhs1 (stmt);
963         if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
964             || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
965                 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
966           reexamine = plus_stmt_object_size (osi, var, stmt);
967         else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
968           reexamine = cond_expr_object_size (osi, var, stmt);
969         else if (gimple_assign_single_p (stmt)
970                  || gimple_assign_unary_nop_p (stmt))
971           {
972             if (TREE_CODE (rhs) == SSA_NAME
973                 && POINTER_TYPE_P (TREE_TYPE (rhs)))
974               reexamine = merge_object_sizes (osi, var, rhs, 0);
975             else
976               expr_object_size (osi, var, rhs);
977           }
978         else
979           unknown_object_size (osi, var);
980         break;
981       }
982
983     case GIMPLE_CALL:
984       {
985         gcall *call_stmt = as_a <gcall *> (stmt);
986         tree arg = pass_through_call (call_stmt);
987         if (arg)
988           {
989             if (TREE_CODE (arg) == SSA_NAME
990                 && POINTER_TYPE_P (TREE_TYPE (arg)))
991               reexamine = merge_object_sizes (osi, var, arg, 0);
992             else
993               expr_object_size (osi, var, arg);
994           }
995         else
996           call_object_size (osi, var, call_stmt);
997         break;
998       }
999
1000     case GIMPLE_ASM:
1001       /* Pointers defined by __asm__ statements can point anywhere.  */
1002       object_sizes[object_size_type][varno] = unknown[object_size_type];
1003       break;
1004
1005     case GIMPLE_NOP:
1006       if (SSA_NAME_VAR (var)
1007           && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1008         expr_object_size (osi, var, SSA_NAME_VAR (var));
1009       else
1010         /* Uninitialized SSA names point nowhere.  */
1011         object_sizes[object_size_type][varno] = unknown[object_size_type];
1012       break;
1013
1014     case GIMPLE_PHI:
1015       {
1016         unsigned i;
1017
1018         for (i = 0; i < gimple_phi_num_args (stmt); i++)
1019           {
1020             tree rhs = gimple_phi_arg (stmt, i)->def;
1021
1022             if (object_sizes[object_size_type][varno]
1023                 == unknown[object_size_type])
1024               break;
1025
1026             if (TREE_CODE (rhs) == SSA_NAME)
1027               reexamine |= merge_object_sizes (osi, var, rhs, 0);
1028             else if (osi->pass == 0)
1029               expr_object_size (osi, var, rhs);
1030           }
1031         break;
1032       }
1033
1034     default:
1035       gcc_unreachable ();
1036     }
1037
1038   if (! reexamine
1039       || object_sizes[object_size_type][varno] == unknown[object_size_type])
1040     {
1041       bitmap_set_bit (computed[object_size_type], varno);
1042       bitmap_clear_bit (osi->reexamine, varno);
1043     }
1044   else
1045     {
1046       bitmap_set_bit (osi->reexamine, varno);
1047       if (dump_file && (dump_flags & TDF_DETAILS))
1048         {
1049           fprintf (dump_file, "Need to reexamine ");
1050           print_generic_expr (dump_file, var, dump_flags);
1051           fprintf (dump_file, "\n");
1052         }
1053     }
1054 }
1055
1056
1057 /* Helper function for check_for_plus_in_loops.  Called recursively
1058    to detect loops.  */
1059
1060 static void
1061 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1062                            unsigned int depth)
1063 {
1064   gimple stmt = SSA_NAME_DEF_STMT (var);
1065   unsigned int varno = SSA_NAME_VERSION (var);
1066
1067   if (osi->depths[varno])
1068     {
1069       if (osi->depths[varno] != depth)
1070         {
1071           unsigned int *sp;
1072
1073           /* Found a loop involving pointer addition.  */
1074           for (sp = osi->tos; sp > osi->stack; )
1075             {
1076               --sp;
1077               bitmap_clear_bit (osi->reexamine, *sp);
1078               bitmap_set_bit (computed[osi->object_size_type], *sp);
1079               object_sizes[osi->object_size_type][*sp] = 0;
1080               if (*sp == varno)
1081                 break;
1082             }
1083         }
1084       return;
1085     }
1086   else if (! bitmap_bit_p (osi->reexamine, varno))
1087     return;
1088
1089   osi->depths[varno] = depth;
1090   *osi->tos++ = varno;
1091
1092   switch (gimple_code (stmt))
1093     {
1094
1095     case GIMPLE_ASSIGN:
1096       {
1097         if ((gimple_assign_single_p (stmt)
1098              || gimple_assign_unary_nop_p (stmt))
1099             && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1100           {
1101             tree rhs = gimple_assign_rhs1 (stmt);
1102
1103             check_for_plus_in_loops_1 (osi, rhs, depth);
1104           }
1105         else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1106           {
1107             tree basevar = gimple_assign_rhs1 (stmt);
1108             tree cst = gimple_assign_rhs2 (stmt);
1109
1110             gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1111
1112             check_for_plus_in_loops_1 (osi, basevar,
1113                                        depth + !integer_zerop (cst));
1114           }
1115         else
1116           gcc_unreachable ();
1117         break;
1118       }
1119
1120     case GIMPLE_CALL:
1121       {
1122         gcall *call_stmt = as_a <gcall *> (stmt);
1123         tree arg = pass_through_call (call_stmt);
1124         if (arg)
1125           {
1126             if (TREE_CODE (arg) == SSA_NAME)
1127               check_for_plus_in_loops_1 (osi, arg, depth);
1128             else
1129               gcc_unreachable ();
1130           }
1131         break;
1132       }
1133
1134     case GIMPLE_PHI:
1135       {
1136         unsigned i;
1137
1138         for (i = 0; i < gimple_phi_num_args (stmt); i++)
1139           {
1140             tree rhs = gimple_phi_arg (stmt, i)->def;
1141
1142             if (TREE_CODE (rhs) == SSA_NAME)
1143               check_for_plus_in_loops_1 (osi, rhs, depth);
1144           }
1145         break;
1146       }
1147
1148     default:
1149       gcc_unreachable ();
1150     }
1151
1152   osi->depths[varno] = 0;
1153   osi->tos--;
1154 }
1155
1156
1157 /* Check if some pointer we are computing object size of is being increased
1158    within a loop.  If yes, assume all the SSA variables participating in
1159    that loop have minimum object sizes 0.  */
1160
1161 static void
1162 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1163 {
1164   gimple stmt = SSA_NAME_DEF_STMT (var);
1165
1166   /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1167      and looked for a POINTER_PLUS_EXPR in the pass-through
1168      argument, if any.  In GIMPLE, however, such an expression
1169      is not a valid call operand.  */
1170
1171   if (is_gimple_assign (stmt)
1172       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1173     {
1174       tree basevar = gimple_assign_rhs1 (stmt);
1175       tree cst = gimple_assign_rhs2 (stmt);
1176
1177       gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1178
1179       if (integer_zerop (cst))
1180         return;
1181
1182       osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1183       *osi->tos++ = SSA_NAME_VERSION (basevar);
1184       check_for_plus_in_loops_1 (osi, var, 2);
1185       osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1186       osi->tos--;
1187     }
1188 }
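/* In SSA form the problematic pattern from the comment above looks like

     p_1 = PHI <&buf[0](entry), p_2(latch)>
     p_2 = p_1 + 4;

   The non-zero increment inside the cycle bumps DEPTH, so
   check_for_plus_in_loops_1 detects the loop and forces the minimum
   object size of every SSA name on the cycle to 0.  */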
1189
1190
1191 /* Initialize data structures for the object size computation.  */
1192
1193 void
1194 init_object_sizes (void)
1195 {
1196   int object_size_type;
1197
1198   if (computed[0])
1199     return;
1200
1201   for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1202     {
1203       object_sizes[object_size_type].safe_grow (num_ssa_names);
1204       computed[object_size_type] = BITMAP_ALLOC (NULL);
1205     }
1206
1207   init_offset_limit ();
1208 }
1209
1210
1211 /* Destroy data structures after the object size computation.  */
1212
1213 static void
1214 fini_object_sizes (void)
1215 {
1216   int object_size_type;
1217
1218   for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1219     {
1220       object_sizes[object_size_type].release ();
1221       BITMAP_FREE (computed[object_size_type]);
1222     }
1223 }
1224
1225
1226 /* Simple pass to optimize all __builtin_object_size () builtins.  */
1227
1228 namespace {
1229
1230 const pass_data pass_data_object_sizes =
1231 {
1232   GIMPLE_PASS, /* type */
1233   "objsz", /* name */
1234   OPTGROUP_NONE, /* optinfo_flags */
1235   TV_NONE, /* tv_id */
1236   ( PROP_cfg | PROP_ssa ), /* properties_required */
1237   0, /* properties_provided */
1238   0, /* properties_destroyed */
1239   0, /* todo_flags_start */
1240   0, /* todo_flags_finish */
1241 };
1242
1243 class pass_object_sizes : public gimple_opt_pass
1244 {
1245 public:
1246   pass_object_sizes (gcc::context *ctxt)
1247     : gimple_opt_pass (pass_data_object_sizes, ctxt)
1248   {}
1249
1250   /* opt_pass methods: */
1251   opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
1252   virtual unsigned int execute (function *);
1253
1254 }; // class pass_object_sizes
1255
1256 unsigned int
1257 pass_object_sizes::execute (function *fun)
1258 {
1259   basic_block bb;
1260   FOR_EACH_BB_FN (bb, fun)
1261     {
1262       gimple_stmt_iterator i;
1263       for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1264         {
1265           tree result;
1266           gimple call = gsi_stmt (i);
1267           if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
1268             continue;
1269
1270           init_object_sizes ();
1271           result = fold_call_stmt (as_a <gcall *> (call), false);
1272           if (!result)
1273             {
1274               if (gimple_call_num_args (call) == 2
1275                   && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
1276                 {
1277                   tree ost = gimple_call_arg (call, 1);
1278
1279                   if (tree_fits_uhwi_p (ost))
1280                     {
1281                       unsigned HOST_WIDE_INT object_size_type
1282                         = tree_to_uhwi (ost);
1283
1284                       if (object_size_type < 2)
1285                         result = fold_convert (size_type_node,
1286                                                integer_minus_one_node);
1287                       else if (object_size_type < 4)
1288                         result = build_zero_cst (size_type_node);
1289                     }
1290                 }
1291
1292               if (!result)
1293                 continue;
1294             }
1295
1296           gcc_assert (TREE_CODE (result) == INTEGER_CST);
1297
1298           if (dump_file && (dump_flags & TDF_DETAILS))
1299             {
1300               fprintf (dump_file, "Simplified\n  ");
1301               print_gimple_stmt (dump_file, call, 0, dump_flags);
1302               fprintf (dump_file, " to ");
1303               print_generic_expr (dump_file, result, 0);
1304               fprintf (dump_file, "\n");
1305             }
1306
1307           tree lhs = gimple_call_lhs (call);
1308           if (!lhs)
1309             continue;
1310
1311           /* Propagate into all uses and fold those stmts.  */
1312           gimple use_stmt;
1313           imm_use_iterator iter;
1314           FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
1315             {
1316               use_operand_p use_p;
1317               FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1318                 SET_USE (use_p, result);
1319               gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1320               fold_stmt (&gsi);
1321               update_stmt (gsi_stmt (gsi));
1322             }
1323         }
1324     }
1325
1326   fini_object_sizes ();
1327   return 0;
1328 }
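/* As an illustration, with _FORTIFY_SOURCE-style instrumentation a
   sequence such as

     _5 = __builtin_object_size (buf_2, 0);
     __builtin___memcpy_chk (buf_2, src_3, len_4, _5);

   is handled by this pass: the __builtin_object_size result is computed
   (or defaulted to -1/0 when unknowable), substituted into every use of
   _5, and the using statements are then re-folded via fold_stmt.  */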
1329
1330 } // anon namespace
1331
1332 gimple_opt_pass *
1333 make_pass_object_sizes (gcc::context *ctxt)
1334 {
1335   return new pass_object_sizes (ctxt);
1336 }