Import GCC-8 to a new vendor branch
[dragonfly.git] / contrib / gcc-8.0 / gcc / tree-object-size.c
1 /* __builtin_object_size (ptr, object_size_type) computation
2    Copyright (C) 2004-2018 Free Software Foundation, Inc.
3    Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "stringpool.h"
36 #include "attribs.h"
37
/* State shared by one invocation of the object-size computation.  */
struct object_size_info
{
  /* Which of the four size kinds is being computed: bit 0 selects
     subobject sizes, bit 1 selects minimum instead of maximum
     (see the comment above object_sizes).  */
  int object_size_type;
  /* Which pass of compute_builtin_object_size is running (0, 1 or 2).  */
  unsigned char pass;
  /* Set when a size changed during pass 2, forcing another iteration.  */
  bool changed;
  /* SSA name versions visited so far, and versions found on dependency
     cycles that need reexamination, respectively.  */
  bitmap visited, reexamine;
  /* Scratch arrays used only by check_for_plus_in_loops{,_1} during
     pass 1 (minimum sizes); NULL otherwise.  */
  unsigned int *depths;
  unsigned int *stack, *tos;
};
47
/* Value meaning "size could not be determined", per object_size_type:
   all-ones for the maximum queries (types 0 and 1, where unknown must
   not shrink any result) and zero for the minimum queries (types 2
   and 3, where unknown must not grow any result).  */
static const unsigned HOST_WIDE_INT unknown[4] = {
  HOST_WIDE_INT_M1U,
  HOST_WIDE_INT_M1U,
  0,
  0
};
54
/* Forward declarations; the workers below are mutually recursive.  */
static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
                              const_tree, int, unsigned HOST_WIDE_INT *);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);
70
/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.
   Each vector is indexed by SSA name version.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];

/* Bitmaps of SSA name versions whose object size has already been
   computed, one per object_size_type.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;
84
85
86 /* Initialize OFFSET_LIMIT variable.  */
87 static void
88 init_offset_limit (void)
89 {
90   if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
91     offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
92   else
93     offset_limit = -1;
94   offset_limit /= 2;
95 }
96
97
/* Compute the byte offset of EXPR within VAR, where EXPR is a chain of
   handled components (COMPONENT_REF, ARRAY_REF, ...) rooted at VAR.
   Return error_mark_node if the offset cannot be determined.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      /* Field offset is DECL_FIELD_OFFSET plus the byte part of
         DECL_FIELD_BIT_OFFSET.  */
      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not change the offset.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      /* The imaginary part is stored right after the real part.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      /* Bias the index by the array's lower bound; fold a negative
         constant index into a subtraction so the conversion to
         sizetype below stays in range.  */
      if (! integer_zerop (low_bound))
        t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      /* Only MEM_REFs based on an ADDR_EXPR are expected here; the
         offset is the MEM_REF's constant second operand.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
167
168
/* Compute __builtin_object_size for PTR, which must be an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument to __builtin_object_size.
   On success store the size in *PSIZE and return true; otherwise
   store unknown[object_size_type] and return false.  OSI may be NULL
   when called outside of the SSA propagation.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type, unsigned HOST_WIDE_INT *psize)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  /* Strip handled components to find the base object.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          /* Without propagation state (or for subobject queries),
             compute the whole-object size of the base pointer
             recursively.  */
          compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                       object_size_type & ~1, &sz);
        }
      else
        {
          /* Reuse (or trigger) the SSA propagation for the base
             pointer.  */
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          /* Subtract the MEM_REF offset, saturating at zero; give up
             on non-constant or out-of-range offsets.  */
          offset_int mem_offset;
          if (mem_ref_offset (pt_var).is_constant (&mem_offset))
            {
              offset_int dsz = wi::sub (sz, mem_offset);
              if (wi::neg_p (dsz))
                sz = 0;
              else if (wi::fits_uhwi_p (dsz))
                sz = dsz.to_uhwi ();
              else
                sz = unknown[object_size_type];
            }
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
           && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && TREE_CODE (pt_var) == STRING_CST
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
           && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          /* Subobject query: find the innermost accessed array or
             field whose address was taken.  */
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          /* Fall back to the whole object when the subobject's type
             size is unusable or larger than the containing object.  */
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    /* A constant index strictly below the array's upper
                       bound cannot be a trailing-array access; clearing
                       V marks the subobject as safe to size.  */
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    /* Walk up through union members.  */
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        /* If another FIELD_DECL follows this field, it
                           is not the record's last field, so the
                           subobject size is safe to use.  */
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    /* Walk up through union members again.  */
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              /* V == PT_VAR means the access looks like a trailing
                 array; use the whole object size instead.  */
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return false;
      else
        var_size = pt_var_size;
      /* Remaining bytes are the (sub)object size minus the offset of
         PTR within it, saturated at zero.  */
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      /* For a subobject inside a MEM_REF, additionally clamp by the
         bytes remaining in the whole object.  */
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return false;
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}
401
402
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  /* 0-based positions of the size (arg1) and optional count (arg2)
     call arguments; -1 means not present.  */
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  /* Honor the alloc_size attribute, whose values are 1-based argument
     positions.  */
  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  /* Built-in allocators have fixed argument positions.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      CASE_BUILT_IN_ALLOCA:
        arg1 = 0;
      default:
        break;
      }

  /* Only constant size arguments are usable.  */
  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int)gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  /* Two arguments: calloc-style COUNT * SIZE product.  */
  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
        fold_convert (sizetype, gimple_call_arg (call, arg1)),
        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
464
465
466 /* If object size is propagated from one of function's arguments directly
467    to its return value, return that argument for GIMPLE_CALL statement CALL.
468    Otherwise return NULL.  */
469
470 static tree
471 pass_through_call (const gcall *call)
472 {
473   unsigned rf = gimple_call_return_flags (call);
474   if (rf & ERF_RETURNS_ARG)
475     {
476       unsigned argnum = rf & ERF_RETURN_ARG_MASK;
477       if (argnum < gimple_call_num_args (call))
478         return gimple_call_arg (call, argnum);
479     }
480
481   /* __builtin_assume_aligned is intentionally not marked RET1.  */
482   if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
483     return gimple_call_arg (call, 0);
484
485   return NULL_TREE;
486 }
487
488
/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  OBJECT_SIZE_TYPE is the second argument
   to __builtin_object_size.  Return true on success and false
   when the object size could not be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
                             unsigned HOST_WIDE_INT *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  if (! offset_limit)
    init_offset_limit ();

  /* A bare address can be handled without SSA propagation.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
      return false;

  /* computed[] is only allocated by the objsz pass; when it is absent
     the full propagation machinery is unavailable.  */
  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
        return false;

      /* When not optimizing, rather than failing, make a small effort
         to determine the object size without the full benefit of
         the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
        {
          tree_code code = gimple_assign_rhs_code (def);
          if (code == POINTER_PLUS_EXPR)
            {
              tree offset = gimple_assign_rhs2 (def);
              ptr = gimple_assign_rhs1 (def);

              if (tree_fits_shwi_p (offset)
                  && compute_builtin_object_size (ptr, object_size_type, psize))
                {
                  /* Return zero when the offset is out of bounds.  */
                  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
                  *psize = off < *psize ? *psize - off : 0;
                  return true;
                }
            }
        }
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      if (num_ssa_names > object_sizes[object_size_type].length ())
        object_sizes[object_size_type].safe_grow (num_ssa_names);
      if (dump_file)
        {
          fprintf (dump_file, "Computing %s %sobject size for ",
                   (object_size_type & 2) ? "minimum" : "maximum",
                   (object_size_type & 1) ? "sub" : "");
          print_generic_expr (dump_file, ptr, dump_flags);
          fprintf (dump_file, ":\n");
        }

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
         can be computed.  osi.reexamine bitmap at the end will
         contain what variables were found in dependency cycles
         and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
         that need reexamination, until no object sizes are
         increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
        {
          bitmap reexamine = BITMAP_ALLOC (NULL);

          /* If looking for minimum instead of maximum object size,
             detect cases where a pointer is increased in a loop.
             Although even without this detection pass 2 would eventually
             terminate, it could take a long time.  If a pointer is
             increasing this way, we need to assume 0 object size.
             E.g. p = &buf[0]; while (cond) p = p + 4;  */
          if (object_size_type & 2)
            {
              osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
              osi.stack = XNEWVEC (unsigned int, num_ssa_names);
              osi.tos = osi.stack;
              osi.pass = 1;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  check_for_plus_in_loops (&osi, ssa_name (i));

              free (osi.depths);
              osi.depths = NULL;
              free (osi.stack);
              osi.stack = NULL;
              osi.tos = NULL;
            }

          do
            {
              osi.pass = 2;
              osi.changed = false;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  {
                    collect_object_sizes_for (&osi, ssa_name (i));
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Reexamining ");
                        print_generic_expr (dump_file, ssa_name (i),
                                            dump_flags);
                        fprintf (dump_file, "\n");
                      }
                  }
            }
          while (osi.changed);

          BITMAP_FREE (reexamine);
        }
      /* Names still marked for reexamination have reached a fixpoint;
         record them as computed.  */
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
        bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
        {
          EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
            if (object_sizes[object_size_type][i]
                != unknown[object_size_type])
              {
                print_generic_expr (dump_file, ssa_name (i),
                                    dump_flags);
                fprintf (dump_file,
                         ": %s %sobject size "
                         HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                         (object_size_type & 2) ? "minimum" : "maximum",
                         (object_size_type & 1) ? "sub" : "",
                         object_sizes[object_size_type][i]);
              }
        }

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
  return *psize != unknown[object_size_type];
}
661
662 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */
663
664 static void
665 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
666 {
667   int object_size_type = osi->object_size_type;
668   unsigned int varno = SSA_NAME_VERSION (ptr);
669   unsigned HOST_WIDE_INT bytes;
670
671   gcc_assert (object_sizes[object_size_type][varno]
672               != unknown[object_size_type]);
673   gcc_assert (osi->pass == 0);
674
675   if (TREE_CODE (value) == WITH_SIZE_EXPR)
676     value = TREE_OPERAND (value, 0);
677
678   /* Pointer variables should have been handled by merge_object_sizes.  */
679   gcc_assert (TREE_CODE (value) != SSA_NAME
680               || !POINTER_TYPE_P (TREE_TYPE (value)));
681
682   if (TREE_CODE (value) == ADDR_EXPR)
683     addr_object_size (osi, value, object_size_type, &bytes);
684   else
685     bytes = unknown[object_size_type];
686
687   if ((object_size_type & 2) == 0)
688     {
689       if (object_sizes[object_size_type][varno] < bytes)
690         object_sizes[object_size_type][varno] = bytes;
691     }
692   else
693     {
694       if (object_sizes[object_size_type][varno] > bytes)
695         object_sizes[object_size_type][varno] = bytes;
696     }
697 }
698
699
700 /* Compute object_sizes for PTR, defined to the result of a call.  */
701
702 static void
703 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
704 {
705   int object_size_type = osi->object_size_type;
706   unsigned int varno = SSA_NAME_VERSION (ptr);
707   unsigned HOST_WIDE_INT bytes;
708
709   gcc_assert (is_gimple_call (call));
710
711   gcc_assert (object_sizes[object_size_type][varno]
712               != unknown[object_size_type]);
713   gcc_assert (osi->pass == 0);
714
715   bytes = alloc_object_size (call, object_size_type);
716
717   if ((object_size_type & 2) == 0)
718     {
719       if (object_sizes[object_size_type][varno] < bytes)
720         object_sizes[object_size_type][varno] = bytes;
721     }
722   else
723     {
724       if (object_sizes[object_size_type][varno] > bytes)
725         object_sizes[object_size_type][varno] = bytes;
726     }
727 }
728
729
730 /* Compute object_sizes for PTR, defined to an unknown value.  */
731
732 static void
733 unknown_object_size (struct object_size_info *osi, tree ptr)
734 {
735   int object_size_type = osi->object_size_type;
736   unsigned int varno = SSA_NAME_VERSION (ptr);
737   unsigned HOST_WIDE_INT bytes;
738
739   gcc_assert (object_sizes[object_size_type][varno]
740               != unknown[object_size_type]);
741   gcc_assert (osi->pass == 0);
742
743   bytes = unknown[object_size_type];
744
745   if ((object_size_type & 2) == 0)
746     {
747       if (object_sizes[object_size_type][varno] < bytes)
748         object_sizes[object_size_type][varno] = bytes;
749     }
750   else
751     {
752       if (object_sizes[object_size_type][varno] > bytes)
753         object_sizes[object_size_type][varno] = bytes;
754     }
755 }
756
757
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later (i.e. ORIG is
   still on the reexamine worklist).  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  /* Nothing can change once DEST is already unknown.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  /* Offsets at or past the limit are treated as pointer subtraction,
     which makes the size unknown.  */
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  /* Subtract OFFSET from ORIG's size, saturating at zero.  */
  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  /* Grow for maximum queries, shrink for minimum queries, and flag
     any change so pass 2 keeps iterating.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
803
804
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR (or an equivalent ADDR_EXPR of a
   MEM_REF).  Return true if the object size might need reexamination
   later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      /* VAR = &MEM_REF[ptr + off] is handled like ptr p+ off.  */
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  /* Nothing can change once VAR is already unknown.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
          addr_object_size (osi, op0, object_size_type, &bytes);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  /* Grow for maximum queries, shrink for minimum queries.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
875
876
877 /* Compute object_sizes for VAR, defined at STMT, which is
878    a COND_EXPR.  Return true if the object size might need reexamination
879    later.  */
880
881 static bool
882 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
883 {
884   tree then_, else_;
885   int object_size_type = osi->object_size_type;
886   unsigned int varno = SSA_NAME_VERSION (var);
887   bool reexamine = false;
888
889   gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
890
891   if (object_sizes[object_size_type][varno] == unknown[object_size_type])
892     return false;
893
894   then_ = gimple_assign_rhs2 (stmt);
895   else_ = gimple_assign_rhs3 (stmt);
896
897   if (TREE_CODE (then_) == SSA_NAME)
898     reexamine |= merge_object_sizes (osi, var, then_, 0);
899   else
900     expr_object_size (osi, var, then_);
901
902   if (TREE_CODE (else_) == SSA_NAME)
903     reexamine |= merge_object_sizes (osi, var, else_, 0);
904   else
905     expr_object_size (osi, var, else_);
906
907   return reexamine;
908 }
909
910 /* Compute object sizes for VAR.
911    For ADDR_EXPR an object size is the number of remaining bytes
912    to the end of the object (where what is considered an object depends on
913    OSI->object_size_type).
914    For allocation GIMPLE_CALL like malloc or calloc object size is the size
915    of the allocation.
916    For POINTER_PLUS_EXPR where second operand is a constant integer,
917    object size is object size of the first operand minus the constant.
918    If the constant is bigger than the number of remaining bytes until the
919    end of the object, object size is 0, but if it is instead a pointer
920    subtraction, object size is unknown[object_size_type].
921    To differentiate addition from subtraction, ADDR_EXPR returns
922    unknown[object_size_type] for all objects bigger than half of the address
923    space, and constants less than half of the address space are considered
924    addition, while bigger constants subtraction.
925    For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
926    object size is object size of that argument.
927    Otherwise, object size is the maximum of object sizes of variables
928    that it might be set to.  */
929
static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* First visit: seed with the identity of the merge operation,
	     0 when maximizing (types 0/1), all-ones when minimizing
	     (types 2/3).  */
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	/* Pointer arithmetic, explicit or via &MEM_REF[base, offset].  */
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    /* Plain pointer copies inherit the size of the source;
	       anything else is sized directly from the RHS expression.  */
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	/* Memcpy-like calls that always return one of their arguments
	   take the size of that argument; other calls are sized as
	   allocations.  */
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Merge the sizes of all PHI arguments, stopping early once the
	   size degrades to unknown.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* A final (or hopelessly unknown) size is marked computed; otherwise
     remember to revisit VAR on a later pass.  */
  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
1070
1071
1072 /* Helper function for check_for_plus_in_loops.  Called recursively
1073    to detect loops.  */
1074
static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      /* VAR is already on the walk stack.  A different depth means a
	 nonzero constant was added somewhere along the cycle.  */
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  Every variable on
	     the stack down to VARNO participates; force its size to the
	     minimum 0 and mark it computed.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    /* Only variables still awaiting reexamination can be in a loop.  */
    return;

  /* Push VAR on the walk stack before recursing into its operands.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* A nonzero addition bumps the depth; that is what makes a
	       pointer-increasing cycle detectable above.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR off the walk stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1170
1171
1172 /* Check if some pointer we are computing object size of is being increased
1173    within a loop.  If yes, assume all the SSA variables participating in
1174    that loop have minimum object sizes 0.  */
1175
1176 static void
1177 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1178 {
1179   gimple *stmt = SSA_NAME_DEF_STMT (var);
1180
1181   /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1182      and looked for a POINTER_PLUS_EXPR in the pass-through
1183      argument, if any.  In GIMPLE, however, such an expression
1184      is not a valid call operand.  */
1185
1186   if (is_gimple_assign (stmt)
1187       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1188     {
1189       tree basevar = gimple_assign_rhs1 (stmt);
1190       tree cst = gimple_assign_rhs2 (stmt);
1191
1192       gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1193
1194       if (integer_zerop (cst))
1195         return;
1196
1197       osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1198       *osi->tos++ = SSA_NAME_VERSION (basevar);
1199       check_for_plus_in_loops_1 (osi, var, 2);
1200       osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1201       osi->tos--;
1202     }
1203 }
1204
1205
1206 /* Initialize data structures for the object size computation.  */
1207
1208 void
1209 init_object_sizes (void)
1210 {
1211   int object_size_type;
1212
1213   if (computed[0])
1214     return;
1215
1216   for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1217     {
1218       object_sizes[object_size_type].safe_grow (num_ssa_names);
1219       computed[object_size_type] = BITMAP_ALLOC (NULL);
1220     }
1221
1222   init_offset_limit ();
1223 }
1224
1225
1226 /* Destroy data structures after the object size computation.  */
1227
1228 void
1229 fini_object_sizes (void)
1230 {
1231   int object_size_type;
1232
1233   for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1234     {
1235       object_sizes[object_size_type].release ();
1236       BITMAP_FREE (computed[object_size_type]);
1237     }
1238 }
1239
1240
1241 /* Simple pass to optimize all __builtin_object_size () builtins.  */
1242
namespace {

/* Pass descriptor shared by all instances of the "objsz" pass; whether
   an instance folds the builtin outright or inserts MIN/MAX_EXPRs is
   chosen per instance via pass_object_sizes::set_pass_param.  */
const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1257
class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  /* The single pass parameter selects between folding the builtin to a
     constant and inserting MIN/MAX_EXPRs (see execute below).  */
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

 private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes
1278
1279 /* Dummy valueize function.  */
1280
1281 static tree
1282 do_valueize (tree t)
1283 {
1284   return t;
1285 }
1286
unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple *call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  /* Allocate the per-SSA-name tables lazily, only once the
	     first __builtin_object_size call is seen.  */
	  init_object_sizes ();

	  /* If insert_min_max_p, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  */
	  if (insert_min_max_p)
	    {
	      tree ost = gimple_call_arg (call, 1);
	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
		  tree ptr = gimple_call_arg (call, 0);
		  tree lhs = gimple_call_lhs (call);
		  if ((object_size_type == 1 || object_size_type == 3)
		      && (TREE_CODE (ptr) == ADDR_EXPR
			  || TREE_CODE (ptr) == SSA_NAME)
		      && lhs)
		    {
		      tree type = TREE_TYPE (lhs);
		      unsigned HOST_WIDE_INT bytes;
		      if (compute_builtin_object_size (ptr, object_size_type,
						       &bytes)
			  && wi::fits_to_tree_p (bytes, type))
			{
			  /* Keep the call but retarget it to a fresh lhs
			     TEM, then define the old lhs as
			     MIN/MAX (TEM, bytes) right after it.  */
			  tree tem = make_ssa_name (type);
			  gimple_call_set_lhs (call, tem);
			  enum tree_code code
			    = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
			  tree cst = build_int_cstu (type, bytes);
			  gimple *g
			    = gimple_build_assign (lhs, code, tem, cst);
			  gsi_insert_after (&i, g, GSI_NEW_STMT);
			  update_stmt (call);
			}
		    }
		}
	      continue;
	    }

	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      /* The size could not be computed; substitute the fallback
		 values: (size_t) -1 for the maximum modes (0 and 1) and
		 0 for the minimum modes (2 and 3).  */
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  if (object_size_type < 2)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		  else if (object_size_type < 4)
		    result = build_zero_cst (size_type_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  replace_uses_by (lhs, result);
	}
    }

  fini_object_sizes ();
  return 0;
}
1385
1386 } // anon namespace
1387
/* Create a new instance of the object size ("objsz") pass.  */

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}