/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2021 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "stringpool.h"
#include "attribs.h"

struct object_size_info
{
  int object_size_type;
  unsigned char pass;
  bool changed;
  bitmap visited, reexamine;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = {
  HOST_WIDE_INT_M1U,
  HOST_WIDE_INT_M1U,
  0,
  0
};

static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
                              const_tree, int, unsigned HOST_WIDE_INT *,
                              tree * = NULL, tree * = NULL);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);
/* object_sizes[0] is the upper bound on the number of bytes remaining
   to the end of the object.
   object_sizes[1] is the upper bound on the number of bytes remaining
   to the end of the subobject (innermost array or field with address
   taken).
   object_sizes[2] is the lower bound on the number of bytes remaining
   to the end of the object and object_sizes[3] the lower bound for the
   subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];
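
/* For illustration (a hypothetical example, not part of the pass):
   given

     struct A { int i; char buf[10]; } a;
     char *p = cond ? &a.buf[4] : &a.buf[6];

   object_sizes[0]/[2] count bytes to the end of the whole object A,
   while object_sizes[1]/[3] stop at the end of the buf subobject:
   the maximum subobject size of p is 10 - 4 = 6 bytes and the minimum
   subobject size is 10 - 6 = 4 bytes.  */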

/* Bitmaps recording which object sizes have already been computed.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be an addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}


/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
        t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
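
/* For illustration: with

     struct A { int i; char buf[10]; } a;

   compute_object_offset applied to a.buf[3] within a folds to the
   constant offsetof (struct A, buf) + 3; tree codes outside the
   handled set above fall through to the error_mark_node default.  */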

/* Return the size of the object designated by DECL, taking its
   initializer into account if the initializer is present or if it
   could not affect the size.  If the initializer does matter but is
   not available, return the size of the object without the
   initializer when MIN is true, else null.  An object's initializer
   affects the object's size if the object is a struct type with a
   flexible array member.  */

tree
decl_init_size (tree decl, bool min)
{
  tree size = DECL_SIZE_UNIT (decl);
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    return size;

  tree last = last_field (type);
  if (!last)
    return size;

  tree last_type = TREE_TYPE (last);
  if (TREE_CODE (last_type) != ARRAY_TYPE
      || TYPE_SIZE (last_type))
    return size;

  /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
     of the initializer and sometimes doesn't.  */
  size = TYPE_SIZE_UNIT (type);
  tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
  tree compsize = component_ref_size (ref);
  if (!compsize)
    return min ? size : NULL_TREE;

  /* The size includes tail padding and initializer elements.  */
  tree pos = byte_position (last);
  size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
  return size;
}
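
/* For illustration: for a declaration such as

     struct S { int n; char data[]; };
     struct S s = { 3, "abc" };

   the size of s is computed above as byte_position (data) plus the
   size of the initialized flexible array member, since DECL_SIZE_UNIT
   does not reliably reflect the "abc" initializer.  */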

/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If the size cannot be determined, set *PSIZE to
   unknown[object_size_type] and return false.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type, unsigned HOST_WIDE_INT *psize,
                  tree *pdecl /* = NULL */, tree *poff /* = NULL */)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  tree dummy_decl, dummy_off = size_zero_node;
  if (!pdecl)
    pdecl = &dummy_decl;
  if (!poff)
    poff = &dummy_off;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (!pt_var)
    return false;

  if (TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                       object_size_type & ~1, &sz, pdecl,
                                       poff);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          offset_int mem_offset;
          if (mem_ref_offset (pt_var).is_constant (&mem_offset))
            {
              if (*poff)
                *poff = wide_int_to_tree (ptrdiff_type_node,
                                          mem_offset + wi::to_offset (*poff));
              else
                *poff = wide_int_to_tree (ptrdiff_type_node, mem_offset);
              offset_int dsz = wi::sub (sz, mem_offset);
              if (wi::neg_p (dsz))
                sz = 0;
              else if (wi::fits_uhwi_p (dsz))
                sz = dsz.to_uhwi ();
              else
                sz = unknown[object_size_type];
            }
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (DECL_P (pt_var))
    {
      *pdecl = pt_var;
      pt_var_size = decl_init_size (pt_var, object_size_type & 2);
      if (!pt_var_size)
        return false;
    }
  else if (TREE_CODE (pt_var) == STRING_CST)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var_size)
    {
      /* Validate the size determined above.  */
      if (!tree_fits_uhwi_p (pt_var_size)
          || tree_to_uhwi (pt_var_size) >= offset_limit)
        return false;
    }

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return false;
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          *poff = bytes;
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              *poff = size_binop (PLUS_EXPR, *poff, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return false;
  else
    {
      bytes = pt_var_size;
      if (!*poff)
        *poff = size_zero_node;
    }

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}
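
/* For illustration: given

     struct T { char a[4]; char b[4]; } t;

   addr_object_size for &t.a[1] reports 7 remaining bytes for the
   whole-object queries (object_size_type 0 and 2) but only 3 for the
   subobject queries (object_size_type 1 and 3), where VAR above is
   the innermost array t.a rather than t itself.  */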


/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles calls to functions declared with attribute alloc_size.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  gcc_assert (is_gimple_call (call));

  tree calltype;
  if (tree callfn = gimple_call_fndecl (call))
    calltype = TREE_TYPE (callfn);
  else
    calltype = gimple_call_fntype (call);

  if (!calltype)
    return unknown[object_size_type];

  /* Set to positions of alloc_size arguments.  */
  int arg1 = -1, arg2 = -1;
  tree alloc_size = lookup_attribute ("alloc_size",
                                      TYPE_ATTRIBUTES (calltype));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  tree bytes = NULL_TREE;
  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
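
/* For illustration: with a declaration such as

     void *my_alloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

   a call my_alloc (10, 4) yields an alloc_object_size of 40, provided
   both arguments are INTEGER_CSTs; otherwise the result is
   unknown[object_size_type].  (my_alloc is a hypothetical example.)  */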


/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const gcall *call)
{
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
        return gimple_call_arg (call, argnum);
    }

  /* __builtin_assume_aligned is intentionally not marked RET1.  */
  if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
    return gimple_call_arg (call, 0);

  return NULL_TREE;
}
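
/* For illustration: calls such as memcpy (dst, src, n) are declared to
   return their first argument (ERF_RETURNS_ARG with argument number 0),
   so the object size of the call's result is simply the object size of
   DST.  */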


/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  If the declared object is known and PDECL
   is nonnull, sets *PDECL to the object's DECL.  OBJECT_SIZE_TYPE
   is the second argument to __builtin_object_size.
   Returns true on success and false when the object size could not
   be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
                             unsigned HOST_WIDE_INT *psize,
                             tree *pdecl /* = NULL */, tree *poff /* = NULL */)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  tree dummy_decl, dummy_off = size_zero_node;
  if (!pdecl)
    pdecl = &dummy_decl;
  if (!poff)
    poff = &dummy_off;

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize, pdecl, poff);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
        return false;

      /* When not optimizing, rather than failing, make a small effort
         to determine the object size without the full benefit of
         the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
        {
          tree_code code = gimple_assign_rhs_code (def);
          if (code == POINTER_PLUS_EXPR)
            {
              tree offset = gimple_assign_rhs2 (def);
              ptr = gimple_assign_rhs1 (def);

              if (tree_fits_shwi_p (offset)
                  && compute_builtin_object_size (ptr, object_size_type,
                                                  psize, pdecl, poff))
                {
                  /* Return zero when the offset is out of bounds.  */
                  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
                  *psize = off < *psize ? *psize - off : 0;
                  *poff = offset;
                  return true;
                }
            }
        }
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      if (num_ssa_names > object_sizes[object_size_type].length ())
        object_sizes[object_size_type].safe_grow (num_ssa_names, true);
      if (dump_file)
        {
          fprintf (dump_file, "Computing %s %sobject size for ",
                   (object_size_type & 2) ? "minimum" : "maximum",
                   (object_size_type & 1) ? "sub" : "");
          print_generic_expr (dump_file, ptr, dump_flags);
          fprintf (dump_file, ":\n");
        }

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that can be
         computed.  At the end the osi.reexamine bitmap contains the
         variables that were found in dependency cycles and therefore
         need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
         that need reexamination, until no object sizes are
         increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
        {
          bitmap reexamine = BITMAP_ALLOC (NULL);

          /* If looking for minimum instead of maximum object size,
             detect cases where a pointer is increased in a loop.
             Although even without this detection pass 2 would eventually
             terminate, it could take a long time.  If a pointer is
             increasing this way, we need to assume 0 object size.
             E.g. p = &buf[0]; while (cond) p = p + 4;  */
          if (object_size_type & 2)
            {
              osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
              osi.stack = XNEWVEC (unsigned int, num_ssa_names);
              osi.tos = osi.stack;
              osi.pass = 1;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  check_for_plus_in_loops (&osi, ssa_name (i));

              free (osi.depths);
              osi.depths = NULL;
              free (osi.stack);
              osi.stack = NULL;
              osi.tos = NULL;
            }

          do
            {
              osi.pass = 2;
              osi.changed = false;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  {
                    collect_object_sizes_for (&osi, ssa_name (i));
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Reexamining ");
                        print_generic_expr (dump_file, ssa_name (i),
                                            dump_flags);
                        fprintf (dump_file, "\n");
                      }
                  }
            }
          while (osi.changed);

          BITMAP_FREE (reexamine);
        }
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
        bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
        {
          EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
            if (object_sizes[object_size_type][i]
                != unknown[object_size_type])
              {
                print_generic_expr (dump_file, ssa_name (i),
                                    dump_flags);
                fprintf (dump_file,
                         ": %s %sobject size "
                         HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                         (object_size_type & 2) ? "minimum" : "maximum",
                         (object_size_type & 1) ? "sub" : "",
                         object_sizes[object_size_type][i]);
              }
        }

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
  return *psize != unknown[object_size_type];
}
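
/* For illustration, a hypothetical caller:

     unsigned HOST_WIDE_INT size;
     if (compute_builtin_object_size (ptr, 1, &size))
       ... the maximum subobject size of PTR is SIZE bytes ...

   On failure SIZE is left set to unknown[1], i.e. HOST_WIDE_INT_M1U.  */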

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}


/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
          addr_object_size (osi, op0, object_size_type, &bytes);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
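
/* For illustration: with

     char buf[10];
     p1 = &buf[2];
     p2 = p1 + 4;

   the object size of p2 is that of p1 (8 bytes) minus 4, i.e. 4; an
   offset past the remaining size yields 0 rather than wrapping.  */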


/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return reexamine;

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
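
/* For illustration: for

     char a[10], b[20];
     p = cond ? &a[2] : &b[5];

   the maximum object size of p is MAX (8, 15) = 15 bytes and the
   minimum object size is MIN (8, 15) = 8 bytes.  */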

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc the object size is
   the size of the allocation.
   For POINTER_PLUS_EXPR where the second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining bytes
   until the end of the object, the object size is 0, but if the addition
   is really a pointer subtraction in disguise, the object size is
   unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space; constants less than half of the address space are considered an
   addition, bigger constants a subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
   the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, call_stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
        expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
        /* Uninitialized SSA names point nowhere.  */
        object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}


/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if a pointer whose object size we are computing is being
   increased within a loop.  If yes, assume all the SSA variables
   participating in that loop have minimum object size 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names, true);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
 public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

 private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes

/* Dummy valueize function.  */

static tree
do_valueize (tree t)
{
  return t;
}

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          gimple *call = gsi_stmt (i);
          if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          init_object_sizes ();

          /* If insert_min_max_p, only attempt to fold
             __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
             and rather than folding the builtin to a constant, if any,
             create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
             call result and the computed constant.  */
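          /* For illustration, with insert_min_max_p a statement

               sz_5 = __builtin_object_size (p_1, 1);

             becomes (assuming the computed constant is N)

               tmp_7 = __builtin_object_size (p_1, 1);
               sz_5 = MIN_EXPR <tmp_7, N>;

             keeping the builtin around for later passes to refine.
             (sz_5, p_1 and tmp_7 are hypothetical SSA names.)  */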
          if (insert_min_max_p)
            {
              tree ost = gimple_call_arg (call, 1);
              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type
                    = tree_to_uhwi (ost);
                  tree ptr = gimple_call_arg (call, 0);
                  tree lhs = gimple_call_lhs (call);
                  if ((object_size_type == 1 || object_size_type == 3)
                      && (TREE_CODE (ptr) == ADDR_EXPR
                          || TREE_CODE (ptr) == SSA_NAME)
                      && lhs)
                    {
                      tree type = TREE_TYPE (lhs);
                      unsigned HOST_WIDE_INT bytes;
                      if (compute_builtin_object_size (ptr, object_size_type,
                                                       &bytes)
                          && wi::fits_to_tree_p (bytes, type))
                        {
                          tree tem = make_ssa_name (type);
                          gimple_call_set_lhs (call, tem);
                          enum tree_code code
                            = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
                          tree cst = build_int_cstu (type, bytes);
                          gimple *g
                            = gimple_build_assign (lhs, code, tem, cst);
                          gsi_insert_after (&i, g, GSI_NEW_STMT);
                          update_stmt (call);
                        }
                    }
                }
              continue;
            }

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          result = gimple_fold_stmt_to_constant (call, do_valueize);
          if (!result)
            {
              tree ost = gimple_call_arg (call, 1);

              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type
                    = tree_to_uhwi (ost);

                  if (object_size_type < 2)
                    result = fold_convert (size_type_node,
                                           integer_minus_one_node);
                  else if (object_size_type < 4)
                    result = build_zero_cst (size_type_node);
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result);
              fprintf (dump_file, "\n");
            }

          /* Propagate into all uses and fold those stmts.  */
          if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            replace_uses_by (lhs, result);
          else
            replace_call_with_value (&i, result);
        }
    }

  fini_object_sizes ();
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}