ipa-cp.c (ipcp_cloning_candidate_p): Use opt_for_fn.
[gcc.git] / gcc / tree-object-size.c
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tree-object-size.h"
27 #include "diagnostic-core.h"
28 #include "gimple-pretty-print.h"
29 #include "bitmap.h"
30 #include "predict.h"
31 #include "vec.h"
32 #include "hashtab.h"
33 #include "hash-set.h"
34 #include "machmode.h"
35 #include "hard-reg-set.h"
36 #include "input.h"
37 #include "function.h"
38 #include "dominance.h"
39 #include "cfg.h"
40 #include "basic-block.h"
41 #include "tree-ssa-alias.h"
42 #include "internal-fn.h"
43 #include "gimple-fold.h"
44 #include "gimple-expr.h"
45 #include "is-a.h"
46 #include "gimple.h"
47 #include "gimple-iterator.h"
48 #include "gimple-ssa.h"
49 #include "stringpool.h"
50 #include "tree-ssanames.h"
51 #include "tree-pass.h"
52 #include "tree-ssa-propagate.h"
53 #include "tree-phinodes.h"
54 #include "ssa-iterators.h"
55 #include "builtins.h"
56
/* State shared by one object-size propagation walk (see
   compute_builtin_object_size).  */
struct object_size_info
{
  /* Which of the four size kinds is being computed; index into
     object_sizes[] and computed[] below.  */
  int object_size_type;
  /* visited: SSA names already entered during pass 0.
     reexamine: SSA names found in dependency cycles that still need
     recomputation in later passes.  */
  bitmap visited, reexamine;
  /* 0: initial use-def walk, 1: pointer-plus-in-loop detection
     (minimum sizes only), 2: iterate until fixpoint.  */
  int pass;
  /* Set when a merge changed some recorded size; drives the pass-2
     do/while fixpoint loop.  */
  bool changed;
  /* Scratch state for check_for_plus_in_loops{,_1} (pass 1 only):
     per-SSA-name DFS depth, and an explicit DFS stack with its
     top-of-stack pointer.  */
  unsigned int *depths;
  unsigned int *stack, *tos;
};

/* "Size unknown" sentinel, per object_size_type: all-ones for the two
   maximum kinds (0, 1) and zero for the two minimum kinds (2, 3), so
   that unknown is absorbing under the max/min merges used below.  */
static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
						const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.
   Each vector is indexed by SSA_NAME_VERSION.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];

/* Bitmaps what object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;
98
99
100 /* Initialize OFFSET_LIMIT variable. */
101 static void
102 init_offset_limit (void)
103 {
104 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
105 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
106 else
107 offset_limit = -1;
108 offset_limit /= 2;
109 }
110
111
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  EXPR is expected to be a handled_component_p chain
   (or MEM_REF) whose innermost base is VAR; the walk recurses inward
   and sums the per-level offsets on the way out.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  /* Flipped to MINUS_EXPR for negative array indices.  */
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* Field offset is DECL_FIELD_OFFSET plus the byte part of
	 DECL_FIELD_BIT_OFFSET.  */
      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not change the offset; just look through them.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* The imaginary part sits one element past the real part.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  /* Negative index: subtract |index| * elt_size instead.  */
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      /* The MEM_REF's constant offset is the whole answer here.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
176
177
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  OSI may be NULL when
   called outside the SSA propagation (from compute_builtin_object_size
   directly).  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Strip component references to find the base object.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  /* No propagation state usable: compute the base pointer's
	     whole-object size (subobject bit cleared) from scratch.  */
	  sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
					    object_size_type & ~1);
	}
      else
	{
	  /* Reuse (or trigger) the SSA walk for the base pointer.  */
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      if (sz != unknown[object_size_type])
	{
	  /* Subtract the MEM_REF offset; clamp negative results to 0.  */
	  offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
	  if (wi::neg_p (dsz))
	    sz = 0;
	  else if (wi::fits_uhwi_p (dsz))
	    sz = dsz.to_uhwi ();
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
	   && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	   && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  /* Subobject size requested: find the innermost accessed
	     array or field whose address was taken.  */
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  /* Fall back to the whole object if the subobject's size is
	     unusable or exceeds the containing object's size.  */
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    /* Index provably below the array bound:
			       not a trailing-array access.  */
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    /* Look through union members.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			/* Is this field followed by another FIELD_DECL?
			   If so it is not the trailing field.  */
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    /* Again look through union members.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      /* Reached the base through trailing members only:
		 treat as the whole object.  */
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return unknown[object_size_type];
      else
	var_size = pt_var_size;
      /* Remaining bytes = size of VAR minus PTR's offset within it.  */
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      /* For MEM_REF bases also bound by the containing object's
	 remaining bytes and take the minimum.  */
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
397
398
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  /* 0-based indexes of the size argument(s); -1 when absent.
     When both are valid, the size is arg1 * arg2 (calloc style).  */
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  /* First honor a user-supplied alloc_size attribute; its values are
     1-based argument positions.  */
  alloc_size = lookup_attribute ("alloc_size",
				 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
	arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  /* Known allocation builtins override/provide the argument indexes.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	arg1 = 0;
	/* fall through */
      default:
	break;
      }

  /* Only constant sizes are usable.  */
  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int)gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
	fold_convert (sizetype, gimple_call_arg (call, arg1)),
	fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
461
462
463 /* If object size is propagated from one of function's arguments directly
464 to its return value, return that argument for GIMPLE_CALL statement CALL.
465 Otherwise return NULL. */
466
467 static tree
468 pass_through_call (const_gimple call)
469 {
470 tree callee = gimple_call_fndecl (call);
471
472 if (callee
473 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
474 switch (DECL_FUNCTION_CODE (callee))
475 {
476 case BUILT_IN_MEMCPY:
477 case BUILT_IN_MEMMOVE:
478 case BUILT_IN_MEMSET:
479 case BUILT_IN_STRCPY:
480 case BUILT_IN_STRNCPY:
481 case BUILT_IN_STRCAT:
482 case BUILT_IN_STRNCAT:
483 case BUILT_IN_MEMCPY_CHK:
484 case BUILT_IN_MEMMOVE_CHK:
485 case BUILT_IN_MEMSET_CHK:
486 case BUILT_IN_STRCPY_CHK:
487 case BUILT_IN_STRNCPY_CHK:
488 case BUILT_IN_STPNCPY_CHK:
489 case BUILT_IN_STRCAT_CHK:
490 case BUILT_IN_STRNCAT_CHK:
491 case BUILT_IN_ASSUME_ALIGNED:
492 if (gimple_call_num_args (call) >= 1)
493 return gimple_call_arg (call, 0);
494 break;
495 default:
496 break;
497 }
498
499 return NULL_TREE;
500 }
501
502
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  Returns the size in bytes,
   or unknown[object_size_type] when it cannot be determined.  Results for
   SSA names are cached in object_sizes[]/computed[].  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Lazily initialized; 0 means "not yet set up".  */
  if (! offset_limit)
    init_offset_limit ();

  /* A bare &object can be answered without any propagation.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  /* computed[] is NULL when the pass has not initialized its state
     (NOTE: presumably by init_object_sizes, outside this chunk).  */
  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && computed[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
	{
	  struct object_size_info osi;
	  bitmap_iterator bi;
	  unsigned int i;

	  if (num_ssa_names > object_sizes[object_size_type].length ())
	    object_sizes[object_size_type].safe_grow (num_ssa_names);
	  if (dump_file)
	    {
	      fprintf (dump_file, "Computing %s %sobject size for ",
		       (object_size_type & 2) ? "minimum" : "maximum",
		       (object_size_type & 1) ? "sub" : "");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, ":\n");
	    }

	  osi.visited = BITMAP_ALLOC (NULL);
	  osi.reexamine = BITMAP_ALLOC (NULL);
	  osi.object_size_type = object_size_type;
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;

	  /* First pass: walk UD chains, compute object sizes that
	     can be computed.  osi.reexamine bitmap at the end will
	     contain what variables were found in dependency cycles
	     and therefore need to be reexamined.  */
	  osi.pass = 0;
	  osi.changed = false;
	  collect_object_sizes_for (&osi, ptr);

	  /* Second pass: keep recomputing object sizes of variables
	     that need reexamination, until no object sizes are
	     increased or all object sizes are computed.  */
	  if (! bitmap_empty_p (osi.reexamine))
	    {
	      bitmap reexamine = BITMAP_ALLOC (NULL);

	      /* If looking for minimum instead of maximum object size,
		 detect cases where a pointer is increased in a loop.
		 Although even without this detection pass 2 would eventually
		 terminate, it could take a long time.  If a pointer is
		 increasing this way, we need to assume 0 object size.
		 E.g. p = &buf[0]; while (cond) p = p + 4;  */
	      if (object_size_type & 2)
		{
		  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
		  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
		  osi.tos = osi.stack;
		  osi.pass = 1;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      check_for_plus_in_loops (&osi, ssa_name (i));

		  free (osi.depths);
		  osi.depths = NULL;
		  free (osi.stack);
		  osi.stack = NULL;
		  osi.tos = NULL;
		}

	      do
		{
		  osi.pass = 2;
		  osi.changed = false;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      {
			collect_object_sizes_for (&osi, ssa_name (i));
			if (dump_file && (dump_flags & TDF_DETAILS))
			  {
			    fprintf (dump_file, "Reexamining ");
			    print_generic_expr (dump_file, ssa_name (i),
						dump_flags);
			    fprintf (dump_file, "\n");
			  }
		      }
		}
	      while (osi.changed);

	      BITMAP_FREE (reexamine);
	    }
	  /* Whatever is still marked for reexamination has converged;
	     record it as computed.  */
	  EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	    bitmap_set_bit (computed[object_size_type], i);

	  /* Debugging dumps.  */
	  if (dump_file)
	    {
	      EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
		if (object_sizes[object_size_type][i]
		    != unknown[object_size_type])
		  {
		    print_generic_expr (dump_file, ssa_name (i),
					dump_flags);
		    fprintf (dump_file,
			     ": %s %sobject size "
			     HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			     (object_size_type & 2) ? "minimum" : "maximum",
			     (object_size_type & 1) ? "sub" : "",
			     object_sizes[object_size_type][i]);
		  }
	    }

	  BITMAP_FREE (osi.reexamine);
	  BITMAP_FREE (osi.visited);
	}

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
640
641 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
642
643 static void
644 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
645 {
646 int object_size_type = osi->object_size_type;
647 unsigned int varno = SSA_NAME_VERSION (ptr);
648 unsigned HOST_WIDE_INT bytes;
649
650 gcc_assert (object_sizes[object_size_type][varno]
651 != unknown[object_size_type]);
652 gcc_assert (osi->pass == 0);
653
654 if (TREE_CODE (value) == WITH_SIZE_EXPR)
655 value = TREE_OPERAND (value, 0);
656
657 /* Pointer variables should have been handled by merge_object_sizes. */
658 gcc_assert (TREE_CODE (value) != SSA_NAME
659 || !POINTER_TYPE_P (TREE_TYPE (value)));
660
661 if (TREE_CODE (value) == ADDR_EXPR)
662 bytes = addr_object_size (osi, value, object_size_type);
663 else
664 bytes = unknown[object_size_type];
665
666 if ((object_size_type & 2) == 0)
667 {
668 if (object_sizes[object_size_type][varno] < bytes)
669 object_sizes[object_size_type][varno] = bytes;
670 }
671 else
672 {
673 if (object_sizes[object_size_type][varno] > bytes)
674 object_sizes[object_size_type][varno] = bytes;
675 }
676 }
677
678
679 /* Compute object_sizes for PTR, defined to the result of a call. */
680
681 static void
682 call_object_size (struct object_size_info *osi, tree ptr, gimple call)
683 {
684 int object_size_type = osi->object_size_type;
685 unsigned int varno = SSA_NAME_VERSION (ptr);
686 unsigned HOST_WIDE_INT bytes;
687
688 gcc_assert (is_gimple_call (call));
689
690 gcc_assert (object_sizes[object_size_type][varno]
691 != unknown[object_size_type]);
692 gcc_assert (osi->pass == 0);
693
694 bytes = alloc_object_size (call, object_size_type);
695
696 if ((object_size_type & 2) == 0)
697 {
698 if (object_sizes[object_size_type][varno] < bytes)
699 object_sizes[object_size_type][varno] = bytes;
700 }
701 else
702 {
703 if (object_sizes[object_size_type][varno] > bytes)
704 object_sizes[object_size_type][varno] = bytes;
705 }
706 }
707
708
709 /* Compute object_sizes for PTR, defined to an unknown value. */
710
711 static void
712 unknown_object_size (struct object_size_info *osi, tree ptr)
713 {
714 int object_size_type = osi->object_size_type;
715 unsigned int varno = SSA_NAME_VERSION (ptr);
716 unsigned HOST_WIDE_INT bytes;
717
718 gcc_assert (object_sizes[object_size_type][varno]
719 != unknown[object_size_type]);
720 gcc_assert (osi->pass == 0);
721
722 bytes = unknown[object_size_type];
723
724 if ((object_size_type & 2) == 0)
725 {
726 if (object_sizes[object_size_type][varno] < bytes)
727 object_sizes[object_size_type][varno] = bytes;
728 }
729 else
730 {
731 if (object_sizes[object_size_type][varno] > bytes)
732 object_sizes[object_size_type][varno] = bytes;
733 }
734 }
735
736
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later (i.e. ORIG is itself
   still marked for reexamination).  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  /* Already saturated to unknown: nothing can change it.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  /* Offsets at or beyond offset_limit are treated as pointer
     subtraction; the result is unknown.  */
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  /* During the initial walk, make sure ORIG has been visited first.  */
  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    /* Clamp at zero when the offset exceeds the remaining bytes.  */
    orig_bytes = (offset > orig_bytes)
		 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  /* Merge: maximum kinds take the larger value, minimum kinds the
     smaller; flag any change so pass 2 keeps iterating.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
782
783
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR (or an equivalent &MEM_REF[ptr + off]).
   Return true if the object size might need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      /* &MEM_REF[op0 + op1] behaves like op0 p+ op1.  */
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	/* SSA base: defer to the merge machinery with the offset.  */
	return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  Offsets beyond offset_limit
	     are assumed to be pointer subtraction, hence unknown.  */
	  bytes = addr_object_size (osi, op0, object_size_type);
	  if (bytes == unknown[object_size_type])
	    ;
	  else if (off > offset_limit)
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown[object_size_type];

  /* Merge: maximum kinds take the larger, minimum kinds the smaller.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
854
855
856 /* Compute object_sizes for VAR, defined at STMT, which is
857 a COND_EXPR. Return true if the object size might need reexamination
858 later. */
859
860 static bool
861 cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
862 {
863 tree then_, else_;
864 int object_size_type = osi->object_size_type;
865 unsigned int varno = SSA_NAME_VERSION (var);
866 bool reexamine = false;
867
868 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
869
870 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
871 return false;
872
873 then_ = gimple_assign_rhs2 (stmt);
874 else_ = gimple_assign_rhs3 (stmt);
875
876 if (TREE_CODE (then_) == SSA_NAME)
877 reexamine |= merge_object_sizes (osi, var, then_, 0);
878 else
879 expr_object_size (osi, var, then_);
880
881 if (TREE_CODE (else_) == SSA_NAME)
882 reexamine |= merge_object_sizes (osi, var, else_, 0);
883 else
884 expr_object_size (osi, var, else_);
885
886 return reexamine;
887 }
888
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  /* Already finalized in a previous invocation.  */
  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      /* bitmap_set_bit returns true iff the bit was newly set, i.e.
	 this is the first visit of VAR.  */
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* Seed with the identity of the merge: 0 for maximum kinds,
	     all-ones for minimum kinds.  */
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of defining statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	/* memcpy-like calls propagate the size of their argument.  */
	tree arg = pass_through_call (stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    /* Once saturated to unknown, further merging is pointless.  */
	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* A size that reached unknown, or one with no unresolved inputs,
     is final; otherwise queue VAR for reexamination.  */
  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
1048
1049
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  DEPTH counts the number of non-zero pointer
   additions on the current DFS path; revisiting VAR at a different
   depth means the path gained an addition, i.e. a growing pointer.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  Assume zero
	     minimum size for every SSA name on the cycle: pop the
	     stack down to VAR, finalizing each entry as size 0.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    /* Only names still awaiting reexamination can be on a cycle
       of interest.  */
    return;

  /* Push VAR on the DFS path.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    /* Plain copy: same depth.  */
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* A non-zero constant addition deepens the path by one.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	tree arg = pass_through_call (stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR off the DFS path.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1147
1148
1149 /* Check if some pointer we are computing object size of is being increased
1150 within a loop. If yes, assume all the SSA variables participating in
1151 that loop have minimum object sizes 0. */
1152
1153 static void
1154 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1155 {
1156 gimple stmt = SSA_NAME_DEF_STMT (var);
1157
1158 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1159 and looked for a POINTER_PLUS_EXPR in the pass-through
1160 argument, if any. In GIMPLE, however, such an expression
1161 is not a valid call operand. */
1162
1163 if (is_gimple_assign (stmt)
1164 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1165 {
1166 tree basevar = gimple_assign_rhs1 (stmt);
1167 tree cst = gimple_assign_rhs2 (stmt);
1168
1169 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1170
1171 if (integer_zerop (cst))
1172 return;
1173
1174 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1175 *osi->tos++ = SSA_NAME_VERSION (basevar);
1176 check_for_plus_in_loops_1 (osi, var, 2);
1177 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1178 osi->tos--;
1179 }
1180 }
1181
1182
1183 /* Initialize data structures for the object size computation. */
1184
1185 void
1186 init_object_sizes (void)
1187 {
1188 int object_size_type;
1189
1190 if (computed[0])
1191 return;
1192
1193 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1194 {
1195 object_sizes[object_size_type].safe_grow (num_ssa_names);
1196 computed[object_size_type] = BITMAP_ALLOC (NULL);
1197 }
1198
1199 init_offset_limit ();
1200 }
1201
1202
1203 /* Destroy data structures after the object size computation. */
1204
1205 static void
1206 fini_object_sizes (void)
1207 {
1208 int object_size_type;
1209
1210 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1211 {
1212 object_sizes[object_size_type].release ();
1213 BITMAP_FREE (computed[object_size_type]);
1214 }
1215 }
1216
1217
1218 /* Simple pass to optimize all __builtin_object_size () builtins. */
1219
1220 namespace {
1221
/* Metadata for the objsz pass: a GIMPLE pass that requires CFG and
   SSA form, is untimed, and provides/destroys no properties.  */

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1234
/* The objsz pass object; all the work happens in execute.  */

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  /* clone returns a fresh instance — presumably so the pass can be
     scheduled more than once in the pipeline; confirm against
     opt_pass documentation in tree-pass.h.  */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_object_sizes
1247
/* Main routine of the pass: walk every statement in FUN and replace
   each __builtin_object_size call whose result can be determined with
   the resulting integer constant.  Always returns 0 (no extra TODO
   flags for the pass manager).  */

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  /* Lazily set up the per-SSA-name tables; this is a no-op
	     after the first builtin is seen.  */
	  init_object_sizes ();
	  result = fold_call_stmt (call, false);
	  if (!result)
	    {
	      /* Folding could not compute a size: fall back to the
		 "unknown" answer for the requested type — (size_t) -1
		 for the maximum-size variants (types 0 and 1), 0 for
		 the minimum-size variants (types 2 and 3).  */
	      if (gimple_call_num_args (call) == 2
		  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
		{
		  tree ost = gimple_call_arg (call, 1);

		  if (tree_fits_uhwi_p (ost))
		    {
		      unsigned HOST_WIDE_INT object_size_type
			= tree_to_uhwi (ost);

		      if (object_size_type < 2)
			result = fold_convert (size_type_node,
					       integer_minus_one_node);
		      else if (object_size_type < 4)
			result = build_zero_cst (size_type_node);
		    }
		}

	      /* Malformed calls (wrong arity, non-pointer first
		 argument, or out-of-range type) are left untouched.  */
	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, "\n");
	    }

	  /* A call whose result is unused needs no propagation.  */
	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  /* Propagate into all uses and fold those stmts.  The call
	     itself is not removed here; its lhs becomes unused.  */
	  gimple use_stmt;
	  imm_use_iterator iter;
	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
	    {
	      use_operand_p use_p;
	      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		SET_USE (use_p, result);
	      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	      fold_stmt (&gsi);
	      update_stmt (gsi_stmt (gsi));
	    }
	}
    }

  fini_object_sizes ();
  return 0;
}
1321
1322 } // anon namespace
1323
/* Factory used by the pass manager to instantiate the objsz pass
   within context CTXT.  Caller takes ownership of the returned
   pass object.  */

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}