/* gcc/tree-object-size.c */
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4 Contributed by Jakub Jelinek <jakub@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "diagnostic-core.h"
28 #include "gimple-pretty-print.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32
/* Per-query state for the object size computation.  One instance lives
   on the stack of compute_builtin_object_size for the duration of a
   single __builtin_object_size evaluation.  */
struct object_size_info
{
  /* The second argument of __builtin_object_size (0-3); see the
     comment above object_sizes below for the meaning of each value.  */
  int object_size_type;
  /* VISITED: SSA name versions already entered by
     collect_object_sizes_for.  REEXAMINE: versions found inside
     dependency cycles that need to be revisited in pass 2.  */
  bitmap visited, reexamine;
  /* 0: initial UD-chain walk; 1: loop (pointer-increase) detection;
     2: fixpoint reexamination.  */
  int pass;
  /* Set in pass 2 when some object size was updated, forcing another
     iteration.  */
  bool changed;
  /* Pass-1 scratch: per-SSA-version recursion depth, and an explicit
     stack with TOS as its top-of-stack pointer (used by
     check_for_plus_in_loops_1).  */
  unsigned int *depths;
  unsigned int *stack, *tos;
};
42
/* Result returned when a size cannot be determined: (size_t) -1
   ("assume everything fits") for the maximum modes 0 and 1, and 0
   ("assume nothing fits") for the minimum modes 2 and 3.  Indexed by
   object_size_type.  */
static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
						const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.
   Each array is indexed by SSA name version.  */
static unsigned HOST_WIDE_INT *object_sizes[4];

/* Bitmaps what object sizes have been computed already.
   Indexed like object_sizes, by SSA name version.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.
   Larger constants are treated as the result of pointer subtraction
   and make the size unknown (see collect_object_sizes_for's comment).  */
static unsigned HOST_WIDE_INT offset_limit;
75
76
77 /* Initialize OFFSET_LIMIT variable. */
78 static void
79 init_offset_limit (void)
80 {
81 if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
82 offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
83 else
84 offset_limit = -1;
85 offset_limit /= 2;
86 }
87
88
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  /* Reached the containing object itself: offset is zero.  */
  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* Field offset is the byte offset of the field plus its
	 (constant) bit offset converted to bytes.  */
      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These wrappers do not change the offset.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* The imaginary part is stored one element past the real part.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* Fold a constant negative index as a subtraction of its
	 absolute value so the arithmetic stays in sizetype.  */
      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return double_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
153
154
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Strip component references to find the base object.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      /* For MEM_REF [ptr + off], the size of the whole object comes
	 from the size recorded for PTR; use the recursive entry point
	 when no OSI is available or a subobject size was requested
	 (the base pointer's size is always a whole-object size).  */
      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
					    object_size_type & ~1);
	}
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      /* Subtract the MEM_REF offset using double_int arithmetic so
	 overflow is detected rather than wrapped.  */
      if (sz != unknown[object_size_type])
	{
	  double_int dsz = double_int_sub (uhwi_to_double_int (sz),
					   mem_ref_offset (pt_var));
	  if (double_int_negative_p (dsz))
	    sz = 0;
	  else if (double_int_fits_in_uhwi_p (dsz))
	    sz = double_int_to_uhwi (dsz);
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && host_integerp (DECL_SIZE_UNIT (pt_var), 1)
	   && (unsigned HOST_WIDE_INT)
	        tree_low_cst (DECL_SIZE_UNIT (pt_var), 1) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	   && (unsigned HOST_WIDE_INT)
	      tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  /* PTR addresses something inside the base object; for subobject
     queries pick the innermost enclosing array/field as VAR, then
     compute the remaining bytes from PTR to the end of VAR.  */
  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  var = TREE_OPERAND (ptr, 0);

	  /* Find the innermost reference that is the start of a
	     subobject.  */
	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  /* Fall back to the whole object when the subobject size is
	     unknown or would exceed the whole object's size.  */
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			/* An index strictly below the array bound cannot
			   be the trailing element, so the walk can stop
			   (v = NULL_TREE means "size is computable").  */
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    /* Skip references through unions; union members
		       share storage, so they carry no position
		       information within the record.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			/* Is there a FIELD_DECL after this one?  If so
			   the field is not trailing and the subobject
			   size is meaningful.  */
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      /* v == pt_var means the reference ended in a possibly
		 trailing field; use the whole object instead.  */
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return unknown[object_size_type];
      else
	var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      /* For MEM_REF bases also clamp by the remaining bytes within
	 the whole object, and keep the smaller of the two answers.  */
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
377
378
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  /* Zero-based indexes of the size arguments; -1 means absent.  When
     both are set the allocation size is arg1 * arg2 (calloc style).  */
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  /* Functions may declare their size semantics with
     __attribute__ ((alloc_size (...))); the attribute's arguments are
     1-based, hence the -1 adjustments below.  */
  alloc_size = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (TREE_TYPE(callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  /* Known builtins override any attribute settings.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	arg1 = 0;
	/* fall through */
      default:
	break;
      }

  /* The sizes are only usable when the arguments exist and are
     compile-time integer constants.  */
  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int)gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
			fold_convert (sizetype, gimple_call_arg (call, arg1)),
			fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
440
441
442 /* If object size is propagated from one of function's arguments directly
443 to its return value, return that argument for GIMPLE_CALL statement CALL.
444 Otherwise return NULL. */
445
446 static tree
447 pass_through_call (const_gimple call)
448 {
449 tree callee = gimple_call_fndecl (call);
450
451 if (callee
452 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
453 switch (DECL_FUNCTION_CODE (callee))
454 {
455 case BUILT_IN_MEMCPY:
456 case BUILT_IN_MEMMOVE:
457 case BUILT_IN_MEMSET:
458 case BUILT_IN_STRCPY:
459 case BUILT_IN_STRNCPY:
460 case BUILT_IN_STRCAT:
461 case BUILT_IN_STRNCAT:
462 case BUILT_IN_MEMCPY_CHK:
463 case BUILT_IN_MEMMOVE_CHK:
464 case BUILT_IN_MEMSET_CHK:
465 case BUILT_IN_STRCPY_CHK:
466 case BUILT_IN_STRNCPY_CHK:
467 case BUILT_IN_STPNCPY_CHK:
468 case BUILT_IN_STRCAT_CHK:
469 case BUILT_IN_STRNCAT_CHK:
470 case BUILT_IN_ASSUME_ALIGNED:
471 if (gimple_call_num_args (call) >= 1)
472 return gimple_call_arg (call, 0);
473 break;
474 default:
475 break;
476 }
477
478 return NULL_TREE;
479 }
480
481
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  /* A constant address can be answered directly.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
	{
	  struct object_size_info osi;
	  bitmap_iterator bi;
	  unsigned int i;

	  if (dump_file)
	    {
	      fprintf (dump_file, "Computing %s %sobject size for ",
		       (object_size_type & 2) ? "minimum" : "maximum",
		       (object_size_type & 1) ? "sub" : "");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, ":\n");
	    }

	  osi.visited = BITMAP_ALLOC (NULL);
	  osi.reexamine = BITMAP_ALLOC (NULL);
	  osi.object_size_type = object_size_type;
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;

	  /* First pass: walk UD chains, compute object sizes that
	     can be computed.  osi.reexamine bitmap at the end will
	     contain what variables were found in dependency cycles
	     and therefore need to be reexamined.  */
	  osi.pass = 0;
	  osi.changed = false;
	  collect_object_sizes_for (&osi, ptr);

	  /* Second pass: keep recomputing object sizes of variables
	     that need reexamination, until no object sizes are
	     increased or all object sizes are computed.  */
	  if (! bitmap_empty_p (osi.reexamine))
	    {
	      bitmap reexamine = BITMAP_ALLOC (NULL);

	      /* If looking for minimum instead of maximum object size,
		 detect cases where a pointer is increased in a loop.
		 Although even without this detection pass 2 would eventually
		 terminate, it could take a long time.  If a pointer is
		 increasing this way, we need to assume 0 object size.
		 E.g. p = &buf[0]; while (cond) p = p + 4;  */
	      if (object_size_type & 2)
		{
		  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
		  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
		  osi.tos = osi.stack;
		  osi.pass = 1;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      check_for_plus_in_loops (&osi, ssa_name (i));

		  free (osi.depths);
		  osi.depths = NULL;
		  free (osi.stack);
		  osi.stack = NULL;
		  osi.tos = NULL;
		}

	      do
		{
		  osi.pass = 2;
		  osi.changed = false;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      {
			collect_object_sizes_for (&osi, ssa_name (i));
			if (dump_file && (dump_flags & TDF_DETAILS))
			  {
			    fprintf (dump_file, "Reexamining ");
			    print_generic_expr (dump_file, ssa_name (i),
						dump_flags);
			    fprintf (dump_file, "\n");
			  }
		      }
		}
	      while (osi.changed);

	      BITMAP_FREE (reexamine);
	    }
	  /* Whatever is left in osi.reexamine has reached a fixed
	     point; mark it as computed.  */
	  EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	    bitmap_set_bit (computed[object_size_type], i);

	  /* Debugging dumps.  */
	  if (dump_file)
	    {
	      EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
		if (object_sizes[object_size_type][i]
		    != unknown[object_size_type])
		  {
		    print_generic_expr (dump_file, ssa_name (i),
					dump_flags);
		    fprintf (dump_file,
			     ": %s %sobject size "
			     HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			     (object_size_type & 2) ? "minimum" : "maximum",
			     (object_size_type & 1) ? "sub" : "",
			     object_sizes[object_size_type][i]);
		  }
	    }

	  BITMAP_FREE (osi.reexamine);
	  BITMAP_FREE (osi.visited);
	}

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
617
618 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
619
620 static void
621 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
622 {
623 int object_size_type = osi->object_size_type;
624 unsigned int varno = SSA_NAME_VERSION (ptr);
625 unsigned HOST_WIDE_INT bytes;
626
627 gcc_assert (object_sizes[object_size_type][varno]
628 != unknown[object_size_type]);
629 gcc_assert (osi->pass == 0);
630
631 if (TREE_CODE (value) == WITH_SIZE_EXPR)
632 value = TREE_OPERAND (value, 0);
633
634 /* Pointer variables should have been handled by merge_object_sizes. */
635 gcc_assert (TREE_CODE (value) != SSA_NAME
636 || !POINTER_TYPE_P (TREE_TYPE (value)));
637
638 if (TREE_CODE (value) == ADDR_EXPR)
639 bytes = addr_object_size (osi, value, object_size_type);
640 else
641 bytes = unknown[object_size_type];
642
643 if ((object_size_type & 2) == 0)
644 {
645 if (object_sizes[object_size_type][varno] < bytes)
646 object_sizes[object_size_type][varno] = bytes;
647 }
648 else
649 {
650 if (object_sizes[object_size_type][varno] > bytes)
651 object_sizes[object_size_type][varno] = bytes;
652 }
653 }
654
655
656 /* Compute object_sizes for PTR, defined to the result of a call. */
657
658 static void
659 call_object_size (struct object_size_info *osi, tree ptr, gimple call)
660 {
661 int object_size_type = osi->object_size_type;
662 unsigned int varno = SSA_NAME_VERSION (ptr);
663 unsigned HOST_WIDE_INT bytes;
664
665 gcc_assert (is_gimple_call (call));
666
667 gcc_assert (object_sizes[object_size_type][varno]
668 != unknown[object_size_type]);
669 gcc_assert (osi->pass == 0);
670
671 bytes = alloc_object_size (call, object_size_type);
672
673 if ((object_size_type & 2) == 0)
674 {
675 if (object_sizes[object_size_type][varno] < bytes)
676 object_sizes[object_size_type][varno] = bytes;
677 }
678 else
679 {
680 if (object_sizes[object_size_type][varno] > bytes)
681 object_sizes[object_size_type][varno] = bytes;
682 }
683 }
684
685
686 /* Compute object_sizes for PTR, defined to an unknown value. */
687
688 static void
689 unknown_object_size (struct object_size_info *osi, tree ptr)
690 {
691 int object_size_type = osi->object_size_type;
692 unsigned int varno = SSA_NAME_VERSION (ptr);
693 unsigned HOST_WIDE_INT bytes;
694
695 gcc_assert (object_sizes[object_size_type][varno]
696 != unknown[object_size_type]);
697 gcc_assert (osi->pass == 0);
698
699 bytes = unknown[object_size_type];
700
701 if ((object_size_type & 2) == 0)
702 {
703 if (object_sizes[object_size_type][varno] < bytes)
704 object_sizes[object_size_type][varno] = bytes;
705 }
706 else
707 {
708 if (object_sizes[object_size_type][varno] > bytes)
709 object_sizes[object_size_type][varno] = bytes;
710 }
711 }
712
713
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  /* DEST already saturated to unknown; nothing can change it.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  /* Offsets at or past offset_limit are treated as pointer
     subtraction, which makes the size unknown.  */
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  /* Remaining bytes of ORIG reduced by OFFSET, clamped at zero.  */
  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
		 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  /* If ORIG itself sits in an unresolved dependency cycle, DEST may
     need another look once ORIG settles.  */
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
759
760
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      /* &MEM_REF [ptr + off] is equivalent to ptr p+ off.  */
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! host_integerp (op1, 1))
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	/* Delegate SSA bases entirely; the merge also reports whether
	   reexamination is needed.  */
	return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

          /* op0 will be ADDR_EXPR here.  */
	  bytes = addr_object_size (osi, op0, object_size_type);
	  if (bytes == unknown[object_size_type])
	    ;
	  else if (off > offset_limit)
	    /* Too-large offsets are treated as pointer subtraction.  */
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
831
832
833 /* Compute object_sizes for VAR, defined at STMT, which is
834 a COND_EXPR. Return true if the object size might need reexamination
835 later. */
836
837 static bool
838 cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
839 {
840 tree then_, else_;
841 int object_size_type = osi->object_size_type;
842 unsigned int varno = SSA_NAME_VERSION (var);
843 bool reexamine = false;
844
845 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
846
847 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
848 return false;
849
850 then_ = gimple_assign_rhs2 (stmt);
851 else_ = gimple_assign_rhs3 (stmt);
852
853 if (TREE_CODE (then_) == SSA_NAME)
854 reexamine |= merge_object_sizes (osi, var, then_, 0);
855 else
856 expr_object_size (osi, var, then_);
857
858 if (TREE_CODE (else_) == SSA_NAME)
859 reexamine |= merge_object_sizes (osi, var, else_, 0);
860 else
861 expr_object_size (osi, var, else_);
862
863 return reexamine;
864 }
865
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* First visit: start from the identity of the merge
	     operation (-1 for minimum modes, 0 for maximum modes).  */
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of defining statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	/* A memcpy-like builtin returns its first argument; otherwise
	   treat the call as a (possible) allocation.  */
	tree arg = pass_through_call (stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      {
	tree decl = SSA_NAME_VAR (var);

	if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
	  expr_object_size (osi, var, DECL_INITIAL (decl));
	else
	  expr_object_size (osi, var, decl);
      }
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    /* Once saturated to unknown, merging further arguments
	       cannot change anything.  */
	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      /* The result is final: record it and drop any reexamine mark.  */
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
1027
1028
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  DEPTH grew since
	     VAR was pushed, so a nonzero POINTER_PLUS lies on the
	     cycle; force object size 0 on every name on the stack down
	     to VAR.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    /* Names outside the reexamine set already have final sizes and
       cannot participate in a growth cycle.  */
    return;

  /* Push VAR on the explicit stack and record its depth.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* A nonzero addition bumps the depth, which is how a
	       growing pointer cycle is recognized above.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	tree arg = pass_through_call (stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR off the stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1126
1127
1128 /* Check if some pointer we are computing object size of is being increased
1129 within a loop. If yes, assume all the SSA variables participating in
1130 that loop have minimum object sizes 0. */
1131
1132 static void
1133 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1134 {
1135 gimple stmt = SSA_NAME_DEF_STMT (var);
1136
1137 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1138 and looked for a POINTER_PLUS_EXPR in the pass-through
1139 argument, if any. In GIMPLE, however, such an expression
1140 is not a valid call operand. */
1141
1142 if (is_gimple_assign (stmt)
1143 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1144 {
1145 tree basevar = gimple_assign_rhs1 (stmt);
1146 tree cst = gimple_assign_rhs2 (stmt);
1147
1148 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1149
1150 if (integer_zerop (cst))
1151 return;
1152
1153 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1154 *osi->tos++ = SSA_NAME_VERSION (basevar);
1155 check_for_plus_in_loops_1 (osi, var, 2);
1156 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1157 osi->tos--;
1158 }
1159 }
1160
1161
1162 /* Initialize data structures for the object size computation. */
1163
1164 void
1165 init_object_sizes (void)
1166 {
1167 int object_size_type;
1168
1169 if (object_sizes[0])
1170 return;
1171
1172 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1173 {
1174 object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
1175 computed[object_size_type] = BITMAP_ALLOC (NULL);
1176 }
1177
1178 init_offset_limit ();
1179 }
1180
1181
1182 /* Destroy data structures after the object size computation. */
1183
1184 void
1185 fini_object_sizes (void)
1186 {
1187 int object_size_type;
1188
1189 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1190 {
1191 free (object_sizes[object_size_type]);
1192 BITMAP_FREE (computed[object_size_type]);
1193 object_sizes[object_size_type] = NULL;
1194 }
1195 }
1196
1197
1198 /* Simple pass to optimize all __builtin_object_size () builtins. */
1199
static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  /* Walk every statement of the function looking for
     __builtin_object_size calls to fold away.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree callee, result;
	  gimple call = gsi_stmt (i);

	  if (gimple_code (call) != GIMPLE_CALL)
	    continue;

	  callee = gimple_call_fndecl (call);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
	    continue;

	  /* Allocate the per-SSA-name tables lazily, only when the
	     function actually contains a __builtin_object_size call.  */
	  init_object_sizes ();
	  result = fold_call_stmt (call, false);
	  if (!result)
	    {
	      /* Folding failed; for a well-formed two-argument call
		 fall back to the documented "unknown" answers:
		 (size_t) -1 for types 0 and 1, 0 for types 2 and 3.  */
	      if (gimple_call_num_args (call) == 2
		  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
		{
		  tree ost = gimple_call_arg (call, 1);

		  if (host_integerp (ost, 1))
		    {
		      unsigned HOST_WIDE_INT object_size_type
			= tree_low_cst (ost, 1);

		      if (object_size_type < 2)
			result = fold_convert (size_type_node,
					       integer_minus_one_node);
		      else if (object_size_type < 4)
			result = build_zero_cst (size_type_node);
		    }
		}

	      /* Malformed call (wrong arity, non-pointer first argument,
		 or out-of-range type): leave the statement alone.  */
	      if (!result)
		continue;
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	    }

	  /* Replace the call with RESULT in place; the iterator I is
	     updated to point at the replacement statement.  */
	  if (!update_call_from_tree (&i, result))
	    gcc_unreachable ();

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, gsi_stmt (i), 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  fini_object_sizes ();
  return 0;
}
1268
/* Pass descriptor: ungated GIMPLE pass running compute_object_sizes;
   requires CFG and SSA form, verifies SSA afterwards.  */
struct gimple_opt_pass pass_object_sizes =
{
 {
  GIMPLE_PASS,
  "objsz",				/* name */
  NULL,					/* gate */
  compute_object_sizes,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa			/* todo_flags_finish */
 }
};