/* From gcc.git: gcc/tree-object-size.c.
   (Related ChangeLog entry: tree-data-ref.c (subscript_dependence_tester_1):
   Call free_conflict_function.)  */
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "toplev.h"
27 #include "diagnostic.h"
28 #include "tree-flow.h"
29 #include "tree-pass.h"
30 #include "tree-ssa-propagate.h"
31
32 struct object_size_info
33 {
34 int object_size_type;
35 bitmap visited, reexamine;
36 int pass;
37 bool changed;
38 unsigned int *depths;
39 unsigned int *stack, *tos;
40 };
41
42 static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
43
44 static tree compute_object_offset (const_tree, const_tree);
45 static unsigned HOST_WIDE_INT addr_object_size (const_tree, int);
46 static unsigned HOST_WIDE_INT alloc_object_size (const_tree, int);
47 static tree pass_through_call (const_tree);
48 static void collect_object_sizes_for (struct object_size_info *, tree);
49 static void expr_object_size (struct object_size_info *, tree, tree);
50 static bool merge_object_sizes (struct object_size_info *, tree, tree,
51 unsigned HOST_WIDE_INT);
52 static bool plus_expr_object_size (struct object_size_info *, tree, tree);
53 static bool cond_expr_object_size (struct object_size_info *, tree, tree);
54 static unsigned int compute_object_sizes (void);
55 static void init_offset_limit (void);
56 static void check_for_plus_in_loops (struct object_size_info *, tree);
57 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
58 unsigned int);
59
60 /* object_sizes[0] is upper bound for number of bytes till the end of
61 the object.
62 object_sizes[1] is upper bound for number of bytes till the end of
63 the subobject (innermost array or field with address taken).
64 object_sizes[2] is lower bound for number of bytes till the end of
65 the object and object_sizes[3] lower bound for subobject. */
66 static unsigned HOST_WIDE_INT *object_sizes[4];
67
68 /* Bitmaps what object sizes have been computed already. */
69 static bitmap computed[4];
70
71 /* Maximum value of offset we consider to be addition. */
72 static unsigned HOST_WIDE_INT offset_limit;
73
74
75 /* Initialize OFFSET_LIMIT variable. */
76 static void
77 init_offset_limit (void)
78 {
79 if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
80 offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
81 else
82 offset_limit = -1;
83 offset_limit /= 2;
84 }
85
86
87 /* Compute offset of EXPR within VAR. Return error_mark_node
88 if unknown. */
89
90 static tree
91 compute_object_offset (const_tree expr, const_tree var)
92 {
93 enum tree_code code = PLUS_EXPR;
94 tree base, off, t;
95
96 if (expr == var)
97 return size_zero_node;
98
99 switch (TREE_CODE (expr))
100 {
101 case COMPONENT_REF:
102 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
103 if (base == error_mark_node)
104 return base;
105
106 t = TREE_OPERAND (expr, 1);
107 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
108 size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
109 / BITS_PER_UNIT));
110 break;
111
112 case REALPART_EXPR:
113 case NOP_EXPR:
114 case CONVERT_EXPR:
115 case VIEW_CONVERT_EXPR:
116 case NON_LVALUE_EXPR:
117 return compute_object_offset (TREE_OPERAND (expr, 0), var);
118
119 case IMAGPART_EXPR:
120 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
121 if (base == error_mark_node)
122 return base;
123
124 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
125 break;
126
127 case ARRAY_REF:
128 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
129 if (base == error_mark_node)
130 return base;
131
132 t = TREE_OPERAND (expr, 1);
133 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
134 {
135 code = MINUS_EXPR;
136 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
137 }
138 t = fold_convert (sizetype, t);
139 off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
140 break;
141
142 default:
143 return error_mark_node;
144 }
145
146 return size_binop (code, base, off);
147 }
148
149
150 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
151 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
152 If unknown, return unknown[object_size_type]. */
153
154 static unsigned HOST_WIDE_INT
155 addr_object_size (const_tree ptr, int object_size_type)
156 {
157 tree pt_var;
158
159 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
160
161 pt_var = TREE_OPERAND (ptr, 0);
162 if (REFERENCE_CLASS_P (pt_var))
163 pt_var = get_base_address (pt_var);
164
165 if (pt_var
166 && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
167 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
168 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
169 && (unsigned HOST_WIDE_INT)
170 tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1) < offset_limit)
171 {
172 tree bytes;
173
174 if (pt_var != TREE_OPERAND (ptr, 0))
175 {
176 tree var;
177
178 if (object_size_type & 1)
179 {
180 var = TREE_OPERAND (ptr, 0);
181
182 while (var != pt_var
183 && TREE_CODE (var) != BIT_FIELD_REF
184 && TREE_CODE (var) != COMPONENT_REF
185 && TREE_CODE (var) != ARRAY_REF
186 && TREE_CODE (var) != ARRAY_RANGE_REF
187 && TREE_CODE (var) != REALPART_EXPR
188 && TREE_CODE (var) != IMAGPART_EXPR)
189 var = TREE_OPERAND (var, 0);
190 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
191 var = TREE_OPERAND (var, 0);
192 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
193 || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
194 || tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)),
195 TYPE_SIZE_UNIT (TREE_TYPE (var))))
196 var = pt_var;
197 }
198 else
199 var = pt_var;
200
201 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
202 if (bytes != error_mark_node)
203 {
204 if (TREE_CODE (bytes) == INTEGER_CST
205 && tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes))
206 bytes = size_zero_node;
207 else
208 bytes = size_binop (MINUS_EXPR,
209 TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes);
210 }
211 }
212 else
213 bytes = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
214
215 if (host_integerp (bytes, 1))
216 return tree_low_cst (bytes, 1);
217 }
218
219 return unknown[object_size_type];
220 }
221
222
223 /* Compute __builtin_object_size for CALL, which is a CALL_EXPR.
224 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
225 argument from __builtin_object_size. If unknown, return
226 unknown[object_size_type]. */
227
228 static unsigned HOST_WIDE_INT
229 alloc_object_size (const_tree call, int object_size_type)
230 {
231 tree callee, bytes = NULL_TREE;
232 tree alloc_size;
233 int arg1 = -1, arg2 = -1;
234
235 gcc_assert (TREE_CODE (call) == CALL_EXPR);
236
237 callee = get_callee_fndecl (call);
238 if (!callee)
239 return unknown[object_size_type];
240
241 alloc_size = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (TREE_TYPE(callee)));
242 if (alloc_size && TREE_VALUE (alloc_size))
243 {
244 tree p = TREE_VALUE (alloc_size);
245
246 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
247 if (TREE_CHAIN (p))
248 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
249 }
250
251 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
252 switch (DECL_FUNCTION_CODE (callee))
253 {
254 case BUILT_IN_CALLOC:
255 arg2 = 1;
256 /* fall through */
257 case BUILT_IN_MALLOC:
258 case BUILT_IN_ALLOCA:
259 arg1 = 0;
260 default:
261 break;
262 }
263
264 if (arg1 < 0 || arg1 >= call_expr_nargs (call)
265 || TREE_CODE (CALL_EXPR_ARG (call, arg1)) != INTEGER_CST
266 || (arg2 >= 0
267 && (arg2 >= call_expr_nargs (call)
268 || TREE_CODE (CALL_EXPR_ARG (call, arg2)) != INTEGER_CST)))
269 return unknown[object_size_type];
270
271 if (arg2 >= 0)
272 bytes = size_binop (MULT_EXPR,
273 fold_convert (sizetype, CALL_EXPR_ARG (call, arg1)),
274 fold_convert (sizetype, CALL_EXPR_ARG (call, arg2)));
275 else if (arg1 >= 0)
276 bytes = fold_convert (sizetype, CALL_EXPR_ARG (call, arg1));
277
278 if (bytes && host_integerp (bytes, 1))
279 return tree_low_cst (bytes, 1);
280
281 return unknown[object_size_type];
282 }
283
284
285 /* If object size is propagated from one of function's arguments directly
286 to its return value, return that argument for CALL_EXPR CALL.
287 Otherwise return NULL. */
288
289 static tree
290 pass_through_call (const_tree call)
291 {
292 tree callee = get_callee_fndecl (call);
293
294 if (callee
295 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
296 switch (DECL_FUNCTION_CODE (callee))
297 {
298 case BUILT_IN_MEMCPY:
299 case BUILT_IN_MEMMOVE:
300 case BUILT_IN_MEMSET:
301 case BUILT_IN_STRCPY:
302 case BUILT_IN_STRNCPY:
303 case BUILT_IN_STRCAT:
304 case BUILT_IN_STRNCAT:
305 case BUILT_IN_MEMCPY_CHK:
306 case BUILT_IN_MEMMOVE_CHK:
307 case BUILT_IN_MEMSET_CHK:
308 case BUILT_IN_STRCPY_CHK:
309 case BUILT_IN_STRNCPY_CHK:
310 case BUILT_IN_STRCAT_CHK:
311 case BUILT_IN_STRNCAT_CHK:
312 if (call_expr_nargs (call) >= 1)
313 return CALL_EXPR_ARG (call, 0);
314 break;
315 default:
316 break;
317 }
318
319 return NULL_TREE;
320 }
321
322
323 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
324 second argument from __builtin_object_size. */
325
326 unsigned HOST_WIDE_INT
327 compute_builtin_object_size (tree ptr, int object_size_type)
328 {
329 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
330
331 if (! offset_limit)
332 init_offset_limit ();
333
334 if (TREE_CODE (ptr) == ADDR_EXPR)
335 return addr_object_size (ptr, object_size_type);
336 else if (TREE_CODE (ptr) == CALL_EXPR)
337 {
338 tree arg = pass_through_call (ptr);
339
340 if (arg)
341 return compute_builtin_object_size (arg, object_size_type);
342 else
343 return alloc_object_size (ptr, object_size_type);
344 }
345 else if (TREE_CODE (ptr) == SSA_NAME
346 && POINTER_TYPE_P (TREE_TYPE (ptr))
347 && object_sizes[object_size_type] != NULL)
348 {
349 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
350 {
351 struct object_size_info osi;
352 bitmap_iterator bi;
353 unsigned int i;
354
355 if (dump_file)
356 {
357 fprintf (dump_file, "Computing %s %sobject size for ",
358 (object_size_type & 2) ? "minimum" : "maximum",
359 (object_size_type & 1) ? "sub" : "");
360 print_generic_expr (dump_file, ptr, dump_flags);
361 fprintf (dump_file, ":\n");
362 }
363
364 osi.visited = BITMAP_ALLOC (NULL);
365 osi.reexamine = BITMAP_ALLOC (NULL);
366 osi.object_size_type = object_size_type;
367 osi.depths = NULL;
368 osi.stack = NULL;
369 osi.tos = NULL;
370
371 /* First pass: walk UD chains, compute object sizes that
372 can be computed. osi.reexamine bitmap at the end will
373 contain what variables were found in dependency cycles
374 and therefore need to be reexamined. */
375 osi.pass = 0;
376 osi.changed = false;
377 collect_object_sizes_for (&osi, ptr);
378
379 /* Second pass: keep recomputing object sizes of variables
380 that need reexamination, until no object sizes are
381 increased or all object sizes are computed. */
382 if (! bitmap_empty_p (osi.reexamine))
383 {
384 bitmap reexamine = BITMAP_ALLOC (NULL);
385
386 /* If looking for minimum instead of maximum object size,
387 detect cases where a pointer is increased in a loop.
388 Although even without this detection pass 2 would eventually
389 terminate, it could take a long time. If a pointer is
390 increasing this way, we need to assume 0 object size.
391 E.g. p = &buf[0]; while (cond) p = p + 4; */
392 if (object_size_type & 2)
393 {
394 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
395 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
396 osi.tos = osi.stack;
397 osi.pass = 1;
398 /* collect_object_sizes_for is changing
399 osi.reexamine bitmap, so iterate over a copy. */
400 bitmap_copy (reexamine, osi.reexamine);
401 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
402 if (bitmap_bit_p (osi.reexamine, i))
403 check_for_plus_in_loops (&osi, ssa_name (i));
404
405 free (osi.depths);
406 osi.depths = NULL;
407 free (osi.stack);
408 osi.stack = NULL;
409 osi.tos = NULL;
410 }
411
412 do
413 {
414 osi.pass = 2;
415 osi.changed = false;
416 /* collect_object_sizes_for is changing
417 osi.reexamine bitmap, so iterate over a copy. */
418 bitmap_copy (reexamine, osi.reexamine);
419 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
420 if (bitmap_bit_p (osi.reexamine, i))
421 {
422 collect_object_sizes_for (&osi, ssa_name (i));
423 if (dump_file && (dump_flags & TDF_DETAILS))
424 {
425 fprintf (dump_file, "Reexamining ");
426 print_generic_expr (dump_file, ssa_name (i),
427 dump_flags);
428 fprintf (dump_file, "\n");
429 }
430 }
431 }
432 while (osi.changed);
433
434 BITMAP_FREE (reexamine);
435 }
436 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
437 bitmap_set_bit (computed[object_size_type], i);
438
439 /* Debugging dumps. */
440 if (dump_file)
441 {
442 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
443 if (object_sizes[object_size_type][i]
444 != unknown[object_size_type])
445 {
446 print_generic_expr (dump_file, ssa_name (i),
447 dump_flags);
448 fprintf (dump_file,
449 ": %s %sobject size "
450 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
451 (object_size_type & 2) ? "minimum" : "maximum",
452 (object_size_type & 1) ? "sub" : "",
453 object_sizes[object_size_type][i]);
454 }
455 }
456
457 BITMAP_FREE (osi.reexamine);
458 BITMAP_FREE (osi.visited);
459 }
460
461 return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
462 }
463
464 return unknown[object_size_type];
465 }
466
467
468 /* Compute object_sizes for PTR, defined to VALUE, which is not
469 a SSA_NAME. */
470
471 static void
472 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
473 {
474 int object_size_type = osi->object_size_type;
475 unsigned int varno = SSA_NAME_VERSION (ptr);
476 unsigned HOST_WIDE_INT bytes;
477
478 gcc_assert (object_sizes[object_size_type][varno]
479 != unknown[object_size_type]);
480 gcc_assert (osi->pass == 0);
481
482 if (TREE_CODE (value) == WITH_SIZE_EXPR)
483 value = TREE_OPERAND (value, 0);
484
485 /* Pointer variables should have been handled by merge_object_sizes. */
486 gcc_assert (TREE_CODE (value) != SSA_NAME
487 || !POINTER_TYPE_P (TREE_TYPE (value)));
488
489 if (TREE_CODE (value) == ADDR_EXPR)
490 bytes = addr_object_size (value, object_size_type);
491 else if (TREE_CODE (value) == CALL_EXPR)
492 bytes = alloc_object_size (value, object_size_type);
493 else
494 bytes = unknown[object_size_type];
495
496 if ((object_size_type & 2) == 0)
497 {
498 if (object_sizes[object_size_type][varno] < bytes)
499 object_sizes[object_size_type][varno] = bytes;
500 }
501 else
502 {
503 if (object_sizes[object_size_type][varno] > bytes)
504 object_sizes[object_size_type][varno] = bytes;
505 }
506 }
507
508
509 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
510 the object size might need reexamination later. */
511
512 static bool
513 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
514 unsigned HOST_WIDE_INT offset)
515 {
516 int object_size_type = osi->object_size_type;
517 unsigned int varno = SSA_NAME_VERSION (dest);
518 unsigned HOST_WIDE_INT orig_bytes;
519
520 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
521 return false;
522 if (offset >= offset_limit)
523 {
524 object_sizes[object_size_type][varno] = unknown[object_size_type];
525 return false;
526 }
527
528 if (osi->pass == 0)
529 collect_object_sizes_for (osi, orig);
530
531 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
532 if (orig_bytes != unknown[object_size_type])
533 orig_bytes = (offset > orig_bytes)
534 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;
535
536 if ((object_size_type & 2) == 0)
537 {
538 if (object_sizes[object_size_type][varno] < orig_bytes)
539 {
540 object_sizes[object_size_type][varno] = orig_bytes;
541 osi->changed = true;
542 }
543 }
544 else
545 {
546 if (object_sizes[object_size_type][varno] > orig_bytes)
547 {
548 object_sizes[object_size_type][varno] = orig_bytes;
549 osi->changed = true;
550 }
551 }
552 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
553 }
554
555
556 /* Compute object_sizes for PTR, defined to VALUE, which is
557 a POINTER_PLUS_EXPR. Return true if the object size might need reexamination
558 later. */
559
560 static bool
561 plus_expr_object_size (struct object_size_info *osi, tree var, tree value)
562 {
563 tree op0 = TREE_OPERAND (value, 0);
564 tree op1 = TREE_OPERAND (value, 1);
565 int object_size_type = osi->object_size_type;
566 unsigned int varno = SSA_NAME_VERSION (var);
567 unsigned HOST_WIDE_INT bytes;
568
569 gcc_assert (TREE_CODE (value) == POINTER_PLUS_EXPR);
570
571 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
572 return false;
573
574 /* Handle PTR + OFFSET here. */
575 if (TREE_CODE (op1) == INTEGER_CST
576 && (TREE_CODE (op0) == SSA_NAME
577 || TREE_CODE (op0) == ADDR_EXPR))
578 {
579 if (! host_integerp (op1, 1))
580 bytes = unknown[object_size_type];
581 else if (TREE_CODE (op0) == SSA_NAME)
582 return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
583 else
584 {
585 unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);
586
587 bytes = compute_builtin_object_size (op0, object_size_type);
588 if (bytes == unknown[object_size_type])
589 ;
590 else if (off > offset_limit)
591 bytes = unknown[object_size_type];
592 else if (off > bytes)
593 bytes = 0;
594 else
595 bytes -= off;
596 }
597 }
598 else
599 bytes = unknown[object_size_type];
600
601 if ((object_size_type & 2) == 0)
602 {
603 if (object_sizes[object_size_type][varno] < bytes)
604 object_sizes[object_size_type][varno] = bytes;
605 }
606 else
607 {
608 if (object_sizes[object_size_type][varno] > bytes)
609 object_sizes[object_size_type][varno] = bytes;
610 }
611 return false;
612 }
613
614
615 /* Compute object_sizes for PTR, defined to VALUE, which is
616 a COND_EXPR. Return true if the object size might need reexamination
617 later. */
618
619 static bool
620 cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
621 {
622 tree then_, else_;
623 int object_size_type = osi->object_size_type;
624 unsigned int varno = SSA_NAME_VERSION (var);
625 bool reexamine = false;
626
627 gcc_assert (TREE_CODE (value) == COND_EXPR);
628
629 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
630 return false;
631
632 then_ = COND_EXPR_THEN (value);
633 else_ = COND_EXPR_ELSE (value);
634
635 if (TREE_CODE (then_) == SSA_NAME)
636 reexamine |= merge_object_sizes (osi, var, then_, 0);
637 else
638 expr_object_size (osi, var, then_);
639
640 if (TREE_CODE (else_) == SSA_NAME)
641 reexamine |= merge_object_sizes (osi, var, else_, 0);
642 else
643 expr_object_size (osi, var, else_);
644
645 return reexamine;
646 }
647
648
649 /* Compute object sizes for VAR.
650 For ADDR_EXPR an object size is the number of remaining bytes
651 to the end of the object (where what is considered an object depends on
652 OSI->object_size_type).
653 For allocation CALL_EXPR like malloc or calloc object size is the size
654 of the allocation.
655 For POINTER_PLUS_EXPR where second operand is a constant integer,
656 object size is object size of the first operand minus the constant.
657 If the constant is bigger than the number of remaining bytes until the
658 end of the object, object size is 0, but if it is instead a pointer
659 subtraction, object size is unknown[object_size_type].
660 To differentiate addition from subtraction, ADDR_EXPR returns
661 unknown[object_size_type] for all objects bigger than half of the address
662 space, and constants less than half of the address space are considered
663 addition, while bigger constants subtraction.
664 For a memcpy like CALL_EXPR that always returns one of its arguments, the
665 object size is object size of that argument.
666 Otherwise, object size is the maximum of object sizes of variables
667 that it might be set to. */
668
669 static void
670 collect_object_sizes_for (struct object_size_info *osi, tree var)
671 {
672 int object_size_type = osi->object_size_type;
673 unsigned int varno = SSA_NAME_VERSION (var);
674 tree stmt;
675 bool reexamine;
676
677 if (bitmap_bit_p (computed[object_size_type], varno))
678 return;
679
680 if (osi->pass == 0)
681 {
682 if (! bitmap_bit_p (osi->visited, varno))
683 {
684 bitmap_set_bit (osi->visited, varno);
685 object_sizes[object_size_type][varno]
686 = (object_size_type & 2) ? -1 : 0;
687 }
688 else
689 {
690 /* Found a dependency loop. Mark the variable for later
691 re-examination. */
692 bitmap_set_bit (osi->reexamine, varno);
693 if (dump_file && (dump_flags & TDF_DETAILS))
694 {
695 fprintf (dump_file, "Found a dependency loop at ");
696 print_generic_expr (dump_file, var, dump_flags);
697 fprintf (dump_file, "\n");
698 }
699 return;
700 }
701 }
702
703 if (dump_file && (dump_flags & TDF_DETAILS))
704 {
705 fprintf (dump_file, "Visiting use-def links for ");
706 print_generic_expr (dump_file, var, dump_flags);
707 fprintf (dump_file, "\n");
708 }
709
710 stmt = SSA_NAME_DEF_STMT (var);
711 reexamine = false;
712
713 switch (TREE_CODE (stmt))
714 {
715 case RETURN_EXPR:
716 gcc_assert (TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT);
717 stmt = TREE_OPERAND (stmt, 0);
718 /* FALLTHRU */
719
720 case GIMPLE_MODIFY_STMT:
721 {
722 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1), arg;
723 STRIP_NOPS (rhs);
724
725 if (TREE_CODE (rhs) == CALL_EXPR)
726 {
727 arg = pass_through_call (rhs);
728 if (arg)
729 rhs = arg;
730 }
731
732 if (TREE_CODE (rhs) == SSA_NAME
733 && POINTER_TYPE_P (TREE_TYPE (rhs)))
734 reexamine = merge_object_sizes (osi, var, rhs, 0);
735
736 else if (TREE_CODE (rhs) == POINTER_PLUS_EXPR)
737 reexamine = plus_expr_object_size (osi, var, rhs);
738
739 else if (TREE_CODE (rhs) == COND_EXPR)
740 reexamine = cond_expr_object_size (osi, var, rhs);
741
742 else
743 expr_object_size (osi, var, rhs);
744 break;
745 }
746
747 case ASM_EXPR:
748 /* Pointers defined by __asm__ statements can point anywhere. */
749 object_sizes[object_size_type][varno] = unknown[object_size_type];
750 break;
751
752 case NOP_EXPR:
753 {
754 tree decl = SSA_NAME_VAR (var);
755
756 gcc_assert (IS_EMPTY_STMT (stmt));
757
758 if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
759 expr_object_size (osi, var, DECL_INITIAL (decl));
760 else
761 expr_object_size (osi, var, decl);
762 }
763 break;
764
765 case PHI_NODE:
766 {
767 int i;
768
769 for (i = 0; i < PHI_NUM_ARGS (stmt); i++)
770 {
771 tree rhs = PHI_ARG_DEF (stmt, i);
772
773 if (object_sizes[object_size_type][varno]
774 == unknown[object_size_type])
775 break;
776
777 if (TREE_CODE (rhs) == SSA_NAME)
778 reexamine |= merge_object_sizes (osi, var, rhs, 0);
779 else if (osi->pass == 0)
780 expr_object_size (osi, var, rhs);
781 }
782 break;
783 }
784 default:
785 gcc_unreachable ();
786 }
787
788 if (! reexamine
789 || object_sizes[object_size_type][varno] == unknown[object_size_type])
790 {
791 bitmap_set_bit (computed[object_size_type], varno);
792 bitmap_clear_bit (osi->reexamine, varno);
793 }
794 else
795 {
796 bitmap_set_bit (osi->reexamine, varno);
797 if (dump_file && (dump_flags & TDF_DETAILS))
798 {
799 fprintf (dump_file, "Need to reexamine ");
800 print_generic_expr (dump_file, var, dump_flags);
801 fprintf (dump_file, "\n");
802 }
803 }
804 }
805
806
807 /* Helper function for check_for_plus_in_loops. Called recursively
808 to detect loops. */
809
810 static void
811 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
812 unsigned int depth)
813 {
814 tree stmt = SSA_NAME_DEF_STMT (var);
815 unsigned int varno = SSA_NAME_VERSION (var);
816
817 if (osi->depths[varno])
818 {
819 if (osi->depths[varno] != depth)
820 {
821 unsigned int *sp;
822
823 /* Found a loop involving pointer addition. */
824 for (sp = osi->tos; sp > osi->stack; )
825 {
826 --sp;
827 bitmap_clear_bit (osi->reexamine, *sp);
828 bitmap_set_bit (computed[osi->object_size_type], *sp);
829 object_sizes[osi->object_size_type][*sp] = 0;
830 if (*sp == varno)
831 break;
832 }
833 }
834 return;
835 }
836 else if (! bitmap_bit_p (osi->reexamine, varno))
837 return;
838
839 osi->depths[varno] = depth;
840 *osi->tos++ = varno;
841
842 switch (TREE_CODE (stmt))
843 {
844 case RETURN_EXPR:
845 gcc_assert (TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT);
846 stmt = TREE_OPERAND (stmt, 0);
847 /* FALLTHRU */
848
849 case GIMPLE_MODIFY_STMT:
850 {
851 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1), arg;
852 STRIP_NOPS (rhs);
853
854 if (TREE_CODE (rhs) == CALL_EXPR)
855 {
856 arg = pass_through_call (rhs);
857 if (arg)
858 rhs = arg;
859 }
860
861 if (TREE_CODE (rhs) == SSA_NAME)
862 check_for_plus_in_loops_1 (osi, rhs, depth);
863 else if (TREE_CODE (rhs) == POINTER_PLUS_EXPR)
864 {
865 tree op0 = TREE_OPERAND (rhs, 0);
866 tree op1 = TREE_OPERAND (rhs, 1);
867 tree cst, basevar;
868
869 basevar = op0;
870 cst = op1;
871 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
872
873 check_for_plus_in_loops_1 (osi, basevar,
874 depth + !integer_zerop (cst));
875 }
876 else
877 gcc_unreachable ();
878 break;
879 }
880 case PHI_NODE:
881 {
882 int i;
883
884 for (i = 0; i < PHI_NUM_ARGS (stmt); i++)
885 {
886 tree rhs = PHI_ARG_DEF (stmt, i);
887
888 if (TREE_CODE (rhs) == SSA_NAME)
889 check_for_plus_in_loops_1 (osi, rhs, depth);
890 }
891 break;
892 }
893 default:
894 gcc_unreachable ();
895 }
896
897 osi->depths[varno] = 0;
898 osi->tos--;
899 }
900
901
902 /* Check if some pointer we are computing object size of is being increased
903 within a loop. If yes, assume all the SSA variables participating in
904 that loop have minimum object sizes 0. */
905
906 static void
907 check_for_plus_in_loops (struct object_size_info *osi, tree var)
908 {
909 tree stmt = SSA_NAME_DEF_STMT (var);
910
911 switch (TREE_CODE (stmt))
912 {
913 case RETURN_EXPR:
914 gcc_assert (TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT);
915 stmt = TREE_OPERAND (stmt, 0);
916 /* FALLTHRU */
917
918 case GIMPLE_MODIFY_STMT:
919 {
920 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1), arg;
921 STRIP_NOPS (rhs);
922
923 if (TREE_CODE (rhs) == CALL_EXPR)
924 {
925 arg = pass_through_call (rhs);
926 if (arg)
927 rhs = arg;
928 }
929
930 if (TREE_CODE (rhs) == POINTER_PLUS_EXPR)
931 {
932 tree op0 = TREE_OPERAND (rhs, 0);
933 tree op1 = TREE_OPERAND (rhs, 1);
934 tree cst, basevar;
935
936 basevar = op0;
937 cst = op1;
938 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
939
940 if (integer_zerop (cst))
941 break;
942
943 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
944 *osi->tos++ = SSA_NAME_VERSION (basevar);
945 check_for_plus_in_loops_1 (osi, var, 2);
946 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
947 osi->tos--;
948 }
949 break;
950 }
951 default:
952 break;
953 }
954 }
955
956
957 /* Initialize data structures for the object size computation. */
958
959 void
960 init_object_sizes (void)
961 {
962 int object_size_type;
963
964 if (object_sizes[0])
965 return;
966
967 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
968 {
969 object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
970 computed[object_size_type] = BITMAP_ALLOC (NULL);
971 }
972
973 init_offset_limit ();
974 }
975
976
977 /* Destroy data structures after the object size computation. */
978
979 void
980 fini_object_sizes (void)
981 {
982 int object_size_type;
983
984 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
985 {
986 free (object_sizes[object_size_type]);
987 BITMAP_FREE (computed[object_size_type]);
988 object_sizes[object_size_type] = NULL;
989 }
990 }
991
992
993 /* Simple pass to optimize all __builtin_object_size () builtins. */
994
995 static unsigned int
996 compute_object_sizes (void)
997 {
998 basic_block bb;
999 FOR_EACH_BB (bb)
1000 {
1001 block_stmt_iterator i;
1002 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
1003 {
1004 tree *stmtp = bsi_stmt_ptr (i);
1005 tree call = get_rhs (*stmtp);
1006 tree callee, result;
1007
1008 if (!call || TREE_CODE (call) != CALL_EXPR)
1009 continue;
1010
1011 callee = get_callee_fndecl (call);
1012 if (!callee
1013 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
1014 || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
1015 continue;
1016
1017 init_object_sizes ();
1018 result = fold_call_expr (call, false);
1019 if (!result)
1020 {
1021 if (call_expr_nargs (call) == 2
1022 && POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (call, 0))))
1023 {
1024 tree ost = CALL_EXPR_ARG (call, 1);
1025
1026 if (host_integerp (ost, 1))
1027 {
1028 unsigned HOST_WIDE_INT object_size_type
1029 = tree_low_cst (ost, 1);
1030
1031 if (object_size_type < 2)
1032 result = fold_convert (size_type_node,
1033 integer_minus_one_node);
1034 else if (object_size_type < 4)
1035 result = size_zero_node;
1036 }
1037 }
1038
1039 if (!result)
1040 continue;
1041 }
1042
1043 if (dump_file && (dump_flags & TDF_DETAILS))
1044 {
1045 fprintf (dump_file, "Simplified\n ");
1046 print_generic_stmt (dump_file, *stmtp, dump_flags);
1047 }
1048
1049 if (!set_rhs (stmtp, result))
1050 gcc_unreachable ();
1051 update_stmt (*stmtp);
1052
1053 if (dump_file && (dump_flags & TDF_DETAILS))
1054 {
1055 fprintf (dump_file, "to\n ");
1056 print_generic_stmt (dump_file, *stmtp, dump_flags);
1057 fprintf (dump_file, "\n");
1058 }
1059 }
1060 }
1061
1062 fini_object_sizes ();
1063 return 0;
1064 }
1065
1066 struct tree_opt_pass pass_object_sizes =
1067 {
1068 "objsz", /* name */
1069 NULL, /* gate */
1070 compute_object_sizes, /* execute */
1071 NULL, /* sub */
1072 NULL, /* next */
1073 0, /* static_pass_number */
1074 0, /* tv_id */
1075 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
1076 0, /* properties_provided */
1077 0, /* properties_destroyed */
1078 0, /* todo_flags_start */
1079 TODO_dump_func | TODO_verify_ssa, /* todo_flags_finish */
1080 0 /* letter */
1081 };