/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "builtins.h"

struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
                                                const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];
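
/* Illustrative example (an editor's sketch, not part of the original
   source): given

     struct A { char buf[10]; int i; } a;
     char *p = &a.buf[4];

   and assuming a 4-byte int with usual padding (sizeof (struct A) == 16),
   __builtin_object_size (p, 0) is 12 (16 - 4 bytes to the end of the
   whole object) while __builtin_object_size (p, 1) is 6 (10 - 4 bytes to
   the end of the buf subobject).  Types 2 and 3 are the corresponding
   lower bounds.  */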

/* Bitmaps recording which object sizes have already been computed.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}


/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}


/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
          if (wi::neg_p (dsz))
            sz = 0;
          else if (wi::fits_uhwi_p (dsz))
            sz = dsz.to_uhwi ();
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
           && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && TREE_CODE (pt_var) == STRING_CST
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
           && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of a flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}


/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
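
/* Example (an editor's sketch, not from the original source): for

     void *p = calloc (4, 8);

   the switch above sets arg2 = 1 and falls through to set arg1 = 0;
   both arguments are INTEGER_CSTs, so the computed size is
   4 * 8 = 32 bytes.  For malloc (30) only arg1 is used and the result
   is 30.  Non-constant arguments yield unknown[object_size_type].  */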


/* If object size is propagated from one of the function's arguments
   directly to its return value, return that argument for GIMPLE_CALL
   statement CALL.  Otherwise return NULL.  */

static tree
pass_through_call (const gcall *call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}
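
/* Example (an editor's sketch, not from the original source): memcpy
   returns its first argument, so for

     q = __builtin_memcpy (p, src, n);

   pass_through_call returns p and q inherits whatever object size was
   computed for p, rather than being treated as an unknown allocation.  */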


/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && computed[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (num_ssa_names > object_sizes[object_size_type].length ())
            object_sizes[object_size_type].safe_grow (num_ssa_names);
          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute the object sizes that
             can be computed.  At the end, the osi.reexamine bitmap will
             contain the variables that were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }
          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
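
/* Worked example (an editor's sketch, not from the original source):
   if ORIG has a known maximum object size of 10 bytes and OFFSET is 4,
   the candidate size for DEST is 10 - 4 = 6; an OFFSET larger than the
   remaining size clamps the candidate to 0, and an OFFSET of
   offset_limit or more makes DEST unknown, since such a constant is
   assumed to be a pointer subtraction rather than an addition.  */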


/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}


/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

/* Compute object sizes for VAR.
   For an ADDR_EXPR, the object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends
   on OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc, the object size
   is the size of the allocation.
   For a POINTER_PLUS_EXPR whose second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining
   bytes until the end of the object, the object size is 0, but if it
   is instead a pointer subtraction, the object size is
   unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the
   address space; constants less than half of the address space are
   considered addition, bigger constants subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its
   arguments, the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that VAR might be set to.  */
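
/* For instance (an editor's sketch of the rules above, not from the
   original source):

     char buf[20];
     p_1 = &buf[4];           object size 16 (20 - 4)
     p_2 = p_1 + 6;           object size 10 (16 - 6)
     p_3 = PHI <p_1, p_2>;    maximum object size 16, minimum 10

   matching the ADDR_EXPR, POINTER_PLUS_EXPR and merge rules described
   above.  */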

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, call_stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
        expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
        /* Uninitialized SSA names point nowhere.  */
        object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}


/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check whether some pointer we are computing the object size of is
   being increased within a loop.  If yes, assume all the SSA variables
   participating in that loop have minimum object size 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

static void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_object_sizes

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          gimple call = gsi_stmt (i);
          if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          init_object_sizes ();

          /* In the first pass instance, only attempt to fold
             __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
             and rather than folding the builtin to the constant if any,
             create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
             call result and the computed constant.  */
          if (first_pass_instance)
            {
              tree ost = gimple_call_arg (call, 1);
              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
                  tree ptr = gimple_call_arg (call, 0);
                  tree lhs = gimple_call_lhs (call);
                  if ((object_size_type == 1 || object_size_type == 3)
                      && (TREE_CODE (ptr) == ADDR_EXPR
                          || TREE_CODE (ptr) == SSA_NAME)
                      && lhs)
                    {
                      tree type = TREE_TYPE (lhs);
                      unsigned HOST_WIDE_INT bytes
                        = compute_builtin_object_size (ptr, object_size_type);
                      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type == 1
                                                             ? -1 : 0)
                          && wi::fits_to_tree_p (bytes, type))
                        {
                          tree tem = make_ssa_name (type);
                          gimple_call_set_lhs (call, tem);
                          enum tree_code code
                            = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
                          tree cst = build_int_cstu (type, bytes);
                          gimple g = gimple_build_assign (lhs, code, tem, cst);
                          gsi_insert_after (&i, g, GSI_NEW_STMT);
                          update_stmt (call);
                        }
                    }
                }
              continue;
            }

          result = fold_call_stmt (as_a <gcall *> (call), false);
          if (!result)
            {
              tree ost = gimple_call_arg (call, 1);

              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

                  if (object_size_type < 2)
                    result = fold_convert (size_type_node,
                                           integer_minus_one_node);
                  else if (object_size_type < 4)
                    result = build_zero_cst (size_type_node);
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, "\n");
            }

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          /* Propagate into all uses and fold those stmts.  */
          gimple use_stmt;
          imm_use_iterator iter;
          FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
            {
              use_operand_p use_p;
              FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                SET_USE (use_p, result);
              gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
              fold_stmt (&gsi);
              update_stmt (gsi_stmt (gsi));
            }
        }
    }

  fini_object_sizes ();
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}