decl.c (value_annotation_hasher::handle_cache_entry): Delete.
[gcc.git] / gcc / sanopt.c
1 /* Optimize and expand sanitizer functions.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Marek Polacek <polacek@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "alias.h"
25 #include "symtab.h"
26 #include "options.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "predict.h"
30 #include "tm.h"
31 #include "hard-reg-set.h"
32 #include "function.h"
33 #include "dominance.h"
34 #include "cfg.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "gimple.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "plugin-api.h"
43 #include "tree-pass.h"
44 #include "asan.h"
45 #include "gimple-pretty-print.h"
46 #include "tm_p.h"
47 #include "langhooks.h"
48 #include "ubsan.h"
49 #include "params.h"
50 #include "tree-ssa-operands.h"
51
52
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  All flags are
   computed lazily during the dominator walk in sanopt_optimize_walker
   and imm_dom_path_with_freeing_call.  */

struct sanopt_info
{
  /* True if this BB might call (directly or indirectly) free/munmap
     or similar operation.  */
  bool has_freeing_call_p;

  /* True if HAS_FREEING_CALL_P flag has been computed.  */
  bool has_freeing_call_computed_p;

  /* True if there is a block with HAS_FREEING_CALL_P flag set
     on any path between an immediate dominator of BB, denoted
     imm(BB), and BB.  */
  bool imm_dom_path_with_freeing_call_p;

  /* True if IMM_DOM_PATH_WITH_FREEING_CALL_P has been computed.  */
  bool imm_dom_path_with_freeing_call_computed_p;

  /* Number of possibly freeing calls encountered in this bb
     (so far).  Statement uids of recorded ASAN_CHECKs are compared
     against this to detect intervening freeing calls.  */
  uint64_t freeing_call_events;

  /* True if BB is currently being visited during computation
     of IMM_DOM_PATH_WITH_FREEING_CALL_P flag, used to cut cycles
     in that recursion.  */
  bool being_visited_p;

  /* True if this BB has been visited in the dominator walk, i.e. the
     walk has already left it, so checks recorded in it cannot
     dominate the current block anymore.  */
  bool visited_p;
};
84
85 /* If T has a single definition of form T = T2, return T2. */
86
87 static tree
88 maybe_get_single_definition (tree t)
89 {
90 if (TREE_CODE (t) == SSA_NAME)
91 {
92 gimple g = SSA_NAME_DEF_STMT (t);
93 if (gimple_assign_single_p (g))
94 return gimple_assign_rhs1 (g);
95 }
96 return NULL_TREE;
97 }
98
/* Traits class for tree hash maps below.  Keys are hashed and
   compared structurally (by expression value, not by pointer
   identity), so equivalent trees share one map entry.  */

struct sanopt_tree_map_traits : default_hashmap_traits
{
  /* Structural hash of the tree REF.  */
  static inline hashval_t hash (const_tree ref)
  {
    return iterative_hash_expr (ref, 0);
  }

  /* Structural equality of two keys; consistent with hash above.  */
  static inline bool equal_keys (const_tree ref1, const_tree ref2)
  {
    return operand_equal_p (ref1, ref2, 0);
  }
};
113
/* Tree triplet for vptr_check_map: the first, second and fourth
   argument of a UBSAN_VPTR call, identifying one vptr check.  */
struct sanopt_tree_triplet
{
  tree t1, t2, t3;
};
119
120 /* Traits class for tree triplet hash maps below. */
121
122 struct sanopt_tree_triplet_map_traits : default_hashmap_traits
123 {
124 static inline hashval_t
125 hash (const sanopt_tree_triplet &ref)
126 {
127 inchash::hash hstate (0);
128 inchash::add_expr (ref.t1, hstate);
129 inchash::add_expr (ref.t2, hstate);
130 inchash::add_expr (ref.t3, hstate);
131 return hstate.end ();
132 }
133
134 static inline bool
135 equal_keys (const sanopt_tree_triplet &ref1, const sanopt_tree_triplet &ref2)
136 {
137 return operand_equal_p (ref1.t1, ref2.t1, 0)
138 && operand_equal_p (ref1.t2, ref2.t2, 0)
139 && operand_equal_p (ref1.t3, ref2.t3, 0);
140 }
141
142 template<typename T>
143 static inline void
144 mark_deleted (T &e)
145 {
146 e.m_key.t1 = reinterpret_cast<T *> (1);
147 }
148
149 template<typename T>
150 static inline void
151 mark_empty (T &e)
152 {
153 e.m_key.t1 = NULL;
154 }
155
156 template<typename T>
157 static inline bool
158 is_deleted (T &e)
159 {
160 return e.m_key.t1 == (void *) 1;
161 }
162
163 template<typename T>
164 static inline bool
165 is_empty (T &e)
166 {
167 return e.m_key.t1 == NULL;
168 }
169 };
170
/* This is used to carry various hash maps and variables used
   in sanopt_optimize_walker.  One instance lives for the whole
   dominator walk of a function.  */

struct sanopt_ctx
{
  /* This map maps a pointer (the first argument of UBSAN_NULL) to
     a vector of UBSAN_NULL call statements that check this pointer.  */
  hash_map<tree, auto_vec<gimple> > null_check_map;

  /* This map maps a pointer (the second argument of ASAN_CHECK) to
     a vector of ASAN_CHECK call statements that check the access.
     Structural key comparison via sanopt_tree_map_traits.  */
  hash_map<tree, auto_vec<gimple>, sanopt_tree_map_traits> asan_check_map;

  /* This map maps a tree triplet (the first, second and fourth argument
     of UBSAN_VPTR) to a vector of UBSAN_VPTR call statements that check
     that virtual table pointer.  */
  hash_map<sanopt_tree_triplet, auto_vec<gimple>,
	   sanopt_tree_triplet_map_traits> vptr_check_map;

  /* Number of IFN_ASAN_CHECK statements remaining after optimization;
     used to decide between inline checks and runtime calls.  */
  int asan_num_accesses;
};
193
194
/* Return true if there might be any call to free/munmap operation
   on any path in between DOM (which should be imm(BB)) and BB.

   The result is memoized in BB's sanopt_info
   (imm_dom_path_with_freeing_call_p), and BEING_VISITED_P guards
   against revisiting a block inside the recursion below.  */

static bool
imm_dom_path_with_freeing_call (basic_block bb, basic_block dom)
{
  sanopt_info *info = (sanopt_info *) bb->aux;
  edge e;
  edge_iterator ei;

  /* Memoized answer from a previous query.  */
  if (info->imm_dom_path_with_freeing_call_computed_p)
    return info->imm_dom_path_with_freeing_call_p;

  info->being_visited_p = true;

  /* Pass 1: cheap check — any predecessor already known to contain
     (or be reached through) a freeing call settles the question.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      sanopt_info *pred_info = (sanopt_info *) e->src->aux;

      if (e->src == dom)
	continue;

      if ((pred_info->imm_dom_path_with_freeing_call_computed_p
	  && pred_info->imm_dom_path_with_freeing_call_p)
	  || (pred_info->has_freeing_call_computed_p
	      && pred_info->has_freeing_call_p))
	{
	  info->imm_dom_path_with_freeing_call_computed_p = true;
	  info->imm_dom_path_with_freeing_call_p = true;
	  info->being_visited_p = false;
	  return true;
	}
    }

  /* Pass 2: scan predecessors whose HAS_FREEING_CALL_P is not yet
     computed, looking for a possibly freeing call in their bodies.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      sanopt_info *pred_info = (sanopt_info *) e->src->aux;

      if (e->src == dom)
	continue;

      if (pred_info->has_freeing_call_computed_p)
	continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (e->src); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (is_gimple_call (stmt) && !nonfreeing_call_p (stmt))
	    {
	      pred_info->has_freeing_call_p = true;
	      break;
	    }
	}

      pred_info->has_freeing_call_computed_p = true;
      if (pred_info->has_freeing_call_p)
	{
	  info->imm_dom_path_with_freeing_call_computed_p = true;
	  info->imm_dom_path_with_freeing_call_p = true;
	  info->being_visited_p = false;
	  return true;
	}
    }

  /* Pass 3: recurse up the dominator chain from each predecessor
     towards DOM, asking the same question for intermediate blocks.
     A block already being visited closes a cycle and is skipped.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->src == dom)
	continue;

      basic_block src;
      for (src = e->src; src != dom; )
	{
	  sanopt_info *pred_info = (sanopt_info *) src->aux;
	  if (pred_info->being_visited_p)
	    break;
	  basic_block imm = get_immediate_dominator (CDI_DOMINATORS, src);
	  if (imm_dom_path_with_freeing_call (src, imm))
	    {
	      info->imm_dom_path_with_freeing_call_computed_p = true;
	      info->imm_dom_path_with_freeing_call_p = true;
	      info->being_visited_p = false;
	      return true;
	    }
	  src = imm;
	}
    }

  /* No freeing call found on any path; memoize the negative answer.  */
  info->imm_dom_path_with_freeing_call_computed_p = true;
  info->imm_dom_path_with_freeing_call_p = false;
  info->being_visited_p = false;
  return false;
}
289
290 /* Get the first dominating check from the list of stored checks.
291 Non-dominating checks are silently dropped. */
292
293 static gimple
294 maybe_get_dominating_check (auto_vec<gimple> &v)
295 {
296 for (; !v.is_empty (); v.pop ())
297 {
298 gimple g = v.last ();
299 sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
300 if (!si->visited_p)
301 /* At this point we shouldn't have any statements
302 that aren't dominating the current BB. */
303 return g;
304 }
305 return NULL;
306 }
307
308 /* Optimize away redundant UBSAN_NULL calls. */
309
310 static bool
311 maybe_optimize_ubsan_null_ifn (struct sanopt_ctx *ctx, gimple stmt)
312 {
313 gcc_assert (gimple_call_num_args (stmt) == 3);
314 tree ptr = gimple_call_arg (stmt, 0);
315 tree cur_align = gimple_call_arg (stmt, 2);
316 gcc_assert (TREE_CODE (cur_align) == INTEGER_CST);
317 bool remove = false;
318
319 auto_vec<gimple> &v = ctx->null_check_map.get_or_insert (ptr);
320 gimple g = maybe_get_dominating_check (v);
321 if (!g)
322 {
323 /* For this PTR we don't have any UBSAN_NULL stmts recorded, so there's
324 nothing to optimize yet. */
325 v.safe_push (stmt);
326 return false;
327 }
328
329 /* We already have recorded a UBSAN_NULL check for this pointer. Perhaps we
330 can drop this one. But only if this check doesn't specify stricter
331 alignment. */
332
333 tree align = gimple_call_arg (g, 2);
334 int kind = tree_to_shwi (gimple_call_arg (g, 1));
335 /* If this is a NULL pointer check where we had segv anyway, we can
336 remove it. */
337 if (integer_zerop (align)
338 && (kind == UBSAN_LOAD_OF
339 || kind == UBSAN_STORE_OF
340 || kind == UBSAN_MEMBER_ACCESS))
341 remove = true;
342 /* Otherwise remove the check in non-recovering mode, or if the
343 stmts have same location. */
344 else if (integer_zerop (align))
345 remove = (flag_sanitize_recover & SANITIZE_NULL) == 0
346 || flag_sanitize_undefined_trap_on_error
347 || gimple_location (g) == gimple_location (stmt);
348 else if (tree_int_cst_le (cur_align, align))
349 remove = (flag_sanitize_recover & SANITIZE_ALIGNMENT) == 0
350 || flag_sanitize_undefined_trap_on_error
351 || gimple_location (g) == gimple_location (stmt);
352
353 if (!remove && gimple_bb (g) == gimple_bb (stmt)
354 && tree_int_cst_compare (cur_align, align) == 0)
355 v.pop ();
356
357 if (!remove)
358 v.safe_push (stmt);
359 return remove;
360 }
361
362 /* Optimize away redundant UBSAN_VPTR calls. The second argument
363 is the value loaded from the virtual table, so rely on FRE to find out
364 when we can actually optimize. */
365
366 static bool
367 maybe_optimize_ubsan_vptr_ifn (struct sanopt_ctx *ctx, gimple stmt)
368 {
369 gcc_assert (gimple_call_num_args (stmt) == 5);
370 sanopt_tree_triplet triplet;
371 triplet.t1 = gimple_call_arg (stmt, 0);
372 triplet.t2 = gimple_call_arg (stmt, 1);
373 triplet.t3 = gimple_call_arg (stmt, 3);
374
375 auto_vec<gimple> &v = ctx->vptr_check_map.get_or_insert (triplet);
376 gimple g = maybe_get_dominating_check (v);
377 if (!g)
378 {
379 /* For this PTR we don't have any UBSAN_VPTR stmts recorded, so there's
380 nothing to optimize yet. */
381 v.safe_push (stmt);
382 return false;
383 }
384
385 return true;
386 }
387
/* Returns TRUE if ASan check of length LEN in block BB can be removed
   if preceded by checks in V.  As a side effect, compacts V when stale
   entries (invalidated by freeing calls, or superseded same-block
   shorter checks) are found.  */

static bool
can_remove_asan_check (auto_vec<gimple> &v, tree len, basic_block bb)
{
  unsigned int i;
  gimple g;
  gimple to_pop = NULL;
  bool remove = false;
  basic_block last_bb = bb;
  bool cleanup = false;

  /* Walk recorded checks from the most recent one backwards.  */
  FOR_EACH_VEC_ELT_REVERSE (v, i, g)
    {
      basic_block gbb = gimple_bb (g);
      sanopt_info *si = (sanopt_info *) gbb->aux;
      /* G's uid records GBB's freeing_call_events at the time G was
	 seen; a smaller value means a freeing call happened after G.  */
      if (gimple_uid (g) < si->freeing_call_events)
	{
	  /* If there is a potentially freeing call after g in gbb, we should
	     remove it from the vector, can't use in optimization.  */
	  cleanup = true;
	  continue;
	}

      tree glen = gimple_call_arg (g, 2);
      gcc_assert (TREE_CODE (glen) == INTEGER_CST);

      /* If we've checked only smaller length than we want to check now,
	 we can't remove the current stmt.  If g is in the same basic block,
	 we want to remove it though, as the current stmt is better.  */
      if (tree_int_cst_lt (glen, len))
	{
	  if (gbb == bb)
	    {
	      to_pop = g;
	      cleanup = true;
	    }
	  continue;
	}

      /* Climb the dominator chain from BB towards GBB, stopping if any
	 step may cross a freeing call.  */
      while (last_bb != gbb)
	{
	  /* Paths from last_bb to bb have been checked before.
	     gbb is necessarily a dominator of last_bb, but not necessarily
	     immediate dominator.  */
	  if (((sanopt_info *) last_bb->aux)->freeing_call_events)
	    break;

	  basic_block imm = get_immediate_dominator (CDI_DOMINATORS, last_bb);
	  gcc_assert (imm);
	  if (imm_dom_path_with_freeing_call (last_bb, imm))
	    break;

	  last_bb = imm;
	}
      /* Reached GBB without crossing a freeing call: G's check still
	 covers the access, so the current check is removable.  */
      if (last_bb == gbb)
	remove = true;
      break;
    }

  if (cleanup)
    {
      /* Compact V in place, keeping only entries that are not TO_POP
	 and have seen no freeing call since they were recorded.  */
      unsigned int j = 0, l = v.length ();
      for (i = 0; i < l; i++)
	if (v[i] != to_pop
	    && (gimple_uid (v[i])
		== ((sanopt_info *)
		    gimple_bb (v[i])->aux)->freeing_call_events))
	  {
	    if (i != j)
	      v[j] = v[i];
	    j++;
	  }
      v.truncate (j);
    }

  return remove;
}
467
/* Optimize away redundant ASAN_CHECK calls.  Returns true if STMT
   (an IFN_ASAN_CHECK call) is covered by a dominating check of the
   same pointer (or of its single-definition base address) and may be
   deleted; otherwise records STMT and returns false.  */

static bool
maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 4);
  tree ptr = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  basic_block bb = gimple_bb (stmt);
  sanopt_info *info = (sanopt_info *) bb->aux;

  /* Only constant, nonzero lengths can be compared against later.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;
  if (integer_zerop (len))
    return false;

  /* Stamp STMT with the number of freeing calls seen so far in BB, so
     can_remove_asan_check can tell if one happened after it.  */
  gimple_set_uid (stmt, info->freeing_call_events);

  auto_vec<gimple> *ptr_checks = &ctx->asan_check_map.get_or_insert (ptr);

  tree base_addr = maybe_get_single_definition (ptr);
  auto_vec<gimple> *base_checks = NULL;
  if (base_addr)
    {
      base_checks = &ctx->asan_check_map.get_or_insert (base_addr);
      /* Original pointer might have been invalidated.  */
      ptr_checks = ctx->asan_check_map.get (ptr);
    }

  gimple g = maybe_get_dominating_check (*ptr_checks);
  gimple g2 = NULL;

  if (base_checks)
    /* Try with base address as well.  */
    g2 = maybe_get_dominating_check (*base_checks);

  if (g == NULL && g2 == NULL)
    {
      /* For this PTR we don't have any ASAN_CHECK stmts recorded, so there's
	 nothing to optimize yet.  */
      ptr_checks->safe_push (stmt);
      if (base_checks)
	base_checks->safe_push (stmt);
      return false;
    }

  bool remove = false;

  if (ptr_checks)
    remove = can_remove_asan_check (*ptr_checks, len, bb);

  if (!remove && base_checks)
    /* Try with base address as well.  */
    remove = can_remove_asan_check (*base_checks, len, bb);

  /* Keep STMT recorded under both keys when it survives.  */
  if (!remove)
    {
      ptr_checks->safe_push (stmt);
      if (base_checks)
	base_checks->safe_push (stmt);
    }

  return remove;
}
532
/* Try to optimize away redundant UBSAN_NULL and ASAN_CHECK calls.

   We walk blocks in the CFG via a depth first search of the dominator
   tree; we push unique UBSAN_NULL or ASAN_CHECK statements into a vector
   in the NULL_CHECK_MAP or ASAN_CHECK_MAP hash maps as we enter the
   blocks.  When leaving a block, we mark the block as visited; then
   when checking the statements in the vector, we ignore statements that
   are coming from already visited blocks, because these cannot dominate
   anything anymore.  CTX is a sanopt context.  */

static void
sanopt_optimize_walker (basic_block bb, struct sanopt_ctx *ctx)
{
  basic_block son;
  gimple_stmt_iterator gsi;
  sanopt_info *info = (sanopt_info *) bb->aux;
  bool asan_check_optimize = (flag_sanitize & SANITIZE_ADDRESS) != 0;

  /* Walk BB's statements; gsi is advanced manually because removal
     already moves the iterator forward.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      bool remove = false;

      if (!is_gimple_call (stmt))
	{
	  /* Handle asm volatile or asm with "memory" clobber
	     the same as potentionally freeing call.  */
	  gasm *asm_stmt = dyn_cast <gasm *> (stmt);
	  if (asm_stmt
	      && asan_check_optimize
	      && (gimple_asm_clobbers_memory_p (asm_stmt)
		  || gimple_asm_volatile_p (asm_stmt)))
	    info->freeing_call_events++;
	  gsi_next (&gsi);
	  continue;
	}

      /* Count calls that may free memory; recorded ASAN_CHECKs before
	 this point become unusable for later redundancy proofs.  */
      if (asan_check_optimize && !nonfreeing_call_p (stmt))
	info->freeing_call_events++;

      if (gimple_call_internal_p (stmt))
	switch (gimple_call_internal_fn (stmt))
	  {
	  case IFN_UBSAN_NULL:
	    remove = maybe_optimize_ubsan_null_ifn (ctx, stmt);
	    break;
	  case IFN_UBSAN_VPTR:
	    remove = maybe_optimize_ubsan_vptr_ifn (ctx, stmt);
	    break;
	  case IFN_ASAN_CHECK:
	    if (asan_check_optimize)
	      remove = maybe_optimize_asan_check_ifn (ctx, stmt);
	    if (!remove)
	      ctx->asan_num_accesses++;
	    break;
	  default:
	    break;
	  }

      if (remove)
	{
	  /* Drop this check.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Optimizing out\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	}
      else
	gsi_next (&gsi);
    }

  /* Finalize this block's freeing-call summary for later queries.  */
  if (asan_check_optimize)
    {
      info->has_freeing_call_p = info->freeing_call_events != 0;
      info->has_freeing_call_computed_p = true;
    }

  /* Recurse into dominated blocks.  */
  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    sanopt_optimize_walker (son, ctx);

  /* We're leaving this BB, so mark it to that effect.  */
  info->visited_p = true;
}
622
623 /* Try to remove redundant sanitizer checks in function FUN. */
624
625 static int
626 sanopt_optimize (function *fun)
627 {
628 struct sanopt_ctx ctx;
629 ctx.asan_num_accesses = 0;
630
631 /* Set up block info for each basic block. */
632 alloc_aux_for_blocks (sizeof (sanopt_info));
633
634 /* We're going to do a dominator walk, so ensure that we have
635 dominance information. */
636 calculate_dominance_info (CDI_DOMINATORS);
637
638 /* Recursively walk the dominator tree optimizing away
639 redundant checks. */
640 sanopt_optimize_walker (ENTRY_BLOCK_PTR_FOR_FN (fun), &ctx);
641
642 free_aux_for_blocks ();
643
644 return ctx.asan_num_accesses;
645 }
646
647 /* Perform optimization of sanitize functions. */
648
649 namespace {
650
/* Pass metadata for the sanopt pass; see tree-pass.h for field
   semantics.  */
const pass_data pass_data_sanopt =
{
  GIMPLE_PASS, /* type */
  "sanopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};
663
/* The sanopt pass: optimizes and then expands sanitizer internal
   function calls.  Runs whenever any sanitizer is enabled.  */
class pass_sanopt : public gimple_opt_pass
{
public:
  pass_sanopt (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sanopt, ctxt)
  {}

  /* opt_pass methods: */
  /* Gate: run only when some -fsanitize= option is active.  */
  virtual bool gate (function *) { return flag_sanitize; }
  virtual unsigned int execute (function *);

}; // class pass_sanopt
676
/* Main pass body: first tries to remove redundant checks (when
   optimizing), then expands every remaining sanitizer internal
   function into real code.  Always returns 0.  */
unsigned int
pass_sanopt::execute (function *fun)
{
  basic_block bb;
  int asan_num_accesses = 0;

  /* Try to remove redundant checks.  */
  if (optimize
      && (flag_sanitize
	  & (SANITIZE_NULL | SANITIZE_ALIGNMENT
	     | SANITIZE_ADDRESS | SANITIZE_VPTR)))
    asan_num_accesses = sanopt_optimize (fun);
  else if (flag_sanitize & SANITIZE_ADDRESS)
    {
      /* Not optimizing: just count the ASAN_CHECK calls, the count
	 drives the inline-vs-call expansion decision below.  */
      gimple_stmt_iterator gsi;
      FOR_EACH_BB_FN (bb, fun)
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple stmt = gsi_stmt (gsi);
	    if (is_gimple_call (stmt) && gimple_call_internal_p (stmt)
		&& gimple_call_internal_fn (stmt) == IFN_ASAN_CHECK)
	      ++asan_num_accesses;
	  }
    }

  /* With many accesses, expand ASAN checks as library calls rather
     than inline code, to limit code-size growth.  */
  bool use_calls = ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD < INT_MAX
    && asan_num_accesses >= ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD;

  /* Expand the remaining sanitizer internal functions.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);
	  /* Expanders return true when they already advanced/replaced
	     the iterator; only call gsi_next otherwise.  */
	  bool no_next = false;

	  if (!is_gimple_call (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  if (gimple_call_internal_p (stmt))
	    {
	      enum internal_fn ifn = gimple_call_internal_fn (stmt);
	      switch (ifn)
		{
		case IFN_UBSAN_NULL:
		  no_next = ubsan_expand_null_ifn (&gsi);
		  break;
		case IFN_UBSAN_BOUNDS:
		  no_next = ubsan_expand_bounds_ifn (&gsi);
		  break;
		case IFN_UBSAN_OBJECT_SIZE:
		  no_next = ubsan_expand_objsize_ifn (&gsi);
		  break;
		case IFN_UBSAN_VPTR:
		  no_next = ubsan_expand_vptr_ifn (&gsi);
		  break;
		case IFN_ASAN_CHECK:
		  no_next = asan_expand_check_ifn (&gsi, use_calls);
		  break;
		default:
		  break;
		}
	    }
	  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	    {
	      tree callee = gimple_call_fndecl (stmt);
	      switch (DECL_FUNCTION_CODE (callee))
		{
		case BUILT_IN_UNREACHABLE:
		  if (flag_sanitize & SANITIZE_UNREACHABLE
		      && !lookup_attribute ("no_sanitize_undefined",
					    DECL_ATTRIBUTES (fun->decl)))
		    no_next = ubsan_instrument_unreachable (&gsi);
		  break;
		default:
		  break;
		}
	    }

	  /* NOTE(review): this dump fires for every call statement,
	     even ones no expander touched — presumably intentional
	     but noisy; confirm before relying on the dump output.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Expanded\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  if (!no_next)
	    gsi_next (&gsi);
	}
    }
  return 0;
}
772
773 } // anon namespace
774
/* Factory used by the pass manager to instantiate the sanopt pass.  */
gimple_opt_pass *
make_pass_sanopt (gcc::context *ctxt)
{
  return new pass_sanopt (ctxt);
}