/* Mudflap: narrow-pointer bounds-checking by tree rewriting.
   Copyright (C) 2002-2013 Free Software Foundation, Inc.
   Contributed by Frank Ch. Eigler <fche@redhat.com>
   and Graydon Hoare <graydon@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "tree-inline.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "tree-mudflap.h"
#include "tree-pass.h"
#include "hashtab.h"
#include "diagnostic.h"
#include "demangle.h"
#include "langhooks.h"
#include "ggc.h"
#include "cgraph.h"

extern void add_bb_to_loop (basic_block, struct loop *);

/* Internal function decls */


/* Options. */
#define flag_mudflap_threads (flag_mudflap == 2)
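/* (A note for orientation: -fmudflap sets flag_mudflap to 1, while the
   thread-aware -fmudflapth variant sets it to 2, which is what this
   macro tests.)  */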

/* Helpers. */
static tree mf_build_string (const char *string);
static tree mf_varname_tree (tree);
static tree mf_file_function_line_tree (location_t);

/* Indirection-related instrumentation. */
static void mf_decl_cache_locals (void);
static void mf_decl_clear_locals (void);
static void mf_xform_statements (void);
static unsigned int execute_mudflap_function_ops (void);

/* Addressable variables instrumentation. */
static void mf_xform_decls (gimple_seq, tree);
static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
                                struct walk_stmt_info *);
static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
static unsigned int execute_mudflap_function_decls (void);
/* Return true if DECL is an artificial stub that should not be
   instrumented by mudflap.  We do instrument clones of non-artificial
   functions.  */
static inline bool
mf_artificial (const_tree decl)
{
  return DECL_ARTIFICIAL (DECL_ORIGIN (decl));
}

/* ------------------------------------------------------------------------ */
/* Some generally helpful functions for mudflap instrumentation.  */

/* Build a reference to a literal string.  */
static tree
mf_build_string (const char *string)
{
  size_t len = strlen (string);
  tree result = mf_mark (build_string (len + 1, string));

  TREE_TYPE (result) = build_array_type
    (char_type_node, build_index_type (size_int (len)));
  TREE_CONSTANT (result) = 1;
  TREE_READONLY (result) = 1;
  TREE_STATIC (result) = 1;

  result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);

  return mf_mark (result);
}

/* Create a properly typed STRING_CST node that describes the given
   declaration.  It will be used as an argument for __mf_register().
   Try to construct a helpful string, including file/function/variable
   name.  */

static tree
mf_varname_tree (tree decl)
{
  const char *buf_contents;
  tree result;

  gcc_assert (decl);

  pretty_printer buf;
  pp_construct (&buf, /* prefix */ NULL, /* line-width */ 0);
  pp_clear_output_area (&buf);

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  {
    expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
    const char *sourcefile;
    unsigned sourceline = xloc.line;
    unsigned sourcecolumn = xloc.column;
    sourcefile = xloc.file;
    if (sourcefile == NULL && current_function_decl != NULL_TREE)
      sourcefile = DECL_SOURCE_FILE (current_function_decl);
    if (sourcefile == NULL)
      sourcefile = "<unknown file>";

    pp_string (&buf, sourcefile);

    if (sourceline != 0)
      {
        pp_colon (&buf);
        pp_decimal_int (&buf, sourceline);

        if (sourcecolumn != 0)
          {
            pp_colon (&buf);
            pp_decimal_int (&buf, sourcecolumn);
          }
      }
  }

  if (current_function_decl != NULL_TREE)
    {
      /* Add (FUNCTION) */
      pp_string (&buf, " (");
      {
        const char *funcname = NULL;
        if (DECL_NAME (current_function_decl))
          funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
        if (funcname == NULL)
          funcname = "anonymous fn";

        pp_string (&buf, funcname);
      }
      pp_string (&buf, ") ");
    }
  else
    pp_space (&buf);

  /* Add <variable-declaration>, possibly demangled.  */
  {
    const char *declname = NULL;

    if (DECL_NAME (decl) != NULL)
      {
        if (strcmp ("GNU C++", lang_hooks.name) == 0)
          {
            /* The gcc/cp decl_printable_name hook doesn't do as good a job as
               the libiberty demangler.  */
            declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
                                       DMGL_AUTO | DMGL_VERBOSE);
          }
        if (declname == NULL)
          declname = lang_hooks.decl_printable_name (decl, 3);
      }
    if (declname == NULL)
      declname = "<unnamed variable>";

    pp_string (&buf, declname);
  }

  /* Return the lot as a new STRING_CST.  */
  buf_contents = ggc_strdup (pp_formatted_text (&buf));
  result = mf_build_string (buf_contents);
  pp_clear_output_area (&buf);

  return result;
}
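
/* Example (illustrative only): for "int i;" declared at foo.c line 12,
   column 3, inside function main, the string built above reads
   roughly "foo.c:12:3 (main) i".  */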


/* And another friend, for producing a simpler message.  */

static tree
mf_file_function_line_tree (location_t location)
{
  expanded_location xloc = expand_location (location);
  const char *file = NULL, *colon, *line, *op, *name, *cp;
  char linecolbuf[30]; /* Enough for two decimal numbers plus a colon.  */
  char *string;
  tree result;

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  file = xloc.file;
  if (file == NULL && current_function_decl != NULL_TREE)
    file = DECL_SOURCE_FILE (current_function_decl);
  if (file == NULL)
    file = "<unknown file>";

  if (xloc.line > 0)
    {
      if (xloc.column > 0)
        sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
      else
        sprintf (linecolbuf, "%d", xloc.line);
      colon = ":";
      line = linecolbuf;
    }
  else
    colon = line = "";

  /* Add (FUNCTION).  */
  name = lang_hooks.decl_printable_name (current_function_decl, 1);
  if (name)
    {
      op = " (";
      cp = ")";
    }
  else
    op = name = cp = "";

  string = concat (file, colon, line, op, name, cp, NULL);
  result = mf_build_string (string);
  free (string);

  return result;
}


/* global tree nodes */

/* Global tree objects for global variables and functions exported by
   mudflap runtime library.  mudflap_init must be called before using
   these.  */

/* uintptr_t (usually "unsigned long") */
static GTY (()) tree mf_uintptr_type;

/* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
static GTY (()) tree mf_cache_struct_type;

/* struct __mf_cache * const */
static GTY (()) tree mf_cache_structptr_type;

/* extern struct __mf_cache __mf_lookup_cache []; */
static GTY (()) tree mf_cache_array_decl;

/* extern unsigned char __mf_lc_shift; */
static GTY (()) tree mf_cache_shift_decl;

/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;

/* Their function-scope local shadows, used in single-threaded mode only.  */

/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;

/* auto const uintptr_t __mf_lc_mask_l; */
static GTY (()) tree mf_cache_mask_decl_l;

/* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_check_fndecl;

/* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_register_fndecl;

/* extern void __mf_unregister (void *ptr, size_t sz, int type); */
static GTY (()) tree mf_unregister_fndecl;

/* extern void __mf_init (); */
static GTY (()) tree mf_init_fndecl;

/* extern int __mf_set_options (const char*); */
static GTY (()) tree mf_set_options_fndecl;

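/* For orientation, here is a rough sketch (assuming the mf-runtime.h
   semantics documented above) of how instrumented code uses these
   entry points; the mudflap2 pass below emits the GIMPLE equivalent
   of this C for each checked memory access:

     uintptr_t base = (uintptr_t) addr;
     struct __mf_cache *elem =
       &__mf_lookup_cache[(base >> __mf_lc_shift) & __mf_lc_mask];
     if (elem->low > base || elem->high < base + size - 1)
       __mf_check ((void *) base, size, dirflag, "location string");
*/
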
/* Helper for mudflap_init: construct a decl with the given category,
   name, and type, mark it an external reference, and pushdecl it.  */
static inline tree
mf_make_builtin (enum tree_code category, const char *name, tree type)
{
  tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
                                   category, get_identifier (name), type));
  TREE_PUBLIC (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  lang_hooks.decls.pushdecl (decl);
  /* The decl was declared by the compiler.  */
  DECL_ARTIFICIAL (decl) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (decl) = 1;
  return decl;
}

/* Helper for mudflap_init: construct a tree corresponding to the type
     struct __mf_cache { uintptr_t low; uintptr_t high; };
   where uintptr_t is the FIELD_TYPE argument.  */
static inline tree
mf_make_mf_cache_struct_type (tree field_type)
{
  /* There is, abominably, no language-independent way to construct a
     RECORD_TYPE.  So we have to call the basic type construction
     primitives by hand.  */
  tree fieldlo = build_decl (UNKNOWN_LOCATION,
                             FIELD_DECL, get_identifier ("low"), field_type);
  tree fieldhi = build_decl (UNKNOWN_LOCATION,
                             FIELD_DECL, get_identifier ("high"), field_type);

  tree struct_type = make_node (RECORD_TYPE);
  DECL_CONTEXT (fieldlo) = struct_type;
  DECL_CONTEXT (fieldhi) = struct_type;
  DECL_CHAIN (fieldlo) = fieldhi;
  TYPE_FIELDS (struct_type) = fieldlo;
  TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
  layout_type (struct_type);

  return struct_type;
}

/* Initialize the global tree nodes that correspond to mf-runtime.h
   declarations.  */
void
mudflap_init (void)
{
  static bool done = false;
  tree mf_const_string_type;
  tree mf_cache_array_type;
  tree mf_check_register_fntype;
  tree mf_unregister_fntype;
  tree mf_init_fntype;
  tree mf_set_options_fntype;

  if (done)
    return;
  done = true;

  mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
                                                    /*unsignedp=*/true);
  mf_const_string_type
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));

  mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
  mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
  mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
  mf_check_register_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, mf_const_string_type,
                              NULL_TREE);
  mf_unregister_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, NULL_TREE);
  mf_init_fntype =
    build_function_type_list (void_type_node, NULL_TREE);
  mf_set_options_fntype =
    build_function_type_list (integer_type_node, mf_const_string_type,
                              NULL_TREE);

  mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
                                         mf_cache_array_type);
  mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
                                         unsigned_char_type_node);
  mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
                                        mf_uintptr_type);
  /* Don't process these in mudflap_enqueue_decl, should they come by
     there for some reason.  */
  mf_mark (mf_cache_array_decl);
  mf_mark (mf_cache_shift_decl);
  mf_mark (mf_cache_mask_decl);
  mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
                                     mf_check_register_fntype);
  mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
                                        mf_check_register_fntype);
  mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
                                          mf_unregister_fntype);
  mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
                                    mf_init_fntype);
  mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
                                           mf_set_options_fntype);
}


/* ------------------------------------------------------------------------ */
/* This is the second part of the mudflap instrumentation.  It works on
   low-level GIMPLE using the CFG, because we want to run this pass after
   tree optimizations have been performed, but we have to preserve the CFG
   for expansion from trees to RTL.
   Below is the list of transformations performed on statements in the
   current function.

 1) Memory reference transforms: Perform the mudflap indirection-related
    tree transforms on memory references.

 2) Mark BUILTIN_ALLOCA calls not inlineable.  */

static unsigned int
execute_mudflap_function_ops (void)
{
  struct gimplify_ctx gctx;

  /* Don't instrument functions such as the synthetic constructor
     built during mudflap_finish_file.  */
  if (mf_marked_p (current_function_decl)
      || mf_artificial (current_function_decl))
    return 0;

  push_gimplify_context (&gctx);

  /* In multithreaded mode, don't cache the lookup cache parameters.  */
  if (! flag_mudflap_threads)
    mf_decl_cache_locals ();

  mf_xform_statements ();

  if (! flag_mudflap_threads)
    mf_decl_clear_locals ();

  pop_gimplify_context (NULL);
  return 0;
}

/* Insert a gimple_seq SEQ on all the outgoing edges out of BB.  Note that
   if BB has more than one edge, SEQ will be replicated for each edge.
   Also, abnormal edges will be ignored.  */

static void
insert_edge_copies_seq (gimple_seq seq, basic_block bb)
{
  edge e;
  edge_iterator ei;
  unsigned n_copies = -1;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      n_copies++;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
}

/* Create and initialize local shadow variables for the lookup cache
   globals.  Put their decls in the *_l globals for use by
   mf_build_check_statement_for.  */

static void
mf_decl_cache_locals (void)
{
  gimple g;
  gimple_seq seq = NULL;

  /* Build the cache vars.  */
  mf_cache_shift_decl_l
    = mf_mark (create_tmp_reg (TREE_TYPE (mf_cache_shift_decl),
                               "__mf_lookup_shift_l"));

  mf_cache_mask_decl_l
    = mf_mark (create_tmp_reg (TREE_TYPE (mf_cache_mask_decl),
                               "__mf_lookup_mask_l"));

  /* Build initialization nodes for the cache vars.  We just load the
     globals into the cache variables.  */
  g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);

  gsi_commit_edge_inserts ();
}


static void
mf_decl_clear_locals (void)
{
  /* Unset local shadows.  */
  mf_cache_shift_decl_l = NULL_TREE;
  mf_cache_mask_decl_l = NULL_TREE;
}

static void
mf_build_check_statement_for (tree base, tree limit,
                              gimple_stmt_iterator *instr_gsi,
                              location_t location, tree dirflag)
{
  gimple_stmt_iterator gsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;
  gimple g;
  gimple_seq seq, stmts;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
  gsi = *instr_gsi;
  gsi_prev (&gsi);
  if (! gsi_end_p (gsi))
    e = split_block (cond_bb, gsi_stmt (gsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (then_bb, cond_bb->loop_father);

  /* Build our local variables.  */
  mf_elem = create_tmp_reg (mf_cache_structptr_type, "__mf_elem");
  mf_base = create_tmp_reg (mf_uintptr_type, "__mf_base");
  mf_limit = create_tmp_reg (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  seq = NULL;
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (base));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_base, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (limit));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_limit, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              flag_mudflap_threads ? mf_cache_shift_decl
                                   : mf_cache_shift_decl_l);
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              flag_mudflap_threads ? mf_cache_mask_decl
                                   : mf_cache_mask_decl_l);
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_elem, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))
        {
          __mf_check ();
          ... and only if single-threaded:
          __mf_lookup_shift_l = ...;
          __mf_lookup_mask_l = ...;
        }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low > __mf_base'.  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
       1) u <-- '__mf_elem->high'
       2) v <-- '__mf_limit'.

     Then build 'u <-- (u < v)'.  */

  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  cond = create_tmp_reg (boolean_type_node, "__mf_unlikely_cond");
  g = gimple_build_assign (cond, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
                         NULL_TREE);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))

     The lowered GIMPLE tree representing this code is in the statement
     list starting at 'seq'.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /* Now build up the body of the cache-miss handling:

     __mf_check();
     refresh *_l vars.

     This is the body of the conditional.  */

  seq = NULL;
  /* u is a string, so it is already a gimple value.  */
  u = mf_file_function_line_tree (location);
  /* NB: we pass the overall [base..limit] range to mf_check.  */
  v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
                       fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                                        mf_limit, mf_base),
                       build_int_cst (mf_uintptr_type, 1));
  v = force_gimple_operand (v, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
  gimple_seq_add_stmt (&seq, g);

  if (! flag_mudflap_threads)
    {
      if (stmt_ends_bb_p (g))
        {
          gsi = gsi_start_bb (then_bb);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
          e = split_block (then_bb, g);
          then_bb = e->dest;
          seq = NULL;
        }

      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
      gimple_seq_add_stmt (&seq, g);

      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
      gimple_seq_add_stmt (&seq, g);
    }

  /* Insert the check code in the THEN block.  */
  gsi = gsi_start_bb (then_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  *instr_gsi = gsi_start_bb (join_bb);
}

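/* Taken together (a summary, not original commentary): for a checked
   access of SIZE bytes at ADDR, the sequence emitted above behaves like

     __mf_base = (uintptr_t) ADDR;
     __mf_limit = (uintptr_t) (ADDR + SIZE - 1);
     __mf_elem = &__mf_lookup_cache[(__mf_base >> shift) & mask];
     if (__mf_elem->low > __mf_base || __mf_elem->high < __mf_limit)
       __mf_check ((void *) __mf_base, __mf_limit - __mf_base + 1,
                   dirflag, "file:line (function)");

   where shift/mask are the globals themselves in threaded mode and the
   function-local shadow copies otherwise.  */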

/* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
   eligible for instrumentation.  For the mudflap1 pass, this implies
   that it should be registered with the libmudflap runtime.  For the
   mudflap2 pass this means instrumenting an indirection operation with
   respect to the object.  */
static int
mf_decl_eligible_p (tree decl)
{
  return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
          /* The decl must have its address taken.  In the case of
             arrays, this flag is also set if the indexes are not
             compile-time known valid constants.  */
          /* XXX: not sufficient: return-by-value structs! */
          && TREE_ADDRESSABLE (decl)
          /* The type of the variable must be complete.  */
          && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
          /* The decl hasn't been decomposed somehow.  */
          && !DECL_HAS_VALUE_EXPR_P (decl));
}

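/* For example (illustrative only): "int a[10]; ... p = &a[i];" forces
   'a' to be TREE_ADDRESSABLE, so 'a' is eligible; a scalar whose
   address is never taken remains ineligible and is left untouched.  */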

static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
        /* If we have a bitfield component reference, we must note the
           innermost addressable object in ELT, from which we will
           construct the byte-addressable bounds of the bitfield.  */
        tree elt = NULL_TREE;
        int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
            if (bitfield_ref_p && elt == NULL_TREE
                && (TREE_CODE (var) == ARRAY_REF
                    || TREE_CODE (var) == COMPONENT_REF))
              elt = var;

            if (TREE_CODE (var) == ARRAY_REF)
              {
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var)
                     || TREE_CODE (var) == MEM_REF)
              {
                base = TREE_OPERAND (var, 0);
                break;
              }
            else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
              {
                var = TREE_OPERAND (var, 0);
                if (CONSTANT_CLASS_P (var)
                    && TREE_CODE (var) != STRING_CST)
                  return;
              }
            else
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
                  {
                    base = build1 (ADDR_EXPR,
                                   build_pointer_type (TREE_TYPE (var)), var);
                    break;
                  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

            if (elt)
              elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
                            elt);
            addr = fold_convert_loc (location, ptr_type_node,
                                     elt ? elt : base);
            addr = fold_build_pointer_plus_loc (location,
                                                addr, byte_position (field));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                                 fold_build2_loc (location, PLUS_EXPR,
                                                  mf_uintptr_type,
                                                  fold_convert (mf_uintptr_type,
                                                                addr),
                                                  size),
                                 integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case MEM_REF:
      if (addr_expr_of_non_mem_decl_p (TREE_OPERAND (t, 0)))
        return;

      addr = fold_build_pointer_plus_loc (location, TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1));
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case TARGET_MEM_REF:
      if (addr_expr_of_non_mem_decl_p (TMR_BASE (t)))
        return;

      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
               "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above?  */
      {
        tree ofs, rem, bpu;

        /* If we're not dereferencing something, then the access
           must be ok.  */
        if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
          return;

        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = fold_convert (bitsizetype, TREE_OPERAND (t, 2));
        rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
        ofs = size_binop_loc (location, TRUNC_DIV_EXPR, ofs, bpu);

        size = fold_convert (bitsizetype, TREE_OPERAND (t, 1));
        size = size_binop_loc (location, PLUS_EXPR, size, rem);
        size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
        size = fold_convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = fold_convert (ptr_type_node, addr);
        addr = fold_build_pointer_plus_loc (location, addr, ofs);

        base = addr;
        limit = fold_build_pointer_plus_hwi_loc
          (location, fold_build_pointer_plus_loc (location, base, size), -1);
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, location, dirflag);
}

/* Transform
   1) Memory references.  */
static void
mf_xform_statements (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block;
  enum gimple_rhs_class grhs_class;

  bb = ENTRY_BLOCK_PTR->next_bb;
  do
    {
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple s = gsi_stmt (i);

          /* Only a few GIMPLE statements can reference memory.  */
          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
              mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
                                 gimple_location (s), integer_one_node);
              mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
                                 gimple_location (s), integer_zero_node);
              grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
              if (grhs_class == GIMPLE_BINARY_RHS)
                mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
                                   gimple_location (s), integer_zero_node);
              break;

            case GIMPLE_RETURN:
              if (gimple_return_retval (s) != NULL_TREE)
                {
                  mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
                                     gimple_location (s),
                                     integer_zero_node);
                }
              break;

            default:
              ;
            }
        }
      bb = next;
    }
  while (bb && bb->index <= saved_last_basic_block);
}

/* ------------------------------------------------------------------------ */
/* ADDR_EXPR transforms.  Perform the declaration-related mudflap tree
   transforms on the current function.

   This is the first part of the mudflap instrumentation.  It works on
   high-level GIMPLE because after lowering, all variables are moved out
   of their BIND_EXPR binding context, and we lose liveness information
   for the declarations we wish to instrument.  */

static unsigned int
execute_mudflap_function_decls (void)
{
  struct gimplify_ctx gctx;

  /* Don't instrument functions such as the synthetic constructor
     built during mudflap_finish_file.  */
  if (mf_marked_p (current_function_decl)
      || mf_artificial (current_function_decl))
    return 0;

  push_gimplify_context (&gctx);

  mf_xform_decls (gimple_body (current_function_decl),
                  DECL_ARGUMENTS (current_function_decl));

  pop_gimplify_context (NULL);
  return 0;
}

/* This struct is passed from mf_xform_decls to mx_xfn_xform_decls to
   store state needed during the traversal searching for objects that
   have their addresses taken.  */
struct mf_xform_decls_data
{
  tree param_decls;
};


/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.  */
static gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
          tree unregister_fncall_param, register_fncall_param;

          /* Variable-sized objects should have had their sizes
             gimplified by the time we get here.  */
          size = fold_convert (size_type_node,
                               TYPE_SIZE_UNIT (TREE_TYPE (decl)));
          gcc_assert (is_gimple_val (size));


          unregister_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
                                                 unregister_fncall_param,
                                                 size,
                                                 integer_three_node);


          variable_name = mf_varname_tree (decl);
          register_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
                            "name") */
          register_fncall = gimple_build_call (mf_register_fndecl, 4,
                                               register_fncall_param,
                                               size,
                                               integer_three_node,
                                               variable_name);


          /* Accumulate the two calls.  */
          gimple_set_location (register_fncall, location);
          gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.  */
          if (gsi_end_p (initially_stmts))
            {
              if (!mf_artificial (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
                         DECL_NAME (decl));
            }
          else
            {
              gsi_insert_before (&initially_stmts, register_fncall,
                                 GSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
            }
          mf_mark (decl);
        }

      decl = DECL_CHAIN (decl);
    }

  /* Actually, (initially_stmts != NULL) <=> (finally_stmts != NULL) */
  if (finally_stmts != NULL)
    {
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = NULL;

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}

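/* Conceptually (a sketch of the effect, not original commentary), a
   block "{ int a[10]; BODY }" where 'a' is addressable becomes

     {
       int a[10];
       __mf_register (&a, sizeof (a), __MF_TYPE_STACK, "...:n (fn) a");
       try { BODY }
       finally { __mf_unregister (&a, sizeof (a), __MF_TYPE_STACK); }
     }

   with __MF_TYPE_STACK being the constant 3 passed above.  */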

/* Process every variable mentioned in BIND_EXPRs.  */
static tree
mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
                    bool *handled_operands_p ATTRIBUTE_UNUSED,
                    struct walk_stmt_info *wi)
{
  struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
        /* Process function parameters now (but only once).  */
        if (d->param_decls)
          {
            gimple_bind_set_body (stmt,
                                  mx_register_decls (d->param_decls,
                                                     gimple_bind_body (stmt),
                                                     gimple_location (stmt)));
            d->param_decls = NULL_TREE;
          }

        gimple_bind_set_body (stmt,
                              mx_register_decls (gimple_bind_vars (stmt),
                                                 gimple_bind_body (stmt),
                                                 gimple_location (stmt)));
      }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Perform the object lifetime tracking mudflap transform on the given function
   tree.  The tree is mutated in place, with possibly copied subtree nodes.

   For every auto variable declared, if its address is ever taken
   within the function, then supply its lifetime to the mudflap
   runtime with the __mf_register and __mf_unregister calls.  */

static void
mf_xform_decls (gimple_seq fnbody, tree fnparams)
{
  struct mf_xform_decls_data d;
  struct walk_stmt_info wi;
  struct pointer_set_t *pset = pointer_set_create ();

  d.param_decls = fnparams;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void*) &d;
  wi.pset = pset;
  walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
  pointer_set_destroy (pset);
}


/* ------------------------------------------------------------------------ */
/* Externally visible mudflap functions.  */


/* Mark and return the given tree node to prevent further mudflap
   transforms.  */
static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;

tree
mf_mark (tree t)
{
  void **slot;

  if (marked_trees == NULL)
    marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
                                    NULL);

  slot = htab_find_slot (marked_trees, t, INSERT);
  *slot = t;
  return t;
}

int
mf_marked_p (tree t)
{
  void *entry;

  if (marked_trees == NULL)
    return 0;

  entry = htab_find (marked_trees, t);
  return (entry != NULL);
}

/* Remember the given node as a static of some kind: global data,
   function-scope static, or an anonymous constant.  */

/* A list of globals whose incomplete declarations we encountered.
   Instead of emitting the __mf_register call for them here, it's
   delayed until program finish time.  If they're still incomplete by
   then, warnings are emitted.  */

static GTY (()) vec<tree, va_gc> *deferred_static_decls;

/* A list of statements for calling __mf_register() at startup time.  */
static GTY (()) tree enqueued_call_stmt_chain;

static void
mudflap_register_call (tree obj, tree object_size, tree varname)
{
  tree arg, call_stmt;

  arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
  arg = fold_convert (ptr_type_node, arg);

  call_stmt = build_call_expr (mf_register_fndecl, 4,
                               arg,
                               fold_convert (size_type_node, object_size),
                               /* __MF_TYPE_STATIC */
                               build_int_cst (integer_type_node, 4),
                               varname);

  append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
}
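
/* Illustrative expansion (a sketch, inferred from the arguments above):
   for a global "char buf[100];" this queues the equivalent of

     __mf_register ((void *) &buf, 100, 4, "file.c buf");

   where 4 is __MF_TYPE_STATIC, for emission into the file-scope
   constructor built in mudflap_finish_file below.  */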

void
mudflap_enqueue_decl (tree obj)
{
  if (mf_marked_p (obj))
    return;

  /* We don't need to process variable decls that are internally
     generated extern.  If we did, we'd end up with warnings for them
     during mudflap_finish_file ().  That would confuse the user,
     since the text would refer to variables that don't show up in the
     user's source code.  */
  if (DECL_P (obj) && DECL_EXTERNAL (obj) && mf_artificial (obj))
    return;

  vec_safe_push (deferred_static_decls, obj);
}


void
mudflap_enqueue_constant (tree obj)
{
  tree object_size, varname;

  if (mf_marked_p (obj))
    return;

  if (TREE_CODE (obj) == STRING_CST)
    object_size = size_int (TREE_STRING_LENGTH (obj));
  else
    object_size = size_in_bytes (TREE_TYPE (obj));

  if (TREE_CODE (obj) == STRING_CST)
    varname = mf_build_string ("string literal");
  else
    varname = mf_build_string ("constant");

  mudflap_register_call (obj, object_size, varname);
}


/* Emit any file-wide instrumentation.  */
void
mudflap_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  /* No need to continue when there were errors.  */
  if (seen_error ())
    return;

  /* Insert a call to __mf_init.  */
  {
    tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
    append_to_statement_list (call2_stmt, &ctor_statements);
  }

  /* If appropriate, call __mf_set_options to pass along read-ignore mode.  */
  if (flag_mudflap_ignore_reads)
    {
      tree arg = mf_build_string ("-ignore-reads");
      tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
      append_to_statement_list (call_stmt, &ctor_statements);
    }

  /* Process all enqueued object decls.  */
  if (deferred_static_decls)
    {
      size_t i;
      tree obj;
      FOR_EACH_VEC_ELT (*deferred_static_decls, i, obj)
        {
          gcc_assert (DECL_P (obj));

          if (mf_marked_p (obj))
            continue;

          /* Omit registration for static unaddressed objects.  NB:
             Perform registration for non-static objects regardless of
             TREE_USED or TREE_ADDRESSABLE, because they may be used
             from other compilation units.  */
          if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
            continue;

          /* If we're neither emitting nor referencing the symbol,
             don't register it.  We have to register external symbols
             if they happen to be in other files not compiled with
             mudflap (say system libraries), and we must not register
             internal symbols that we don't emit or they'll become
             dangling references or force symbols to be emitted that
             didn't have to.  */
          if (!symtab_get_node (obj))
            continue;

          if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
            {
              warning (OPT_Wmudflap,
                       "mudflap cannot track unknown size extern %qE",
                       DECL_NAME (obj));
              continue;
            }

          mudflap_register_call (obj,
                                 size_in_bytes (TREE_TYPE (obj)),
                                 mf_varname_tree (obj));
        }

      deferred_static_decls->truncate (0);
    }

  /* Append all the enqueued registration calls.  */
  if (enqueued_call_stmt_chain)
    {
      append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
      enqueued_call_stmt_chain = NULL_TREE;
    }

  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}

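/* The net effect (a summary of the code above, not a guaranteed ABI):
   each translation unit compiled with -fmudflap gets one high-priority
   static constructor that runs, in order,

     __mf_init ();
     __mf_set_options ("-ignore-reads");  // only with -fmudflap-ignore-reads
     __mf_register (...);                 // one call per queued static/constant

   before ordinary user constructors execute.  */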

static bool
gate_mudflap (void)
{
  return flag_mudflap != 0;
}

namespace {

const pass_data pass_data_mudflap_1 =
{
  GIMPLE_PASS, /* type */
  "mudflap1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_mudflap_1 : public gimple_opt_pass
{
public:
  pass_mudflap_1 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_mudflap_1, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_mudflap (); }
  unsigned int execute () { return execute_mudflap_function_decls (); }

}; // class pass_mudflap_1

} // anon namespace

gimple_opt_pass *
make_pass_mudflap_1 (gcc::context *ctxt)
{
  return new pass_mudflap_1 (ctxt);
}

namespace {

const pass_data pass_data_mudflap_2 =
{
  GIMPLE_PASS, /* type */
  "mudflap2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_flow | TODO_verify_stmts
    | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_mudflap_2 : public gimple_opt_pass
{
public:
  pass_mudflap_2 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_mudflap_2, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_mudflap (); }
  unsigned int execute () { return execute_mudflap_function_ops (); }

}; // class pass_mudflap_2

} // anon namespace

gimple_opt_pass *
make_pass_mudflap_2 (gcc::context *ctxt)
{
  return new pass_mudflap_2 (ctxt);
}

#include "gt-tree-mudflap.h"