gcc/tree-mudflap.c
/* Mudflap: narrow-pointer bounds-checking by tree rewriting.
   Copyright (C) 2002-2013 Free Software Foundation, Inc.
   Contributed by Frank Ch. Eigler <fche@redhat.com>
   and Graydon Hoare <graydon@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "tree-inline.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "tree-mudflap.h"
#include "tree-pass.h"
#include "hashtab.h"
#include "diagnostic.h"
#include "demangle.h"
#include "langhooks.h"
#include "ggc.h"
43 #include "cgraph.h"
44 #include "gimple.h"
45
extern void add_bb_to_loop (basic_block, struct loop *);

/* Internal function decls */


/* Options.  */
#define flag_mudflap_threads (flag_mudflap == 2)

/* Helpers.  */
static tree mf_build_string (const char *string);
static tree mf_varname_tree (tree);
static tree mf_file_function_line_tree (location_t);

/* Indirection-related instrumentation.  */
static void mf_decl_cache_locals (void);
static void mf_decl_clear_locals (void);
static void mf_xform_statements (void);
static unsigned int execute_mudflap_function_ops (void);

/* Addressable variables instrumentation.  */
static void mf_xform_decls (gimple_seq, tree);
static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
                                struct walk_stmt_info *);
static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
static unsigned int execute_mudflap_function_decls (void);

/* Return true if DECL is an artificial stub that shouldn't be
   instrumented by mf.  We should instrument clones of non-artificial
   functions.  */
static inline bool
mf_artificial (const_tree decl)
{
  return DECL_ARTIFICIAL (DECL_ORIGIN (decl));
}

/* ------------------------------------------------------------------------ */
/* Some generally helpful functions for mudflap instrumentation.  */

/* Build a reference to a literal string.  */
static tree
mf_build_string (const char *string)
{
  size_t len = strlen (string);
  tree result = mf_mark (build_string (len + 1, string));

  TREE_TYPE (result) = build_array_type
    (char_type_node, build_index_type (size_int (len)));
  TREE_CONSTANT (result) = 1;
  TREE_READONLY (result) = 1;
  TREE_STATIC (result) = 1;

  result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);

  return mf_mark (result);
}
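
/* Illustration (not from the original source): mf_build_string ("foo")
   yields a tree for the address of a constant, static, read-only
   STRING_CST -- roughly the C expression (char *) &"foo"[0], with the
   array type covering the trailing NUL.  */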

/* Create a properly typed STRING_CST node that describes the given
   declaration.  It will be used as an argument for __mf_register().
   Try to construct a helpful string, including file/function/variable
   name.  */

static tree
mf_varname_tree (tree decl)
{
  static pretty_printer buf_rec;
  static int initialized = 0;
  pretty_printer *buf = & buf_rec;
  const char *buf_contents;
  tree result;

  gcc_assert (decl);

  if (!initialized)
    {
      pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
      initialized = 1;
    }
  pp_clear_output_area (buf);

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  {
    expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
    const char *sourcefile;
    unsigned sourceline = xloc.line;
    unsigned sourcecolumn = xloc.column;
    sourcefile = xloc.file;
    if (sourcefile == NULL && current_function_decl != NULL_TREE)
      sourcefile = DECL_SOURCE_FILE (current_function_decl);
    if (sourcefile == NULL)
      sourcefile = "<unknown file>";

    pp_string (buf, sourcefile);

    if (sourceline != 0)
      {
        pp_string (buf, ":");
        pp_decimal_int (buf, sourceline);

        if (sourcecolumn != 0)
          {
            pp_string (buf, ":");
            pp_decimal_int (buf, sourcecolumn);
          }
      }
  }

  if (current_function_decl != NULL_TREE)
    {
      /* Add (FUNCTION) */
      pp_string (buf, " (");
      {
        const char *funcname = NULL;
        if (DECL_NAME (current_function_decl))
          funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
        if (funcname == NULL)
          funcname = "anonymous fn";

        pp_string (buf, funcname);
      }
      pp_string (buf, ") ");
    }
  else
    pp_string (buf, " ");

  /* Add <variable-declaration>, possibly demangled.  */
  {
    const char *declname = NULL;

    if (DECL_NAME (decl) != NULL)
      {
        if (strcmp ("GNU C++", lang_hooks.name) == 0)
          {
            /* The gcc/cp decl_printable_name hook doesn't do as good a job as
               the libiberty demangler.  */
            declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
                                       DMGL_AUTO | DMGL_VERBOSE);
          }
        if (declname == NULL)
          declname = lang_hooks.decl_printable_name (decl, 3);
      }
    if (declname == NULL)
      declname = "<unnamed variable>";

    pp_string (buf, declname);
  }

  /* Return the lot as a new STRING_CST.  */
  buf_contents = pp_base_formatted_text (buf);
  result = mf_build_string (buf_contents);
  pp_clear_output_area (buf);

  return result;
}
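
/* Illustration (hypothetical source, not from this file): for

     int x;      declared at foo.c line 3, column 12, inside function foo

   mf_varname_tree would produce a string along the lines of
   "foo.c:3:12 (foo) x": FILE:LINE:COLUMN, the enclosing function in
   parentheses, then the declaration's printable name.  */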


/* And another friend, for producing a simpler message.  */

static tree
mf_file_function_line_tree (location_t location)
{
  expanded_location xloc = expand_location (location);
  const char *file = NULL, *colon, *line, *op, *name, *cp;
  char linecolbuf[30]; /* Enough for two decimal numbers plus a colon.  */
  char *string;
  tree result;

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  file = xloc.file;
  if (file == NULL && current_function_decl != NULL_TREE)
    file = DECL_SOURCE_FILE (current_function_decl);
  if (file == NULL)
    file = "<unknown file>";

  if (xloc.line > 0)
    {
      if (xloc.column > 0)
        sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
      else
        sprintf (linecolbuf, "%d", xloc.line);
      colon = ":";
      line = linecolbuf;
    }
  else
    colon = line = "";

  /* Add (FUNCTION).  */
  name = lang_hooks.decl_printable_name (current_function_decl, 1);
  if (name)
    {
      op = " (";
      cp = ")";
    }
  else
    op = name = cp = "";

  string = concat (file, colon, line, op, name, cp, NULL);
  result = mf_build_string (string);
  free (string);

  return result;
}
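
/* Example (illustration only): a check located at foo.c line 10,
   column 3, inside function bar, yields the string "foo.c:10:3 (bar)".  */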


/* Global tree nodes.  */

/* Global tree objects for global variables and functions exported by
   the mudflap runtime library.  mudflap_init must be called before
   using these.  */

/* uintptr_t (usually "unsigned long") */
static GTY (()) tree mf_uintptr_type;

/* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
static GTY (()) tree mf_cache_struct_type;

/* struct __mf_cache * const */
static GTY (()) tree mf_cache_structptr_type;

/* extern struct __mf_cache __mf_lookup_cache []; */
static GTY (()) tree mf_cache_array_decl;

/* extern unsigned char __mf_lc_shift; */
static GTY (()) tree mf_cache_shift_decl;

/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;

/* Their function-scope local shadows, used in single-threaded mode only.  */

/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;

/* auto const uintptr_t __mf_lc_mask_l; */
static GTY (()) tree mf_cache_mask_decl_l;

/* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_check_fndecl;

/* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_register_fndecl;

/* extern void __mf_unregister (void *ptr, size_t sz, int type); */
static GTY (()) tree mf_unregister_fndecl;

/* extern void __mf_init (); */
static GTY (()) tree mf_init_fndecl;

/* extern int __mf_set_options (const char*); */
static GTY (()) tree mf_set_options_fndecl;


/* Helper for mudflap_init: construct a decl with the given category,
   name, and type, mark it an external reference, and pushdecl it.  */
static inline tree
mf_make_builtin (enum tree_code category, const char *name, tree type)
{
  tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
                                   category, get_identifier (name), type));
  TREE_PUBLIC (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  lang_hooks.decls.pushdecl (decl);
  /* The decl was declared by the compiler.  */
  DECL_ARTIFICIAL (decl) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (decl) = 1;
  return decl;
}

/* Helper for mudflap_init: construct a tree corresponding to the type
     struct __mf_cache { uintptr_t low; uintptr_t high; };
   where uintptr_t is the FIELD_TYPE argument.  */
static inline tree
mf_make_mf_cache_struct_type (tree field_type)
{
  /* There is, abominably, no language-independent way to construct a
     RECORD_TYPE.  So we have to call the basic type construction
     primitives by hand.  */
  tree fieldlo = build_decl (UNKNOWN_LOCATION,
                             FIELD_DECL, get_identifier ("low"), field_type);
  tree fieldhi = build_decl (UNKNOWN_LOCATION,
                             FIELD_DECL, get_identifier ("high"), field_type);

  tree struct_type = make_node (RECORD_TYPE);
  DECL_CONTEXT (fieldlo) = struct_type;
  DECL_CONTEXT (fieldhi) = struct_type;
  DECL_CHAIN (fieldlo) = fieldhi;
  TYPE_FIELDS (struct_type) = fieldlo;
  TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
  layout_type (struct_type);

  return struct_type;
}

/* Initialize the global tree nodes that correspond to mf-runtime.h
   declarations.  */
void
mudflap_init (void)
{
  static bool done = false;
  tree mf_const_string_type;
  tree mf_cache_array_type;
  tree mf_check_register_fntype;
  tree mf_unregister_fntype;
  tree mf_init_fntype;
  tree mf_set_options_fntype;

  if (done)
    return;
  done = true;

  mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
                                                    /*unsignedp=*/true);
  mf_const_string_type
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));

  mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
  mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
  mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
  mf_check_register_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, mf_const_string_type,
                              NULL_TREE);
  mf_unregister_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, NULL_TREE);
  mf_init_fntype =
    build_function_type_list (void_type_node, NULL_TREE);
  mf_set_options_fntype =
    build_function_type_list (integer_type_node, mf_const_string_type,
                              NULL_TREE);

  mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
                                         mf_cache_array_type);
  mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
                                         unsigned_char_type_node);
  mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
                                        mf_uintptr_type);
  /* Don't process these in mudflap_enqueue_decl, should they come by
     there for some reason.  */
  mf_mark (mf_cache_array_decl);
  mf_mark (mf_cache_shift_decl);
  mf_mark (mf_cache_mask_decl);
  mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
                                     mf_check_register_fntype);
  mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
                                        mf_check_register_fntype);
  mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
                                          mf_unregister_fntype);
  mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
                                    mf_init_fntype);
  mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
                                           mf_set_options_fntype);
}


/* ------------------------------------------------------------------------ */
/* This is the second part of the mudflap instrumentation.  It works on
   low-level GIMPLE using the CFG, because we want to run this pass after
   tree optimizations have been performed, but we have to preserve the CFG
   for expansion from trees to RTL.
   Below is the list of transformations performed on statements in the
   current function.

     1) Memory reference transforms: Perform the mudflap indirection-related
        tree transforms on memory references.

     2) Mark BUILTIN_ALLOCA calls not inlineable.

 */
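
/* As an illustration (a sketch of the net effect, not literal pass
   output), instrumenting the write

     *p = v;

   conceptually yields

     __mf_base = (uintptr_t) p;
     __mf_limit = __mf_base + sizeof (*p) - 1;
     __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_lc_shift)
                                     & __mf_lc_mask];
     if (__mf_elem->low > __mf_base || __mf_elem->high < __mf_limit)
       __mf_check ((void *) __mf_base, sizeof (*p),
                   1, "file:line (fn)");      (dirflag 1 == write)
     *p = v;

   See mf_build_check_statement_for below for the real construction.  */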

static unsigned int
execute_mudflap_function_ops (void)
{
  struct gimplify_ctx gctx;

  /* Don't instrument functions such as the synthetic constructor
     built during mudflap_finish_file.  */
  if (mf_marked_p (current_function_decl)
      || mf_artificial (current_function_decl))
    return 0;

  push_gimplify_context (&gctx);

  /* In multithreaded mode, don't cache the lookup cache parameters.  */
  if (! flag_mudflap_threads)
    mf_decl_cache_locals ();

  mf_xform_statements ();

  if (! flag_mudflap_threads)
    mf_decl_clear_locals ();

  pop_gimplify_context (NULL);
  return 0;
}

/* Insert a gimple_seq SEQ on all the outgoing edges out of BB.  Note that
   if BB has more than one edge, SEQ will be replicated for each edge.
   Also, abnormal edges will be ignored.  */

static void
insert_edge_copies_seq (gimple_seq seq, basic_block bb)
{
  edge e;
  edge_iterator ei;
  unsigned n_copies = -1;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      n_copies++;

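  /* At this point N_COPIES holds the number of interesting (non-abnormal)
     edges minus one: the initial (unsigned) -1 cancels the first increment.
     The last such edge visited below therefore receives SEQ itself, while
     every earlier one gets a fresh copy.  */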
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
}

/* Create and initialize local shadow variables for the lookup cache
   globals.  Put their decls in the *_l globals for use by
   mf_build_check_statement_for.  */

static void
mf_decl_cache_locals (void)
{
  gimple g;
  gimple_seq seq = NULL;

  /* Build the cache vars.  */
  mf_cache_shift_decl_l
    = mf_mark (create_tmp_reg (TREE_TYPE (mf_cache_shift_decl),
                               "__mf_lookup_shift_l"));

  mf_cache_mask_decl_l
    = mf_mark (create_tmp_reg (TREE_TYPE (mf_cache_mask_decl),
                               "__mf_lookup_mask_l"));

  /* Build initialization nodes for the cache vars.  We just load the
     globals into the cache variables.  */
  g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);

  gsi_commit_edge_inserts ();
}


static void
mf_decl_clear_locals (void)
{
  /* Unset local shadows.  */
  mf_cache_shift_decl_l = NULL_TREE;
  mf_cache_mask_decl_l = NULL_TREE;
}

static void
mf_build_check_statement_for (tree base, tree limit,
                              gimple_stmt_iterator *instr_gsi,
                              location_t location, tree dirflag)
{
  gimple_stmt_iterator gsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;
  gimple g;
  gimple_seq seq, stmts;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
  gsi = *instr_gsi;
  gsi_prev (&gsi);
  if (! gsi_end_p (gsi))
    e = split_block (cond_bb, gsi_stmt (gsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that join_bb's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (then_bb, cond_bb->loop_father);

  /* Build our local variables.  */
  mf_elem = create_tmp_reg (mf_cache_structptr_type, "__mf_elem");
  mf_base = create_tmp_reg (mf_uintptr_type, "__mf_base");
  mf_limit = create_tmp_reg (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  seq = NULL;
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (base));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_base, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (limit));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_limit, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              flag_mudflap_threads ? mf_cache_shift_decl
                                   : mf_cache_shift_decl_l);
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              flag_mudflap_threads ? mf_cache_mask_decl
                                   : mf_cache_mask_decl_l);
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_elem, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Quick validity check.

       if (__mf_elem->low > __mf_base
           || (__mf_elem->high < __mf_limit))
         {
           __mf_check ();
           ... and only if single-threaded:
           __mf_lookup_shift_l = ...;
           __mf_lookup_mask_l = ...;
         }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low > __mf_base'.  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
       1) u <-- '__mf_elem->high'
       2) v <-- '__mf_limit'.

     Then build 'u <-- (u < v)'.  */

  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  cond = create_tmp_reg (boolean_type_node, "__mf_unlikely_cond");
  g = gimple_build_assign (cond, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
                         NULL_TREE);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

       if (__mf_elem->low > __mf_base
           || (__mf_elem->high < __mf_limit))

     The lowered GIMPLE tree representing this code is in the statement
     list starting at 'seq'.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /* Now build up the body of the cache-miss handling:

       __mf_check();
       refresh *_l vars.

     This is the body of the conditional.  */

  seq = NULL;
  /* u is a string, so it is already a gimple value.  */
  u = mf_file_function_line_tree (location);
  /* NB: we pass the overall [base..limit] range to mf_check.  */
  v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
                       fold_build2_loc (location,
                                        MINUS_EXPR, mf_uintptr_type,
                                        mf_limit, mf_base),
                       build_int_cst (mf_uintptr_type, 1));
  v = force_gimple_operand (v, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
  gimple_seq_add_stmt (&seq, g);

  if (! flag_mudflap_threads)
    {
      if (stmt_ends_bb_p (g))
        {
          gsi = gsi_start_bb (then_bb);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
          e = split_block (then_bb, g);
          then_bb = e->dest;
          seq = NULL;
        }

      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
      gimple_seq_add_stmt (&seq, g);

      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
      gimple_seq_add_stmt (&seq, g);
    }

  /* Insert the check code in the THEN block.  */
  gsi = gsi_start_bb (then_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  *instr_gsi = gsi_start_bb (join_bb);
}
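
/* Sketch of the resulting CFG (illustration only):

     cond_bb:  ...statements before the access...
               __mf_base/__mf_limit/__mf_elem computation
               if (__mf_unlikely_cond) goto then_bb; else goto join_bb;
     then_bb:  __mf_check (...);
               [single-threaded only: refresh the *_l shadow copies]
     join_bb:  the statement being instrumented, and what followed it.  */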


/* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
   eligible for instrumentation.  For the mudflap1 pass, this implies
   that it should be registered with the libmudflap runtime.  For the
   mudflap2 pass this means instrumenting an indirection operation with
   respect to the object.
*/
static int
mf_decl_eligible_p (tree decl)
{
  return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
          /* The decl must have its address taken.  In the case of
             arrays, this flag is also set if the indexes are not
             compile-time known valid constants.  */
          /* XXX: not sufficient: return-by-value structs! */
          && TREE_ADDRESSABLE (decl)
          /* The type of the variable must be complete.  */
          && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
          /* The decl hasn't been decomposed somehow.  */
          && !DECL_HAS_VALUE_EXPR_P (decl));
}
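
/* For instance (hypothetical code, for illustration): given

     int buf[16];
     int *p = &buf[0];

   BUF is TREE_ADDRESSABLE because its address escapes, so it passes
   this test; a local scalar whose address is never taken does not,
   and is left uninstrumented.  */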


static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
        /* If we have a bitfield component reference, we must note the
           innermost addressable object in ELT, from which we will
           construct the byte-addressable bounds of the bitfield.  */
        tree elt = NULL_TREE;
        int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
            if (bitfield_ref_p && elt == NULL_TREE
                && (TREE_CODE (var) == ARRAY_REF
                    || TREE_CODE (var) == COMPONENT_REF))
              elt = var;

            if (TREE_CODE (var) == ARRAY_REF)
              {
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var)
                     || TREE_CODE (var) == MEM_REF)
              {
                base = TREE_OPERAND (var, 0);
                break;
              }
            else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
              {
                var = TREE_OPERAND (var, 0);
                if (CONSTANT_CLASS_P (var)
                    && TREE_CODE (var) != STRING_CST)
                  return;
              }
            else
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
                  {
                    base = build1 (ADDR_EXPR,
                                   build_pointer_type (TREE_TYPE (var)), var);
                    break;
                  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

            if (elt)
              elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
                            elt);
            addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
            addr = fold_build_pointer_plus_loc (location,
                                                addr, byte_position (field));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                                 fold_build2_loc (location, PLUS_EXPR,
                                                  mf_uintptr_type,
                                                  fold_convert (mf_uintptr_type,
                                                                addr),
                                                  size),
                                 integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case MEM_REF:
      if (addr_expr_of_non_mem_decl_p (TREE_OPERAND (t, 0)))
        return;

      addr = fold_build_pointer_plus_loc (location, TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1));
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case TARGET_MEM_REF:
      if (addr_expr_of_non_mem_decl_p (TMR_BASE (t)))
        return;

      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
               "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above?  */
      {
        tree ofs, rem, bpu;

        /* If we're not dereferencing something, then the access
           must be ok.  */
        if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
          return;

        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = fold_convert (bitsizetype, TREE_OPERAND (t, 2));
        rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
        ofs = size_binop_loc (location, TRUNC_DIV_EXPR, ofs, bpu);

        size = fold_convert (bitsizetype, TREE_OPERAND (t, 1));
        size = size_binop_loc (location, PLUS_EXPR, size, rem);
        size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
        size = fold_convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = fold_convert (ptr_type_node, addr);
        addr = fold_build_pointer_plus_loc (location, addr, ofs);

        base = addr;
        limit = fold_build_pointer_plus_hwi_loc
          (location, fold_build_pointer_plus_loc (location, base, size), -1);
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, location, dirflag);
}
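
/* Worked example of the bounds computed above (a sketch): for a
   dereference '*p' where *p has type T, base == p and
   limit == p + sizeof (T) - 1, so the runtime check covers exactly
   the bytes [p, p + sizeof (T) - 1].  */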

/* Transform
   1) Memory references.
*/
static void
mf_xform_statements (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block;
  enum gimple_rhs_class grhs_class;

  bb = ENTRY_BLOCK_PTR->next_bb;
  do
    {
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple s = gsi_stmt (i);

          /* Only a few GIMPLE statements can reference memory.  */
          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
              mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
                                 gimple_location (s), integer_one_node);
              mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
                                 gimple_location (s), integer_zero_node);
              grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
              if (grhs_class == GIMPLE_BINARY_RHS)
                mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
                                   gimple_location (s), integer_zero_node);
              break;

            case GIMPLE_RETURN:
              if (gimple_return_retval (s) != NULL_TREE)
                {
                  mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
                                     gimple_location (s),
                                     integer_zero_node);
                }
              break;

            default:
              ;
            }
        }
      bb = next;
    }
  while (bb && bb->index <= saved_last_basic_block);
}

/* ------------------------------------------------------------------------ */
/* ADDR_EXPR transforms.  Perform the declaration-related mudflap tree
   transforms on the current function.

   This is the first part of the mudflap instrumentation.  It works on
   high-level GIMPLE because after lowering, all variables are moved out
   of their BIND_EXPR binding context, and we lose liveness information
   for the declarations we wish to instrument.  */

static unsigned int
execute_mudflap_function_decls (void)
{
  struct gimplify_ctx gctx;

  /* Don't instrument functions such as the synthetic constructor
     built during mudflap_finish_file.  */
  if (mf_marked_p (current_function_decl)
      || mf_artificial (current_function_decl))
    return 0;

  push_gimplify_context (&gctx);

  mf_xform_decls (gimple_body (current_function_decl),
                  DECL_ARGUMENTS (current_function_decl));

  pop_gimplify_context (NULL);
  return 0;
}

/* This struct is passed through the statement walk in mf_xform_decls,
   carrying state needed while searching for objects that have their
   addresses taken.  */
struct mf_xform_decls_data
{
  tree param_decls;
};


/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.  */
static gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
          tree unregister_fncall_param, register_fncall_param;

          /* Variable-sized objects should have their sizes gimplified
             by the time we get here.  */
          size = fold_convert (size_type_node,
                               TYPE_SIZE_UNIT (TREE_TYPE (decl)));
          gcc_assert (is_gimple_val (size));

          unregister_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
                                                 unregister_fncall_param,
                                                 size,
                                                 integer_three_node);

          variable_name = mf_varname_tree (decl);
          register_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
             "name") */
          register_fncall = gimple_build_call (mf_register_fndecl, 4,
                                               register_fncall_param,
                                               size,
                                               integer_three_node,
                                               variable_name);

          /* Accumulate the two calls.  */
          gimple_set_location (register_fncall, location);
          gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.  */
          if (gsi_end_p (initially_stmts))
            {
              if (!mf_artificial (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
                         DECL_NAME (decl));
            }
          else
            {
              gsi_insert_before (&initially_stmts, register_fncall,
                                 GSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
            }
          mf_mark (decl);
        }

      decl = DECL_CHAIN (decl);
    }

  /* Actually, (initially_stmts != NULL) <=> (finally_stmts != NULL).  */
  if (finally_stmts != NULL)
    {
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = NULL;

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}
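
/* The net effect for an eligible local V (a sketch, not literal
   output; __MF_TYPE_STACK is the integer 3 passed above):

     __mf_register (&v, sizeof (v), __MF_TYPE_STACK, "file:line (fn) v");
     try
       {
         ... original bind-block body ...
       }
     finally
       {
         __mf_unregister (&v, sizeof (v), __MF_TYPE_STACK);
       }
*/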


/* Process every variable mentioned in BIND_EXPRs.  */
static tree
mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
                    bool *handled_operands_p ATTRIBUTE_UNUSED,
                    struct walk_stmt_info *wi)
{
  struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
        /* Process function parameters now (but only once).  */
        if (d->param_decls)
          {
            gimple_bind_set_body (stmt,
                                  mx_register_decls (d->param_decls,
                                                     gimple_bind_body (stmt),
                                                     gimple_location (stmt)));
            d->param_decls = NULL_TREE;
          }

        gimple_bind_set_body (stmt,
                              mx_register_decls (gimple_bind_vars (stmt),
                                                 gimple_bind_body (stmt),
                                                 gimple_location (stmt)));
      }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Perform the object lifetime tracking mudflap transform on the given function
   tree.  The tree is mutated in place, with possibly copied subtree nodes.

   For every auto variable declared, if its address is ever taken
   within the function, then supply its lifetime to the mudflap
   runtime with the __mf_register and __mf_unregister calls.
*/

static void
mf_xform_decls (gimple_seq fnbody, tree fnparams)
{
  struct mf_xform_decls_data d;
  struct walk_stmt_info wi;
  struct pointer_set_t *pset = pointer_set_create ();

  d.param_decls = fnparams;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void*) &d;
  wi.pset = pset;
  walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
  pointer_set_destroy (pset);
}


/* ------------------------------------------------------------------------ */
/* Externally visible mudflap functions.  */


/* Mark and return the given tree node to prevent further mudflap
   transforms.  */
static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;

tree
mf_mark (tree t)
{
  void **slot;

  if (marked_trees == NULL)
    marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
                                    NULL);

  slot = htab_find_slot (marked_trees, t, INSERT);
  *slot = t;
  return t;
}

int
mf_marked_p (tree t)
{
  void *entry;

  if (marked_trees == NULL)
    return 0;

  entry = htab_find (marked_trees, t);
  return (entry != NULL);
}

/* Remember given node as a static of some kind: global data,
   function-scope static, or an anonymous constant.  Its assembler
   label is given.  */

/* A list of globals whose incomplete declarations we encountered.
   Instead of emitting the __mf_register call for them here, it's
   delayed until program finish time.  If they're still incomplete by
   then, warnings are emitted.  */

static GTY (()) vec<tree, va_gc> *deferred_static_decls;

/* A list of statements for calling __mf_register() at startup time.  */
static GTY (()) tree enqueued_call_stmt_chain;

static void
mudflap_register_call (tree obj, tree object_size, tree varname)
{
  tree arg, call_stmt;

  arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
  arg = fold_convert (ptr_type_node, arg);

  call_stmt = build_call_expr (mf_register_fndecl, 4,
                               arg,
                               fold_convert (size_type_node, object_size),
                               /* __MF_TYPE_STATIC */
                               build_int_cst (integer_type_node, 4),
                               varname);

  append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
}

void
mudflap_enqueue_decl (tree obj)
{
  if (mf_marked_p (obj))
    return;

  /* We don't need to process variable decls that are internally
     generated extern.  If we did, we'd end up with warnings for them
     during mudflap_finish_file ().  That would confuse the user,
     since the text would refer to variables that don't show up in the
     user's source code.  */
  if (DECL_P (obj) && DECL_EXTERNAL (obj) && mf_artificial (obj))
    return;

  vec_safe_push (deferred_static_decls, obj);
}


void
mudflap_enqueue_constant (tree obj)
{
  tree object_size, varname;

  if (mf_marked_p (obj))
    return;

  if (TREE_CODE (obj) == STRING_CST)
    object_size = size_int (TREE_STRING_LENGTH (obj));
  else
    object_size = size_in_bytes (TREE_TYPE (obj));

  if (TREE_CODE (obj) == STRING_CST)
    varname = mf_build_string ("string literal");
  else
    varname = mf_build_string ("constant");

  mudflap_register_call (obj, object_size, varname);
}


/* Emit any file-wide instrumentation.  */
void
mudflap_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  /* No need to continue when there were errors.  */
  if (seen_error ())
    return;

  /* Insert a call to __mf_init.  */
  {
    tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
    append_to_statement_list (call2_stmt, &ctor_statements);
  }

  /* If appropriate, call __mf_set_options to pass along read-ignore mode.  */
  if (flag_mudflap_ignore_reads)
    {
      tree arg = mf_build_string ("-ignore-reads");
      tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
      append_to_statement_list (call_stmt, &ctor_statements);
    }

  /* Process all enqueued object decls.  */
  if (deferred_static_decls)
    {
      size_t i;
      tree obj;
      FOR_EACH_VEC_ELT (*deferred_static_decls, i, obj)
        {
          gcc_assert (DECL_P (obj));

          if (mf_marked_p (obj))
            continue;

          /* Omit registration for static unaddressed objects.  NB:
             Perform registration for non-static objects regardless of
             TREE_USED or TREE_ADDRESSABLE, because they may be used
             from other compilation units.  */
          if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
            continue;

          /* If we're neither emitting nor referencing the symbol,
             don't register it.  We have to register external symbols
             if they happen to be in other files not compiled with
             mudflap (say system libraries), and we must not register
             internal symbols that we don't emit or they'll become
             dangling references or force symbols to be emitted that
             didn't have to.  */
          if (!symtab_get_node (obj))
            continue;

          if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
            {
              warning (OPT_Wmudflap,
                       "mudflap cannot track unknown size extern %qE",
                       DECL_NAME (obj));
              continue;
            }

          mudflap_register_call (obj,
                                 size_in_bytes (TREE_TYPE (obj)),
                                 mf_varname_tree (obj));
        }

      deferred_static_decls->truncate (0);
    }

  /* Append all the enqueued registration calls.  */
  if (enqueued_call_stmt_chain)
    {
      append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
      enqueued_call_stmt_chain = NULL_TREE;
    }

  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY-1);
}
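
/* The constructor synthesized above behaves roughly as if the
   translation unit contained the following (a sketch; 4 is
   __MF_TYPE_STATIC, and the __mf_set_options call appears only
   with read-ignore mode enabled):

     static void ctor (void)   // runs at MAX_RESERVED_INIT_PRIORITY-1
     {
       __mf_init ();
       __mf_set_options ("-ignore-reads");
       __mf_register (&obj, sizeof (obj), 4, "file (fn) obj");
       ...
     }
*/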


static bool
gate_mudflap (void)
{
  return flag_mudflap != 0;
}

struct gimple_opt_pass pass_mudflap_1 =
{
 {
  GIMPLE_PASS,
  "mudflap1",                           /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_decls,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_mudflap_2 =
{
 {
  GIMPLE_PASS,
  "mudflap2",                           /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_ops,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_update_ssa                     /* todo_flags_finish */
 }
};

#include "gt-tree-mudflap.h"