tree-pretty-print.c (dump_location): New.
[gcc.git] / gcc / tree-mudflap.c
1 /* Mudflap: narrow-pointer bounds-checking by tree rewriting.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Frank Ch. Eigler <fche@redhat.com>
5 and Graydon Hoare <graydon@redhat.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "tm_p.h"
32 #include "basic-block.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "tree-inline.h"
36 #include "gimple.h"
37 #include "tree-iterator.h"
38 #include "tree-flow.h"
39 #include "tree-mudflap.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "hashtab.h"
43 #include "diagnostic.h"
44 #include <demangle.h>
45 #include "langhooks.h"
46 #include "ggc.h"
47 #include "cgraph.h"
48 #include "toplev.h"
49 #include "gimple.h"
50
51 /* Internal function decls */
52
53
54 /* Options. */
55 #define flag_mudflap_threads (flag_mudflap == 2)
56
57 /* Helpers. */
58 static tree mf_build_string (const char *string);
59 static tree mf_varname_tree (tree);
60 static tree mf_file_function_line_tree (location_t);
61
62 /* Indirection-related instrumentation. */
63 static void mf_decl_cache_locals (void);
64 static void mf_decl_clear_locals (void);
65 static void mf_xform_derefs (void);
66 static unsigned int execute_mudflap_function_ops (void);
67
68 /* Addressable variables instrumentation. */
69 static void mf_xform_decls (gimple_seq, tree);
70 static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
71 struct walk_stmt_info *);
72 static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
73 static unsigned int execute_mudflap_function_decls (void);
74
75
76 /* ------------------------------------------------------------------------ */
77 /* Some generally helpful functions for mudflap instrumentation. */
78
79 /* Build a reference to a literal string. */
80 static tree
81 mf_build_string (const char *string)
82 {
83 size_t len = strlen (string);
84 tree result = mf_mark (build_string (len + 1, string));
85
86 TREE_TYPE (result) = build_array_type
87 (char_type_node, build_index_type (build_int_cst (NULL_TREE, len)));
88 TREE_CONSTANT (result) = 1;
89 TREE_READONLY (result) = 1;
90 TREE_STATIC (result) = 1;
91
92 result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);
93
94 return mf_mark (result);
95 }
96
97 /* Create a properly typed STRING_CST node that describes the given
98 declaration. It will be used as an argument for __mf_register().
99 Try to construct a helpful string, including file/function/variable
100 name. */
101
static tree
mf_varname_tree (tree decl)
{
  /* A single pretty-printer object is constructed lazily on first call
     and reused for every subsequent call; its output area is cleared
     before and after each use.  Not reentrant or thread-safe, which is
     fine for a compiler pass.  */
  static pretty_printer buf_rec;
  static int initialized = 0;
  pretty_printer *buf = & buf_rec;
  const char *buf_contents;
  tree result;

  gcc_assert (decl);

  if (!initialized)
    {
      pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
      initialized = 1;
    }
  pp_clear_output_area (buf);

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  {
    expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
    const char *sourcefile;
    unsigned sourceline = xloc.line;
    unsigned sourcecolumn = 0;
    sourcecolumn = xloc.column;
    sourcefile = xloc.file;
    /* Fall back to the current function's source file, then to a
       placeholder, when the decl itself carries no file name.  */
    if (sourcefile == NULL && current_function_decl != NULL_TREE)
      sourcefile = DECL_SOURCE_FILE (current_function_decl);
    if (sourcefile == NULL)
      sourcefile = "<unknown file>";

    pp_string (buf, sourcefile);

    /* Line and column are emitted only when known (nonzero).  */
    if (sourceline != 0)
      {
        pp_string (buf, ":");
        pp_decimal_int (buf, sourceline);

        if (sourcecolumn != 0)
          {
            pp_string (buf, ":");
            pp_decimal_int (buf, sourcecolumn);
          }
      }
  }

  if (current_function_decl != NULL_TREE)
    {
      /* Add (FUNCTION) */
      pp_string (buf, " (");
      {
        const char *funcname = NULL;
        if (DECL_NAME (current_function_decl))
          funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
        if (funcname == NULL)
          funcname = "anonymous fn";

        pp_string (buf, funcname);
      }
      pp_string (buf, ") ");
    }
  else
    pp_string (buf, " ");

  /* Add <variable-declaration>, possibly demangled.  */
  {
    const char *declname = NULL;

    if (DECL_NAME (decl) != NULL)
      {
        if (strcmp ("GNU C++", lang_hooks.name) == 0)
          {
            /* The gcc/cp decl_printable_name hook doesn't do as good a job as
               the libiberty demangler.  */
            declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
                                       DMGL_AUTO | DMGL_VERBOSE);
          }
        if (declname == NULL)
          declname = lang_hooks.decl_printable_name (decl, 3);
      }
    if (declname == NULL)
      declname = "<unnamed variable>";

    pp_string (buf, declname);
  }

  /* Return the lot as a new STRING_CST.  */
  buf_contents = pp_base_formatted_text (buf);
  result = mf_build_string (buf_contents);
  pp_clear_output_area (buf);

  return result;
}
195
196
197 /* And another friend, for producing a simpler message. */
198
199 static tree
200 mf_file_function_line_tree (location_t location)
201 {
202 expanded_location xloc = expand_location (location);
203 const char *file = NULL, *colon, *line, *op, *name, *cp;
204 char linecolbuf[30]; /* Enough for two decimal numbers plus a colon. */
205 char *string;
206 tree result;
207
208 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
209 file = xloc.file;
210 if (file == NULL && current_function_decl != NULL_TREE)
211 file = DECL_SOURCE_FILE (current_function_decl);
212 if (file == NULL)
213 file = "<unknown file>";
214
215 if (xloc.line > 0)
216 {
217 if (xloc.column > 0)
218 sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
219 else
220 sprintf (linecolbuf, "%d", xloc.line);
221 colon = ":";
222 line = linecolbuf;
223 }
224 else
225 colon = line = "";
226
227 /* Add (FUNCTION). */
228 name = lang_hooks.decl_printable_name (current_function_decl, 1);
229 if (name)
230 {
231 op = " (";
232 cp = ")";
233 }
234 else
235 op = name = cp = "";
236
237 string = concat (file, colon, line, op, name, cp, NULL);
238 result = mf_build_string (string);
239 free (string);
240
241 return result;
242 }
243
244
/* global tree nodes */

/* Global tree objects for global variables and functions exported by
   mudflap runtime library.  mf_init_extern_trees must be called
   before using these.  (NOTE(review): the initializer visible in this
   file is mudflap_init below — presumably the same routine renamed;
   confirm.)  All are GC roots via GTY so they survive ggc collection
   between functions.  */

/* uintptr_t (usually "unsigned long") */
static GTY (()) tree mf_uintptr_type;

/* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
static GTY (()) tree mf_cache_struct_type;

/* struct __mf_cache * const */
static GTY (()) tree mf_cache_structptr_type;

/* extern struct __mf_cache __mf_lookup_cache []; */
static GTY (()) tree mf_cache_array_decl;

/* extern unsigned char __mf_lc_shift; */
static GTY (()) tree mf_cache_shift_decl;

/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;

/* Their function-scope local shadows, used in single-threaded mode only.
   Set up by mf_decl_cache_locals and torn down by mf_decl_clear_locals
   for each instrumented function.  */

/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;

/* auto const uintptr_t __mf_lc_mask_l; */
static GTY (()) tree mf_cache_mask_decl_l;

/* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_check_fndecl;

/* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_register_fndecl;

/* extern void __mf_unregister (void *ptr, size_t sz, int type); */
static GTY (()) tree mf_unregister_fndecl;

/* extern void __mf_init (); */
static GTY (()) tree mf_init_fndecl;

/* extern int __mf_set_options (const char*); */
static GTY (()) tree mf_set_options_fndecl;
291
292
293 /* Helper for mudflap_init: construct a decl with the given category,
294 name, and type, mark it an external reference, and pushdecl it. */
295 static inline tree
296 mf_make_builtin (enum tree_code category, const char *name, tree type)
297 {
298 tree decl = mf_mark (build_decl (category, get_identifier (name), type));
299 TREE_PUBLIC (decl) = 1;
300 DECL_EXTERNAL (decl) = 1;
301 lang_hooks.decls.pushdecl (decl);
302 /* The decl was declared by the compiler. */
303 DECL_ARTIFICIAL (decl) = 1;
304 /* And we don't want debug info for it. */
305 DECL_IGNORED_P (decl) = 1;
306 return decl;
307 }
308
309 /* Helper for mudflap_init: construct a tree corresponding to the type
310 struct __mf_cache { uintptr_t low; uintptr_t high; };
311 where uintptr_t is the FIELD_TYPE argument. */
312 static inline tree
313 mf_make_mf_cache_struct_type (tree field_type)
314 {
315 /* There is, abominably, no language-independent way to construct a
316 RECORD_TYPE. So we have to call the basic type construction
317 primitives by hand. */
318 tree fieldlo = build_decl (FIELD_DECL, get_identifier ("low"), field_type);
319 tree fieldhi = build_decl (FIELD_DECL, get_identifier ("high"), field_type);
320
321 tree struct_type = make_node (RECORD_TYPE);
322 DECL_CONTEXT (fieldlo) = struct_type;
323 DECL_CONTEXT (fieldhi) = struct_type;
324 TREE_CHAIN (fieldlo) = fieldhi;
325 TYPE_FIELDS (struct_type) = fieldlo;
326 TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
327 layout_type (struct_type);
328
329 return struct_type;
330 }
331
/* Convenience wrappers for building fixed-arity function types on top
   of build_function_type.  Local helpers for mudflap_init only; they
   are #undef'd immediately after it.  */
#define build_function_type_0(rtype)            \
  build_function_type (rtype, void_list_node)
#define build_function_type_1(rtype, arg1)                              \
  build_function_type (rtype, tree_cons (0, arg1, void_list_node))
#define build_function_type_3(rtype, arg1, arg2, arg3)                  \
  build_function_type (rtype,                                           \
                       tree_cons (0, arg1,                              \
                                  tree_cons (0, arg2,                   \
                                             tree_cons (0, arg3,        \
                                                        void_list_node))))
#define build_function_type_4(rtype, arg1, arg2, arg3, arg4)            \
  build_function_type (rtype,                                           \
                       tree_cons (0, arg1,                              \
                                  tree_cons (0, arg2,                   \
                                             tree_cons (0, arg3,        \
                                                        tree_cons (0, arg4, \
                                                                   void_list_node)))))
349
/* Initialize the global tree nodes that correspond to mf-runtime.h
   declarations.  Idempotent: only the first call does any work.  */
void
mudflap_init (void)
{
  static bool done = false;
  tree mf_const_string_type;
  tree mf_cache_array_type;
  tree mf_check_register_fntype;
  tree mf_unregister_fntype;
  tree mf_init_fntype;
  tree mf_set_options_fntype;

  if (done)
    return;
  done = true;

  /* Basic types first: uintptr_t, const char *, the cache struct and
     an unbounded array of it.  */
  mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
                                                    /*unsignedp=*/true);
  mf_const_string_type
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));

  mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
  mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
  mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
  /* __mf_check and __mf_register share a signature.  */
  mf_check_register_fntype =
    build_function_type_4 (void_type_node, ptr_type_node, size_type_node,
                           integer_type_node, mf_const_string_type);
  mf_unregister_fntype =
    build_function_type_3 (void_type_node, ptr_type_node, size_type_node,
                           integer_type_node);
  mf_init_fntype =
    build_function_type_0 (void_type_node);
  mf_set_options_fntype =
    build_function_type_1 (integer_type_node, mf_const_string_type);

  /* Declare the runtime's exported variables.  */
  mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
                                         mf_cache_array_type);
  mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
                                         unsigned_char_type_node);
  mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
                                        mf_uintptr_type);
  /* Don't process these in mudflap_enqueue_decl, should they come by
     there for some reason.  */
  mf_mark (mf_cache_array_decl);
  mf_mark (mf_cache_shift_decl);
  mf_mark (mf_cache_mask_decl);
  /* Declare the runtime's exported functions.  */
  mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
                                     mf_check_register_fntype);
  mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
                                        mf_check_register_fntype);
  mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
                                          mf_unregister_fntype);
  mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
                                    mf_init_fntype);
  mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
                                           mf_set_options_fntype);
}
/* The arity helpers were for mudflap_init only.  */
#undef build_function_type_4
#undef build_function_type_3
#undef build_function_type_1
#undef build_function_type_0
413
414
415 /* ------------------------------------------------------------------------ */
416 /* Memory reference transforms. Perform the mudflap indirection-related
417 tree transforms on the current function.
418
419 This is the second part of the mudflap instrumentation. It works on
420 low-level GIMPLE using the CFG, because we want to run this pass after
421 tree optimizations have been performed, but we have to preserve the CFG
422 for expansion from trees to RTL. */
423
424 static unsigned int
425 execute_mudflap_function_ops (void)
426 {
427 struct gimplify_ctx gctx;
428
429 /* Don't instrument functions such as the synthetic constructor
430 built during mudflap_finish_file. */
431 if (mf_marked_p (current_function_decl) ||
432 DECL_ARTIFICIAL (current_function_decl))
433 return 0;
434
435 push_gimplify_context (&gctx);
436
437 /* In multithreaded mode, don't cache the lookup cache parameters. */
438 if (! flag_mudflap_threads)
439 mf_decl_cache_locals ();
440
441 mf_xform_derefs ();
442
443 if (! flag_mudflap_threads)
444 mf_decl_clear_locals ();
445
446 pop_gimplify_context (NULL);
447 return 0;
448 }
449
/* Insert a gimple_seq SEQ on all the outgoing edges out of BB.  Note that
   if BB has more than one edge, STMT will be replicated for each edge.
   Also, abnormal edges will be ignored.  */

static void
insert_edge_copies_seq (gimple_seq seq, basic_block bb)
{
  edge e;
  edge_iterator ei;
  /* Deliberately starts at (unsigned)-1, so after the counting loop
     N_COPIES holds "number of eligible edges minus one".  The second
     loop then hands the first N_COPIES edges fresh copies of SEQ and
     gives SEQ itself to the last one, avoiding a gratuitous copy.
     With zero eligible edges neither loop body runs.  */
  unsigned n_copies = -1;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      n_copies++;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
}
469
/* Create and initialize local shadow variables for the lookup cache
   globals.  Put their decls in the *_l globals for use by
   mf_build_check_statement_for.  Single-threaded mode only.  */

static void
mf_decl_cache_locals (void)
{
  gimple g;
  gimple_seq seq = gimple_seq_alloc ();

  /* Build the cache vars.  */
  mf_cache_shift_decl_l
    = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl),
                                 "__mf_lookup_shift_l"));

  mf_cache_mask_decl_l
    = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl),
                                 "__mf_lookup_mask_l"));

  /* Build initialization nodes for the cache vars.  We just load the
     globals into the cache variables.  */
  g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  /* Emit the loads on the entry block's outgoing edges so the shadows
     are initialized before any instrumented statement executes.  */
  insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);

  gsi_commit_edge_inserts ();
}
503
504
505 static void
506 mf_decl_clear_locals (void)
507 {
508 /* Unset local shadows. */
509 mf_cache_shift_decl_l = NULL_TREE;
510 mf_cache_mask_decl_l = NULL_TREE;
511 }
512
/* Emit, just before the statement at *INSTR_GSI, the mudflap
   cache-lookup check for the byte range [BASE, LIMIT] accessed at
   LOCATION with direction DIRFLAG (integer_zero_node for reads,
   integer_one_node for writes).  The containing basic block is split
   and a rarely-taken "then" block is synthesized that calls __mf_check
   on a cache miss.  On return, *INSTR_GSI points at the head of the
   join block holding the original statement.  */
static void
mf_build_check_statement_for (tree base, tree limit,
                              gimple_stmt_iterator *instr_gsi,
                              location_t location, tree dirflag)
{
  gimple_stmt_iterator gsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;
  gimple g;
  gimple_seq seq, stmts;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
  gsi = *instr_gsi;
  gsi_prev (&gsi);
  if (! gsi_end_p (gsi))
    e = split_block (cond_bb, gsi_stmt (gsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Build our local variables.  */
  mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem");
  mf_base = make_rename_temp (mf_uintptr_type, "__mf_base");
  mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  seq = gimple_seq_alloc ();
  t = fold_convert (mf_uintptr_type, unshare_expr (base));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_base, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = fold_convert (mf_uintptr_type, unshare_expr (limit));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_limit, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].
     In single-threaded mode the shift/mask come from the function-local
     shadows set up by mf_decl_cache_locals; otherwise from the runtime
     globals.  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              flag_mudflap_threads ? mf_cache_shift_decl
               : mf_cache_shift_decl_l);
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              flag_mudflap_threads ? mf_cache_mask_decl
               : mf_cache_mask_decl_l);
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_elem, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem_high < __mf_limit))
        {
          __mf_check ();
          ... and only if single-threaded:
          __mf_lookup_shift_1 = f...;
          __mf_lookup_mask_l = ...;
        }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low  > __mf_base'.  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
        1) u <--  '__mf_elem->high'
        2) v <--  '__mf_limit'.

     Then build 'u <-- (u < v).  */

  /* TREE_CHAIN of the first field is the "high" field.  */
  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TREE_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond");
  g = gimple_build_assign (cond, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
                         NULL_TREE);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem_high < __mf_limit))

     The lowered GIMPLE tree representing this code is in the statement
     list starting at 'head'.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /* Now build up the body of the cache-miss handling:

     __mf_check();
     refresh *_l vars.

     This is the body of the conditional.  */

  seq = gimple_seq_alloc ();
  /* u is a string, so it is already a gimple value.  */
  u = mf_file_function_line_tree (location);
  /* NB: we pass the overall [base..limit] range to mf_check.  */
  v = fold_build2 (PLUS_EXPR, mf_uintptr_type,
                   fold_build2 (MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
                   build_int_cst (mf_uintptr_type, 1));
  v = force_gimple_operand (v, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
  gimple_seq_add_stmt (&seq, g);

  if (! flag_mudflap_threads)
    {
      /* If the __mf_check call can end a basic block (e.g. it may
         throw), flush what we have so far into then_bb and continue
         emitting the shadow refreshes in a fresh successor block.  */
      if (stmt_ends_bb_p (g))
        {
          gsi = gsi_start_bb (then_bb);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
          e = split_block (then_bb, g);
          then_bb = e->dest;
          seq = gimple_seq_alloc ();
        }

      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
      gimple_seq_add_stmt (&seq, g);

      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
      gimple_seq_add_stmt (&seq, g);
    }

  /* Insert the check code in the THEN block.  */
  gsi = gsi_start_bb (then_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  *instr_gsi = gsi_start_bb (join_bb);
}
719
720
721 /* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
722 eligible for instrumentation. For the mudflap1 pass, this implies
723 that it should be registered with the libmudflap runtime. For the
724 mudflap2 pass this means instrumenting an indirection operation with
725 respect to the object.
726 */
727 static int
728 mf_decl_eligible_p (tree decl)
729 {
730 return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
731 /* The decl must have its address taken. In the case of
732 arrays, this flag is also set if the indexes are not
733 compile-time known valid constants. */
734 /* XXX: not sufficient: return-by-value structs! */
735 && TREE_ADDRESSABLE (decl)
736 /* The type of the variable must be complete. */
737 && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
738 /* The decl hasn't been decomposed somehow. */
739 && !DECL_HAS_VALUE_EXPR_P (decl));
740 }
741
742
/* Instrument the memory reference *TP, occurring in the statement at
   *ITER with source position LOCATION.  DIRFLAG is integer_one_node
   for writes and integer_zero_node for reads.  Derive a [base, limit]
   byte range for the access and hand it to
   mf_build_check_statement_for; references that are ineligible,
   already marked, or of an unhandled tree code are left untouched.  */
static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
        /* If we have a bitfield component reference, we must note the
           innermost addressable object in ELT, from which we will
           construct the byte-addressable bounds of the bitfield.  */
        tree elt = NULL_TREE;
        int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
            if (bitfield_ref_p && elt == NULL_TREE
                && (TREE_CODE (var) == ARRAY_REF
                    || TREE_CODE (var) == COMPONENT_REF))
              elt = var;

            if (TREE_CODE (var) == ARRAY_REF)
              {
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var))
              {
                /* Found an indirection: the checked base is the
                   pointer being dereferenced.  */
                base = TREE_OPERAND (var, 0);
                break;
              }
            else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
              {
                var = TREE_OPERAND (var, 0);
                if (CONSTANT_CLASS_P (var)
                    && TREE_CODE (var) != STRING_CST)
                  return;
              }
            else
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
                  {
                    base = build1 (ADDR_EXPR,
                                   build_pointer_type (TREE_TYPE (var)), var);
                    break;
                  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

            /* Check from the start byte of the innermost enclosing
               addressable object (ELT) when one was found above.  */
            if (elt)
              elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
                            elt);
            addr = fold_convert (ptr_type_node, elt ? elt : base);
            addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
                                addr, fold_convert (sizetype,
                                                    byte_position (field)));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        /* limit = addr + size - 1, computed in uintptr_t.  */
        limit = fold_build2 (MINUS_EXPR, mf_uintptr_type,
                             fold_build2 (PLUS_EXPR, mf_uintptr_type,
                                          convert (mf_uintptr_type, addr),
                                          size),
                             integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      /* limit = base + size - 1, via pointer arithmetic.  */
      limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
                           fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, base,
                                        size),
                           size_int (-1));
      break;

    case TARGET_MEM_REF:
      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
                           fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, base,
                                        size),
                           size_int (-1));
      break;

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
               "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above?  */
      {
        tree ofs, rem, bpu;

        /* If we're not dereferencing something, then the access
           must be ok.  */
        if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
          return;

        /* Round the bit offset down to a byte offset (OFS) and widen
           the bit size up to whole bytes (SIZE).  */
        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
        rem = size_binop (TRUNC_MOD_EXPR, ofs, bpu);
        ofs = fold_convert (sizetype, size_binop (TRUNC_DIV_EXPR, ofs, bpu));

        size = convert (bitsizetype, TREE_OPERAND (t, 1));
        size = size_binop (PLUS_EXPR, size, rem);
        size = size_binop (CEIL_DIV_EXPR, size, bpu);
        size = convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = convert (ptr_type_node, addr);
        addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, addr, ofs);

        base = addr;
        limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
                             fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
                                          base, size),
                             size_int (-1));
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, location, dirflag);
}
934
/* Walk every basic block of the current function and instrument the
   memory-touching operands of GIMPLE_ASSIGN and GIMPLE_RETURN
   statements.  Blocks created by the instrumentation itself get
   indices above SAVED_LAST_BASIC_BLOCK and are therefore not
   re-scanned.  */
static void
mf_xform_derefs (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block;
  enum gimple_rhs_class grhs_class;

  bb = ENTRY_BLOCK_PTR ->next_bb;
  do
    {
      /* Capture the successor now: mf_xform_derefs_1 may split BB.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple s = gsi_stmt (i);

          /* Only a few GIMPLE statements can reference memory.  */
          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
              /* LHS is a write (integer_one_node), RHS operands are
                 reads (integer_zero_node).  */
              mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
                                 gimple_location (s), integer_one_node);
              mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
                                 gimple_location (s), integer_zero_node);
              grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
              if (grhs_class == GIMPLE_BINARY_RHS)
                mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
                                   gimple_location (s), integer_zero_node);
              break;

            case GIMPLE_RETURN:
              if (gimple_return_retval (s) != NULL_TREE)
                {
                  mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
                                     gimple_location (s),
                                     integer_zero_node);
                }
              break;

            default:
              ;
            }
        }
      bb = next;
    }
  while (bb && bb->index <= saved_last_basic_block);
}
982
983 /* ------------------------------------------------------------------------ */
984 /* ADDR_EXPR transforms. Perform the declaration-related mudflap tree
985 transforms on the current function.
986
987 This is the first part of the mudflap instrumentation. It works on
988 high-level GIMPLE because after lowering, all variables are moved out
989 of their BIND_EXPR binding context, and we lose liveness information
990 for the declarations we wish to instrument. */
991
992 static unsigned int
993 execute_mudflap_function_decls (void)
994 {
995 struct gimplify_ctx gctx;
996
997 /* Don't instrument functions such as the synthetic constructor
998 built during mudflap_finish_file. */
999 if (mf_marked_p (current_function_decl) ||
1000 DECL_ARTIFICIAL (current_function_decl))
1001 return 0;
1002
1003 push_gimplify_context (&gctx);
1004
1005 mf_xform_decls (gimple_body (current_function_decl),
1006 DECL_ARGUMENTS (current_function_decl));
1007
1008 pop_gimplify_context (NULL);
1009 return 0;
1010 }
1011
/* State handed by mf_xform_decls to its statement-walk callback
   mx_xfn_xform_decls: the function's parameter declarations, which the
   callback registers at the first GIMPLE_BIND it sees and then clears.  */
struct mf_xform_decls_data
{
  tree param_decls;
};
1019
1020
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.  */
gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  /* Unregister calls accumulated here become the FINALLY part of a
     TRY_FINALLY wrapped around SEQ, so they run on every scope exit.  */
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
          tree unregister_fncall_param, register_fncall_param;

          /* Variable-sized objects should have sizes already been
             gimplified when we got here.  */
          size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl)));
          gcc_assert (is_gimple_val (size));


          unregister_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          /* The literal 3 is the runtime's __MF_TYPE_STACK code.  */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
                                                 unregister_fncall_param,
                                                 size,
                                                 build_int_cst (NULL_TREE, 3));


          variable_name = mf_varname_tree (decl);
          register_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
             "name") */
          register_fncall = gimple_build_call (mf_register_fndecl, 4,
                                               register_fncall_param,
                                               size,
                                               build_int_cst (NULL_TREE, 3),
                                               variable_name);


          /* Accumulate the two calls.  */
          gimple_set_location (register_fncall, location);
          gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.  */
          if (gsi_end_p (initially_stmts))
            {
              /* SEQ is empty, so there is no statement to anchor the
                 register/unregister pair on; warn unless the decl is
                 compiler-generated.  */
              if (!DECL_ARTIFICIAL (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
                         DECL_NAME (decl));
            }
          else
            {
              gsi_insert_before (&initially_stmts, register_fncall,
                                 GSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
            }
          /* Prevent later passes from instrumenting this decl again.  */
          mf_mark (decl);
        }

      decl = TREE_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
    {
      /* Wrap the original body so every unregister runs on scope exit.  */
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = gimple_seq_alloc ();

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}
1113
1114
1115 /* Process every variable mentioned in BIND_EXPRs. */
1116 static tree
1117 mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
1118 bool *handled_operands_p ATTRIBUTE_UNUSED,
1119 struct walk_stmt_info *wi)
1120 {
1121 struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
1122 gimple stmt = gsi_stmt (*gsi);
1123
1124 switch (gimple_code (stmt))
1125 {
1126 case GIMPLE_BIND:
1127 {
1128 /* Process function parameters now (but only once). */
1129 if (d->param_decls)
1130 {
1131 gimple_bind_set_body (stmt,
1132 mx_register_decls (d->param_decls,
1133 gimple_bind_body (stmt),
1134 gimple_location (stmt)));
1135 d->param_decls = NULL_TREE;
1136 }
1137
1138 gimple_bind_set_body (stmt,
1139 mx_register_decls (gimple_bind_vars (stmt),
1140 gimple_bind_body (stmt),
1141 gimple_location (stmt)));
1142 }
1143 break;
1144
1145 default:
1146 break;
1147 }
1148
1149 return NULL_TREE;
1150 }
1151
1152 /* Perform the object lifetime tracking mudflap transform on the given function
1153 tree. The tree is mutated in place, with possibly copied subtree nodes.
1154
1155 For every auto variable declared, if its address is ever taken
1156 within the function, then supply its lifetime to the mudflap
1157 runtime with the __mf_register and __mf_unregister calls.
1158 */
1159
1160 static void
1161 mf_xform_decls (gimple_seq fnbody, tree fnparams)
1162 {
1163 struct mf_xform_decls_data d;
1164 struct walk_stmt_info wi;
1165 struct pointer_set_t *pset = pointer_set_create ();
1166
1167 d.param_decls = fnparams;
1168 memset (&wi, 0, sizeof (wi));
1169 wi.info = (void*) &d;
1170 wi.pset = pset;
1171 walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
1172 pointer_set_destroy (pset);
1173 }
1174
1175
1176 /* ------------------------------------------------------------------------ */
1177 /* Externally visible mudflap functions. */
1178
1179
/* Mark and return the given tree node to prevent further mudflap
   transforms.  Marks live in MARKED_TREES, a pointer-keyed hash table
   written by mf_mark and queried by mf_marked_p; the GTY annotation
   keeps its entries alive across garbage collection.  */
static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;
1183
1184 tree
1185 mf_mark (tree t)
1186 {
1187 void **slot;
1188
1189 if (marked_trees == NULL)
1190 marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
1191 NULL);
1192
1193 slot = htab_find_slot (marked_trees, t, INSERT);
1194 *slot = t;
1195 return t;
1196 }
1197
1198 int
1199 mf_marked_p (tree t)
1200 {
1201 void *entry;
1202
1203 if (marked_trees == NULL)
1204 return 0;
1205
1206 entry = htab_find (marked_trees, t);
1207 return (entry != NULL);
1208 }
1209
/* Remember the given node as a static of some kind: global data,
   function-scope static, or an anonymous constant.  Registration with
   the mudflap runtime is deferred until program finish time.  */
1213
/* A list of globals whose incomplete declarations we encountered.
   Instead of emitting the __mf_register call for them here, it's
   delayed until program finish time.  If they're still incomplete by
   then, warnings are emitted.  */

static GTY (()) VEC(tree,gc) *deferred_static_decls;

/* A list of statements for calling __mf_register() at startup time;
   appended to by mudflap_register_call and flushed into the static
   constructor by mudflap_finish_file.  */
static GTY (()) tree enqueued_call_stmt_chain;
1223
static void
mudflap_register_call (tree obj, tree object_size, tree varname)
{
  tree arg, call_stmt;

  /* Take OBJ's address and normalize it to a generic pointer.  */
  arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
  arg = convert (ptr_type_node, arg);

  /* __mf_register (&OBJ, OBJECT_SIZE, __MF_TYPE_STATIC, VARNAME) */
  call_stmt = build_call_expr (mf_register_fndecl, 4,
                               arg,
                               convert (size_type_node, object_size),
                               /* __MF_TYPE_STATIC */
                               build_int_cst (NULL_TREE, 4),
                               varname);

  /* Defer the call until the static constructor is emitted.  */
  append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
}
1241
1242 void
1243 mudflap_enqueue_decl (tree obj)
1244 {
1245 if (mf_marked_p (obj))
1246 return;
1247
1248 /* We don't need to process variable decls that are internally
1249 generated extern. If we did, we'd end up with warnings for them
1250 during mudflap_finish_file (). That would confuse the user,
1251 since the text would refer to variables that don't show up in the
1252 user's source code. */
1253 if (DECL_P (obj) && DECL_EXTERNAL (obj) && DECL_ARTIFICIAL (obj))
1254 return;
1255
1256 VEC_safe_push (tree, gc, deferred_static_decls, obj);
1257 }
1258
1259
1260 void
1261 mudflap_enqueue_constant (tree obj)
1262 {
1263 tree object_size, varname;
1264
1265 if (mf_marked_p (obj))
1266 return;
1267
1268 if (TREE_CODE (obj) == STRING_CST)
1269 object_size = build_int_cst (NULL_TREE, TREE_STRING_LENGTH (obj));
1270 else
1271 object_size = size_in_bytes (TREE_TYPE (obj));
1272
1273 if (TREE_CODE (obj) == STRING_CST)
1274 varname = mf_build_string ("string literal");
1275 else
1276 varname = mf_build_string ("constant");
1277
1278 mudflap_register_call (obj, object_size, varname);
1279 }
1280
1281
1282 /* Emit any file-wide instrumentation. */
1283 void
1284 mudflap_finish_file (void)
1285 {
1286 tree ctor_statements = NULL_TREE;
1287
1288 /* No need to continue when there were errors. */
1289 if (errorcount != 0 || sorrycount != 0)
1290 return;
1291
1292 /* Insert a call to __mf_init. */
1293 {
1294 tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
1295 append_to_statement_list (call2_stmt, &ctor_statements);
1296 }
1297
1298 /* If appropriate, call __mf_set_options to pass along read-ignore mode. */
1299 if (flag_mudflap_ignore_reads)
1300 {
1301 tree arg = mf_build_string ("-ignore-reads");
1302 tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
1303 append_to_statement_list (call_stmt, &ctor_statements);
1304 }
1305
1306 /* Process all enqueued object decls. */
1307 if (deferred_static_decls)
1308 {
1309 size_t i;
1310 tree obj;
1311 for (i = 0; VEC_iterate (tree, deferred_static_decls, i, obj); i++)
1312 {
1313 gcc_assert (DECL_P (obj));
1314
1315 if (mf_marked_p (obj))
1316 continue;
1317
1318 /* Omit registration for static unaddressed objects. NB:
1319 Perform registration for non-static objects regardless of
1320 TREE_USED or TREE_ADDRESSABLE, because they may be used
1321 from other compilation units. */
1322 if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
1323 continue;
1324
1325 if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
1326 {
1327 warning (OPT_Wmudflap,
1328 "mudflap cannot track unknown size extern %qE",
1329 DECL_NAME (obj));
1330 continue;
1331 }
1332
1333 mudflap_register_call (obj,
1334 size_in_bytes (TREE_TYPE (obj)),
1335 mf_varname_tree (obj));
1336 }
1337
1338 VEC_truncate (tree, deferred_static_decls, 0);
1339 }
1340
1341 /* Append all the enqueued registration calls. */
1342 if (enqueued_call_stmt_chain)
1343 {
1344 append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
1345 enqueued_call_stmt_chain = NULL_TREE;
1346 }
1347
1348 cgraph_build_static_cdtor ('I', ctor_statements,
1349 MAX_RESERVED_INIT_PRIORITY-1);
1350 }
1351
1352
/* Gate for both mudflap passes: run only when mudflap instrumentation
   is enabled (flag_mudflap nonzero; 2 selects the threaded variant,
   see flag_mudflap_threads).  */
static bool
gate_mudflap (void)
{
  return flag_mudflap != 0;
}
1358
/* Pass descriptor for the first mudflap pass
   (execute_mudflap_function_decls): registers decl lifetimes.  Runs on
   high-level GIMPLE (PROP_gimple_any).  */
struct gimple_opt_pass pass_mudflap_1 =
{
 {
  GIMPLE_PASS,
  "mudflap1",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_decls,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
1377
/* Pass descriptor for the second mudflap pass
   (execute_mudflap_function_ops): instruments memory dereferences.
   Requires SSA form, a CFG, and lowered EH.  */
struct gimple_opt_pass pass_mudflap_2 =
{
 {
  GIMPLE_PASS,
  "mudflap2",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_ops,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_dump_func | TODO_update_ssa    /* todo_flags_finish */
 }
};
1397
1398 #include "gt-tree-mudflap.h"