/* gcc/tree-mudflap.c (from gcc.git).  */
1 /* Mudflap: narrow-pointer bounds-checking by tree rewriting.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Frank Ch. Eigler <fche@redhat.com>
5 and Graydon Hoare <graydon@redhat.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "tm_p.h"
30 #include "basic-block.h"
31 #include "flags.h"
32 #include "function.h"
33 #include "tree-inline.h"
34 #include "gimple.h"
35 #include "tree-iterator.h"
36 #include "tree-flow.h"
37 #include "tree-mudflap.h"
38 #include "tree-dump.h"
39 #include "tree-pass.h"
40 #include "hashtab.h"
41 #include "diagnostic.h"
42 #include <demangle.h>
43 #include "langhooks.h"
44 #include "ggc.h"
45 #include "cgraph.h"
46 #include "toplev.h"
47 #include "gimple.h"
48
/* Internal function decls */


/* Options.  */

/* Nonzero when instrumenting for a threaded runtime: -fmudflapth sets
   flag_mudflap == 2, which selects the thread-safe (uncached) lookup
   protocol throughout this file.  */
#define flag_mudflap_threads (flag_mudflap == 2)

/* Helpers.  */
static tree mf_build_string (const char *string);
static tree mf_varname_tree (tree);
static tree mf_file_function_line_tree (location_t);

/* Indirection-related instrumentation.  */
static void mf_decl_cache_locals (void);
static void mf_decl_clear_locals (void);
static void mf_xform_statements (void);
static unsigned int execute_mudflap_function_ops (void);

/* Addressable variables instrumentation.  */
static void mf_xform_decls (gimple_seq, tree);
static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
                                struct walk_stmt_info *);
static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
static unsigned int execute_mudflap_function_decls (void);
73
74 /* ------------------------------------------------------------------------ */
75 /* Some generally helpful functions for mudflap instrumentation. */
76
77 /* Build a reference to a literal string. */
78 static tree
79 mf_build_string (const char *string)
80 {
81 size_t len = strlen (string);
82 tree result = mf_mark (build_string (len + 1, string));
83
84 TREE_TYPE (result) = build_array_type
85 (char_type_node, build_index_type (build_int_cst (NULL_TREE, len)));
86 TREE_CONSTANT (result) = 1;
87 TREE_READONLY (result) = 1;
88 TREE_STATIC (result) = 1;
89
90 result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);
91
92 return mf_mark (result);
93 }
94
/* Create a properly typed STRING_CST node that describes the given
   declaration DECL.  It will be used as an argument for __mf_register().
   Try to construct a helpful string of the form
   "FILE[:LINE[:COLUMN]] (FUNCTION) VARIABLE".  */

static tree
mf_varname_tree (tree decl)
{
  /* The pretty-printer is constructed once and reused across calls;
     its output area is cleared before and after each use, so no state
     leaks between invocations.  */
  static pretty_printer buf_rec;
  static int initialized = 0;
  pretty_printer *buf = & buf_rec;
  const char *buf_contents;
  tree result;

  gcc_assert (decl);

  if (!initialized)
    {
      pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
      initialized = 1;
    }
  pp_clear_output_area (buf);

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  {
    expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
    const char *sourcefile;
    unsigned sourceline = xloc.line;
    unsigned sourcecolumn = 0;
    sourcecolumn = xloc.column;
    sourcefile = xloc.file;
    /* Fall back to the enclosing function's source file, then to a
       placeholder, when the decl itself carries no file name.  */
    if (sourcefile == NULL && current_function_decl != NULL_TREE)
      sourcefile = DECL_SOURCE_FILE (current_function_decl);
    if (sourcefile == NULL)
      sourcefile = "<unknown file>";

    pp_string (buf, sourcefile);

    /* Line and column are emitted only when known (nonzero).  */
    if (sourceline != 0)
      {
        pp_string (buf, ":");
        pp_decimal_int (buf, sourceline);

        if (sourcecolumn != 0)
          {
            pp_string (buf, ":");
            pp_decimal_int (buf, sourcecolumn);
          }
      }
  }

  if (current_function_decl != NULL_TREE)
    {
      /* Add (FUNCTION) */
      pp_string (buf, " (");
      {
        const char *funcname = NULL;
        if (DECL_NAME (current_function_decl))
          funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
        if (funcname == NULL)
          funcname = "anonymous fn";

        pp_string (buf, funcname);
      }
      pp_string (buf, ") ");
    }
  else
    pp_string (buf, " ");

  /* Add <variable-declaration>, possibly demangled.  */
  {
    const char *declname = NULL;

    if (DECL_NAME (decl) != NULL)
      {
        if (strcmp ("GNU C++", lang_hooks.name) == 0)
          {
            /* The gcc/cp decl_printable_name hook doesn't do as good a job as
               the libiberty demangler.  */
            declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
                                       DMGL_AUTO | DMGL_VERBOSE);
          }
        if (declname == NULL)
          declname = lang_hooks.decl_printable_name (decl, 3);
      }
    if (declname == NULL)
      declname = "<unnamed variable>";

    pp_string (buf, declname);
  }

  /* Return the lot as a new STRING_CST.  */
  buf_contents = pp_base_formatted_text (buf);
  result = mf_build_string (buf_contents);
  pp_clear_output_area (buf);

  return result;
}
193
194
/* And another friend, for producing a simpler message: build a
   STRING_CST tree of the form "FILE[:LINE[:COLUMN]][ (FUNCTION)]"
   describing LOCATION, for use in __mf_check violation reports.  */

static tree
mf_file_function_line_tree (location_t location)
{
  expanded_location xloc = expand_location (location);
  const char *file = NULL, *colon, *line, *op, *name, *cp;
  char linecolbuf[30]; /* Enough for two decimal numbers plus a colon.  */
  char *string;
  tree result;

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  file = xloc.file;
  /* Fall back to the current function's file, then a placeholder.  */
  if (file == NULL && current_function_decl != NULL_TREE)
    file = DECL_SOURCE_FILE (current_function_decl);
  if (file == NULL)
    file = "<unknown file>";

  if (xloc.line > 0)
    {
      if (xloc.column > 0)
        sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
      else
        sprintf (linecolbuf, "%d", xloc.line);
      colon = ":";
      line = linecolbuf;
    }
  else
    /* Unknown line: emit neither the colon nor the number.  */
    colon = line = "";

  /* Add (FUNCTION).  */
  name = lang_hooks.decl_printable_name (current_function_decl, 1);
  if (name)
    {
      op = " (";
      cp = ")";
    }
  else
    op = name = cp = "";

  /* concat allocates; mf_build_string copies, so free our buffer.  */
  string = concat (file, colon, line, op, name, cp, NULL);
  result = mf_build_string (string);
  free (string);

  return result;
}
241
242
/* global tree nodes */

/* Global tree objects for global variables and functions exported by
   the mudflap runtime library.  mudflap_init must be called before
   using these.  All are GC-rooted via GTY.  */

/* uintptr_t (usually "unsigned long") */
static GTY (()) tree mf_uintptr_type;

/* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
static GTY (()) tree mf_cache_struct_type;

/* struct __mf_cache * const */
static GTY (()) tree mf_cache_structptr_type;

/* extern struct __mf_cache __mf_lookup_cache []; */
static GTY (()) tree mf_cache_array_decl;

/* extern unsigned char __mf_lc_shift; */
static GTY (()) tree mf_cache_shift_decl;

/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;

/* Their function-scope local shadows, used in single-threaded mode only.
   These are set up per function by mf_decl_cache_locals and cleared by
   mf_decl_clear_locals.  */

/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;

/* auto const uintptr_t __mf_lc_mask_l; */
static GTY (()) tree mf_cache_mask_decl_l;

/* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_check_fndecl;

/* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_register_fndecl;

/* extern void __mf_unregister (void *ptr, size_t sz, int type); */
static GTY (()) tree mf_unregister_fndecl;

/* extern void __mf_init (); */
static GTY (()) tree mf_init_fndecl;

/* extern int __mf_set_options (const char*); */
static GTY (()) tree mf_set_options_fndecl;
290
/* Helper for mudflap_init: construct a decl with the given category
   (VAR_DECL or FUNCTION_DECL), name, and type, mark it an external
   reference, and pushdecl it so the front end knows about it.  The
   decl is marked artificial and excluded from debug info, since it
   mirrors a symbol defined by the libmudflap runtime.  */
static inline tree
mf_make_builtin (enum tree_code category, const char *name, tree type)
{
  tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
                                   category, get_identifier (name), type));
  TREE_PUBLIC (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  lang_hooks.decls.pushdecl (decl);
  /* The decl was declared by the compiler.  */
  DECL_ARTIFICIAL (decl) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (decl) = 1;
  return decl;
}
307
/* Helper for mudflap_init: construct a tree corresponding to the type
     struct __mf_cache { uintptr_t low; uintptr_t high; };
   where uintptr_t is the FIELD_TYPE argument.  The layout must match
   the runtime's definition in mf-runtime.h.  */
static inline tree
mf_make_mf_cache_struct_type (tree field_type)
{
  /* There is, abominably, no language-independent way to construct a
     RECORD_TYPE.  So we have to call the basic type construction
     primitives by hand.  */
  tree fieldlo = build_decl (UNKNOWN_LOCATION,
                             FIELD_DECL, get_identifier ("low"), field_type);
  tree fieldhi = build_decl (UNKNOWN_LOCATION,
                             FIELD_DECL, get_identifier ("high"), field_type);

  tree struct_type = make_node (RECORD_TYPE);
  DECL_CONTEXT (fieldlo) = struct_type;
  DECL_CONTEXT (fieldhi) = struct_type;
  /* Chain the two fields and lay the record out.  */
  DECL_CHAIN (fieldlo) = fieldhi;
  TYPE_FIELDS (struct_type) = fieldlo;
  TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
  layout_type (struct_type);

  return struct_type;
}
332
/* Initialize the global tree nodes that correspond to mf-runtime.h
   declarations.  Idempotent: only the first call does any work.  */
void
mudflap_init (void)
{
  static bool done = false;
  tree mf_const_string_type;
  tree mf_cache_array_type;
  tree mf_check_register_fntype;
  tree mf_unregister_fntype;
  tree mf_init_fntype;
  tree mf_set_options_fntype;

  if (done)
    return;
  done = true;

  /* uintptr_t: the unsigned integer type matching the pointer mode.  */
  mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
                                                    /*unsignedp=*/true);
  /* const char *, for message arguments.  */
  mf_const_string_type
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));

  /* Types of the runtime's exported objects and functions.  */
  mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
  mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
  mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
  mf_check_register_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, mf_const_string_type, NULL_TREE);
  mf_unregister_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, NULL_TREE);
  mf_init_fntype =
    build_function_type_list (void_type_node, NULL_TREE);
  mf_set_options_fntype =
    build_function_type_list (integer_type_node, mf_const_string_type, NULL_TREE);

  /* Decls for the runtime's lookup-cache globals.  */
  mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
                                         mf_cache_array_type);
  mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
                                         unsigned_char_type_node);
  mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
                                        mf_uintptr_type);
  /* Don't process these in mudflap_enqueue_decl, should they come by
     there for some reason.  */
  mf_mark (mf_cache_array_decl);
  mf_mark (mf_cache_shift_decl);
  mf_mark (mf_cache_mask_decl);
  /* Decls for the runtime's entry points.  */
  mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
                                     mf_check_register_fntype);
  mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
                                        mf_check_register_fntype);
  mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
                                          mf_unregister_fntype);
  mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
                                    mf_init_fntype);
  mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
                                           mf_set_options_fntype);
}
392
393
/* ------------------------------------------------------------------------ */
/* This is the second part of the mudflap instrumentation.  It works on
   low-level GIMPLE using the CFG, because we want to run this pass after
   tree optimizations have been performed, but we have to preserve the CFG
   for expansion from trees to RTL.
   Below is the list of transformations performed on statements in the
   current function.

 1) Memory reference transforms: Perform the mudflap indirection-related
    tree transforms on memory references.

 2) Mark BUILTIN_ALLOCA calls not inlineable.

 */

/* Gate/driver for the mudflap2 pass: instrument the current function's
   memory references.  Returns 0 (no additional TODO flags).  */
static unsigned int
execute_mudflap_function_ops (void)
{
  struct gimplify_ctx gctx;

  /* Don't instrument functions such as the synthetic constructor
     built during mudflap_finish_file.  */
  if (mf_marked_p (current_function_decl) ||
      DECL_ARTIFICIAL (current_function_decl))
    return 0;

  push_gimplify_context (&gctx);

  /* In multithreaded mode, don't cache the lookup cache parameters.  */
  if (! flag_mudflap_threads)
    mf_decl_cache_locals ();

  mf_xform_statements ();

  /* Drop the per-function shadow decls set up above.  */
  if (! flag_mudflap_threads)
    mf_decl_clear_locals ();

  pop_gimplify_context (NULL);
  return 0;
}
434
/* Insert a gimple_seq SEQ on all the outgoing edges out of BB.  Note that
   if BB has more than one edge, SEQ will be replicated for each edge.
   Also, abnormal edges will be ignored.  */

static void
insert_edge_copies_seq (gimple_seq seq, basic_block bb)
{
  edge e;
  edge_iterator ei;
  /* Start at (unsigned) -1 so that after the counting loop N_COPIES
     holds the number of *extra* copies needed: with K eligible edges
     it ends up K-1, and the insertion loop below hands out K-1 copies
     plus SEQ itself on the final edge, avoiding one wasted copy.  */
  unsigned n_copies = -1;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      n_copies++;

  /* The last eligible edge (when n_copies-- has counted down to 0)
     receives the original sequence rather than a copy.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
}
454
/* Create and initialize local shadow variables for the lookup cache
   globals.  Put their decls in the *_l globals for use by
   mf_build_check_statement_for.  Single-threaded mode only: the
   shadows let each function read the cache parameters once instead of
   reloading the globals at every check.  */

static void
mf_decl_cache_locals (void)
{
  gimple g;
  gimple_seq seq = gimple_seq_alloc ();

  /* Build the cache vars.  */
  mf_cache_shift_decl_l
    = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl),
                                 "__mf_lookup_shift_l"));

  mf_cache_mask_decl_l
    = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl),
                                 "__mf_lookup_mask_l"));

  /* Build initialization nodes for the cache vars.  We just load the
     globals into the cache variables.  */
  g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  /* Place the loads on the function's entry edge so they dominate
     every check in the body.  */
  insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);

  gsi_commit_edge_inserts ();
}
488
489
490 static void
491 mf_decl_clear_locals (void)
492 {
493 /* Unset local shadows. */
494 mf_cache_shift_decl_l = NULL_TREE;
495 mf_cache_mask_decl_l = NULL_TREE;
496 }
497
/* Emit the inline cache-lookup / __mf_check code for one memory access.
   BASE and LIMIT are trees for the first and last byte of the accessed
   region; DIRFLAG is integer_zero_node for a read, integer_one_node for
   a write; LOCATION is used for diagnostics.  The check is inserted
   before the statement at *INSTR_GSI, splitting its block; on return
   *INSTR_GSI points at the head of the block holding the original
   statement.  */
static void
mf_build_check_statement_for (tree base, tree limit,
                              gimple_stmt_iterator *instr_gsi,
                              location_t location, tree dirflag)
{
  gimple_stmt_iterator gsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;
  gimple g;
  gimple_seq seq, stmts;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
  gsi = *instr_gsi;
  gsi_prev (&gsi);
  if (! gsi_end_p (gsi))
    e = split_block (cond_bb, gsi_stmt (gsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Build our local variables.  */
  mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem");
  mf_base = make_rename_temp (mf_uintptr_type, "__mf_base");
  mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  seq = gimple_seq_alloc ();
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (base));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_base, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (limit));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_limit, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].
     In single-threaded mode the shift/mask come from the function-local
     shadows set up by mf_decl_cache_locals; in threaded mode the
     globals are read directly.  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              flag_mudflap_threads ? mf_cache_shift_decl
               : mf_cache_shift_decl_l);
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              flag_mudflap_threads ? mf_cache_mask_decl
               : mf_cache_mask_decl_l);
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_elem, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))
        {
          __mf_check ();
          ... and only if single-threaded:
          __mf_lookup_shift_l = ...;
          __mf_lookup_mask_l = ...;
        }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low  > __mf_base'.  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
        1) u <--  '__mf_elem->high'
        2) v <--  '__mf_limit'.

     Then build 'u <-- (u < v)'.  */

  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond");
  g = gimple_build_assign (cond, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
                         NULL_TREE);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))

     The lowered GIMPLE tree representing this code is in the sequence
     SEQ.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /*  Now build up the body of the cache-miss handling:

     __mf_check();
     refresh *_l vars.

     This is the body of the conditional.  */

  seq = gimple_seq_alloc ();
  /* u is a string, so it is already a gimple value.  */
  u = mf_file_function_line_tree (location);
  /* NB: we pass the overall [base..limit] range to mf_check.  */
  v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
                       fold_build2_loc (location,
                                        MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
                       build_int_cst (mf_uintptr_type, 1));
  v = force_gimple_operand (v, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
  gimple_seq_add_stmt (&seq, g);

  if (! flag_mudflap_threads)
    {
      /* If the call can end a basic block (e.g. it may throw), the
         shadow refreshes must go in a successor block of their own.  */
      if (stmt_ends_bb_p (g))
        {
          gsi = gsi_start_bb (then_bb);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
          e = split_block (then_bb, g);
          then_bb = e->dest;
          seq = gimple_seq_alloc ();
        }

      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
      gimple_seq_add_stmt (&seq, g);

      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
      gimple_seq_add_stmt (&seq, g);
    }

  /* Insert the check code in the THEN block.  */
  gsi = gsi_start_bb (then_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  *instr_gsi = gsi_start_bb (join_bb);
}
707
708
709 /* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
710 eligible for instrumentation. For the mudflap1 pass, this implies
711 that it should be registered with the libmudflap runtime. For the
712 mudflap2 pass this means instrumenting an indirection operation with
713 respect to the object.
714 */
715 static int
716 mf_decl_eligible_p (tree decl)
717 {
718 return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
719 /* The decl must have its address taken. In the case of
720 arrays, this flag is also set if the indexes are not
721 compile-time known valid constants. */
722 /* XXX: not sufficient: return-by-value structs! */
723 && TREE_ADDRESSABLE (decl)
724 /* The type of the variable must be complete. */
725 && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
726 /* The decl hasn't been decomposed somehow. */
727 && !DECL_HAS_VALUE_EXPR_P (decl));
728 }
729
730
/* Instrument one memory reference *TP occurring in the statement at
   *ITER.  DIRFLAG is integer_zero_node for a load, integer_one_node
   for a store.  Computes the [base..limit] byte range touched by the
   reference and hands it to mf_build_check_statement_for; returns
   without effect when the reference needs no check.  */
static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  /* Default size of the access; refined per-case below.  */
  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
        /* If we have a bitfield component reference, we must note the
           innermost addressable object in ELT, from which we will
           construct the byte-addressable bounds of the bitfield.  */
        tree elt = NULL_TREE;
        int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
            if (bitfield_ref_p && elt == NULL_TREE
                && (TREE_CODE (var) == ARRAY_REF
                    || TREE_CODE (var) == COMPONENT_REF))
              elt = var;

            if (TREE_CODE (var) == ARRAY_REF)
              {
                /* An array index means this is not a pure
                   field-selection chain.  */
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var)
                     || TREE_CODE (var) == MEM_REF)
              {
                /* Reached an indirection: the pointer operand is the
                   base to check.  */
                base = TREE_OPERAND (var, 0);
                break;
              }
            else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
              {
                var = TREE_OPERAND (var, 0);
                /* A view-converted non-string constant cannot be out
                   of bounds.  */
                if (CONSTANT_CLASS_P (var)
                    && TREE_CODE (var) != STRING_CST)
                  return;
              }
            else
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
                  {
                    base = build1 (ADDR_EXPR,
                                   build_pointer_type (TREE_TYPE (var)), var);
                    break;
                  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

            /* Check the enclosing addressable object (ELT when found,
               otherwise BASE) offset by the field's byte position.  */
            if (elt)
              elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
                            elt);
            addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
            addr = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                                    addr, fold_convert_loc (location, sizetype,
                                                            byte_position (field)));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        /* limit = addr + size - 1, computed in uintptr arithmetic.  */
        limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                                 fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
                                                  convert (mf_uintptr_type, addr),
                                                  size),
                                 integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      /* limit = base + size - 1 (the -1 via size_int (-1) in sizetype).  */
      limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                               fold_build2_loc (location,
                                                POINTER_PLUS_EXPR, ptr_type_node, base,
                                                size),
                               size_int (-1));
      break;

    case MEM_REF:
      /* Base address is the pointer operand plus the constant offset.  */
      addr = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (t, 1)),
                     TREE_OPERAND (t, 0),
                     fold_convert (sizetype, TREE_OPERAND (t, 1)));
      base = addr;
      limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                               fold_build2_loc (location,
                                                POINTER_PLUS_EXPR, ptr_type_node, base,
                                                size),
                               size_int (-1));
      break;

    case TARGET_MEM_REF:
      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                               fold_build2_loc (location,
                                                POINTER_PLUS_EXPR, ptr_type_node, base,
                                                size),
                               size_int (-1));
      break;

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
               "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above?  */
      {
        tree ofs, rem, bpu;

        /* If we're not dereferencing something, then the access
           must be ok.  */
        if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
          return;

        /* Convert the bit offset/size of the field into a byte-aligned
           [addr, addr+size) range: byte offset = ofs / BITS_PER_UNIT,
           and size is rounded up to cover the trailing bits (rem).  */
        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
        rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
        ofs = fold_convert_loc (location,
                                sizetype,
                                size_binop_loc (location,
                                                TRUNC_DIV_EXPR, ofs, bpu));

        size = convert (bitsizetype, TREE_OPERAND (t, 1));
        size = size_binop_loc (location, PLUS_EXPR, size, rem);
        size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
        size = convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = convert (ptr_type_node, addr);
        addr = fold_build2_loc (location, POINTER_PLUS_EXPR,
                                ptr_type_node, addr, ofs);

        base = addr;
        limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                                 fold_build2_loc (location,
                                                  POINTER_PLUS_EXPR, ptr_type_node,
                                                  base, size),
                                 size_int (-1));
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, location, dirflag);
}
942 /* Transform
943 1) Memory references.
944 2) BUILTIN_ALLOCA calls.
945 */
946 static void
947 mf_xform_statements (void)
948 {
949 basic_block bb, next;
950 gimple_stmt_iterator i;
951 int saved_last_basic_block = last_basic_block;
952 enum gimple_rhs_class grhs_class;
953
954 bb = ENTRY_BLOCK_PTR ->next_bb;
955 do
956 {
957 next = bb->next_bb;
958 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
959 {
960 gimple s = gsi_stmt (i);
961
962 /* Only a few GIMPLE statements can reference memory. */
963 switch (gimple_code (s))
964 {
965 case GIMPLE_ASSIGN:
966 mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
967 gimple_location (s), integer_one_node);
968 mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
969 gimple_location (s), integer_zero_node);
970 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
971 if (grhs_class == GIMPLE_BINARY_RHS)
972 mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
973 gimple_location (s), integer_zero_node);
974 break;
975
976 case GIMPLE_RETURN:
977 if (gimple_return_retval (s) != NULL_TREE)
978 {
979 mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
980 gimple_location (s),
981 integer_zero_node);
982 }
983 break;
984
985 case GIMPLE_CALL:
986 {
987 tree fndecl = gimple_call_fndecl (s);
988 if (fndecl && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA))
989 gimple_call_set_cannot_inline (s, true);
990 }
991 break;
992
993 default:
994 ;
995 }
996 }
997 bb = next;
998 }
999 while (bb && bb->index <= saved_last_basic_block);
1000 }
1001
1002 /* ------------------------------------------------------------------------ */
1003 /* ADDR_EXPR transforms. Perform the declaration-related mudflap tree
1004 transforms on the current function.
1005
1006 This is the first part of the mudflap instrumentation. It works on
1007 high-level GIMPLE because after lowering, all variables are moved out
1008 of their BIND_EXPR binding context, and we lose liveness information
1009 for the declarations we wish to instrument. */
1010
1011 static unsigned int
1012 execute_mudflap_function_decls (void)
1013 {
1014 struct gimplify_ctx gctx;
1015
1016 /* Don't instrument functions such as the synthetic constructor
1017 built during mudflap_finish_file. */
1018 if (mf_marked_p (current_function_decl) ||
1019 DECL_ARTIFICIAL (current_function_decl))
1020 return 0;
1021
1022 push_gimplify_context (&gctx);
1023
1024 mf_xform_decls (gimple_body (current_function_decl),
1025 DECL_ARGUMENTS (current_function_decl));
1026
1027 pop_gimplify_context (NULL);
1028 return 0;
1029 }
1030
/* This struct is passed from mf_xform_decls (via walk_stmt_info.info)
   to the mx_xfn_xform_decls callback to store state needed during the
   traversal searching for objects that have their addresses taken.  */
struct mf_xform_decls_data
{
  /* The function's PARM_DECL chain, still awaiting registration;
     cleared to NULL_TREE once the first GIMPLE_BIND is processed.  */
  tree param_decls;
};
1038
1039
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  When no decl
   in the chain needs registration, SEQ is returned unchanged.  Return
   the gimple sequence after synthesis.  */
gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
          tree unregister_fncall_param, register_fncall_param;

          /* Variable-sized objects should have sizes already been
             gimplified when we got here.  */
          size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl)));
          gcc_assert (is_gimple_val (size));


          unregister_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
                                                 unregister_fncall_param,
                                                 size,
                                                 /* __MF_TYPE_STACK */
                                                 integer_three_node);


          variable_name = mf_varname_tree (decl);
          register_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
             "name") */
          register_fncall = gimple_build_call (mf_register_fndecl, 4,
                                               register_fncall_param,
                                               size,
                                               /* __MF_TYPE_STACK */
                                               integer_three_node,
                                               variable_name);


          /* Accumulate the two calls.  */
          gimple_set_location (register_fncall, location);
          gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.  */
          if (gsi_end_p (initially_stmts))
            {
              /* SEQ is empty: there is no statement to anchor the
                 registration call on, so the variable goes untracked.  */
              if (!DECL_ARTIFICIAL (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
                         DECL_NAME (decl));
            }
          else
            {
              gsi_insert_before (&initially_stmts, register_fncall,
                                 GSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
            }
          mf_mark (decl);
        }

      decl = DECL_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
    {
      /* Wrap SEQ as try { SEQ } finally { __mf_unregister calls } so
         the unregistrations execute on every exit path from the bind.  */
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = gimple_seq_alloc ();

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}
1132
1133
1134 /* Process every variable mentioned in BIND_EXPRs. */
1135 static tree
1136 mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
1137 bool *handled_operands_p ATTRIBUTE_UNUSED,
1138 struct walk_stmt_info *wi)
1139 {
1140 struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
1141 gimple stmt = gsi_stmt (*gsi);
1142
1143 switch (gimple_code (stmt))
1144 {
1145 case GIMPLE_BIND:
1146 {
1147 /* Process function parameters now (but only once). */
1148 if (d->param_decls)
1149 {
1150 gimple_bind_set_body (stmt,
1151 mx_register_decls (d->param_decls,
1152 gimple_bind_body (stmt),
1153 gimple_location (stmt)));
1154 d->param_decls = NULL_TREE;
1155 }
1156
1157 gimple_bind_set_body (stmt,
1158 mx_register_decls (gimple_bind_vars (stmt),
1159 gimple_bind_body (stmt),
1160 gimple_location (stmt)));
1161 }
1162 break;
1163
1164 default:
1165 break;
1166 }
1167
1168 return NULL_TREE;
1169 }
1170
1171 /* Perform the object lifetime tracking mudflap transform on the given function
1172 tree. The tree is mutated in place, with possibly copied subtree nodes.
1173
1174 For every auto variable declared, if its address is ever taken
1175 within the function, then supply its lifetime to the mudflap
1176 runtime with the __mf_register and __mf_unregister calls.
1177 */
1178
1179 static void
1180 mf_xform_decls (gimple_seq fnbody, tree fnparams)
1181 {
1182 struct mf_xform_decls_data d;
1183 struct walk_stmt_info wi;
1184 struct pointer_set_t *pset = pointer_set_create ();
1185
1186 d.param_decls = fnparams;
1187 memset (&wi, 0, sizeof (wi));
1188 wi.info = (void*) &d;
1189 wi.pset = pset;
1190 walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
1191 pointer_set_destroy (pset);
1192 }
1193
1194
1195 /* ------------------------------------------------------------------------ */
1196 /* Externally visible mudflap functions. */
1197
1198
1199 /* Mark and return the given tree node to prevent further mudflap
1200 transforms. */
1201 static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;
1202
1203 tree
1204 mf_mark (tree t)
1205 {
1206 void **slot;
1207
1208 if (marked_trees == NULL)
1209 marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
1210 NULL);
1211
1212 slot = htab_find_slot (marked_trees, t, INSERT);
1213 *slot = t;
1214 return t;
1215 }
1216
1217 int
1218 mf_marked_p (tree t)
1219 {
1220 void *entry;
1221
1222 if (marked_trees == NULL)
1223 return 0;
1224
1225 entry = htab_find (marked_trees, t);
1226 return (entry != NULL);
1227 }
1228
/* Remember given node as a static of some kind: global data,
   function-scope static, or an anonymous constant.  Its assembler
   label is given.  */

/* A list of globals whose incomplete declarations we encountered.
   Instead of emitting the __mf_register call for them here, it's
   delayed until program finish time (mudflap_finish_file).  If they're
   still incomplete by then, warnings are emitted.  */

static GTY (()) VEC(tree,gc) *deferred_static_decls;

/* A list of statements for calling __mf_register() at startup time;
   appended to the synthetic constructor by mudflap_finish_file.  */
static GTY (()) tree enqueued_call_stmt_chain;
1242
1243 static void
1244 mudflap_register_call (tree obj, tree object_size, tree varname)
1245 {
1246 tree arg, call_stmt;
1247
1248 arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
1249 arg = convert (ptr_type_node, arg);
1250
1251 call_stmt = build_call_expr (mf_register_fndecl, 4,
1252 arg,
1253 convert (size_type_node, object_size),
1254 /* __MF_TYPE_STATIC */
1255 build_int_cst (NULL_TREE, 4),
1256 varname);
1257
1258 append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
1259 }
1260
1261 void
1262 mudflap_enqueue_decl (tree obj)
1263 {
1264 if (mf_marked_p (obj))
1265 return;
1266
1267 /* We don't need to process variable decls that are internally
1268 generated extern. If we did, we'd end up with warnings for them
1269 during mudflap_finish_file (). That would confuse the user,
1270 since the text would refer to variables that don't show up in the
1271 user's source code. */
1272 if (DECL_P (obj) && DECL_EXTERNAL (obj) && DECL_ARTIFICIAL (obj))
1273 return;
1274
1275 VEC_safe_push (tree, gc, deferred_static_decls, obj);
1276 }
1277
1278
1279 void
1280 mudflap_enqueue_constant (tree obj)
1281 {
1282 tree object_size, varname;
1283
1284 if (mf_marked_p (obj))
1285 return;
1286
1287 if (TREE_CODE (obj) == STRING_CST)
1288 object_size = build_int_cst (NULL_TREE, TREE_STRING_LENGTH (obj));
1289 else
1290 object_size = size_in_bytes (TREE_TYPE (obj));
1291
1292 if (TREE_CODE (obj) == STRING_CST)
1293 varname = mf_build_string ("string literal");
1294 else
1295 varname = mf_build_string ("constant");
1296
1297 mudflap_register_call (obj, object_size, varname);
1298 }
1299
1300
/* Emit any file-wide instrumentation: build a static constructor that
   calls __mf_init, optionally passes runtime options, and registers
   every deferred static decl and enqueued constant with the mudflap
   runtime.  */
void
mudflap_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  /* No need to continue when there were errors.  */
  if (seen_error ())
    return;

  /* Insert a call to __mf_init.  */
  {
    tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
    append_to_statement_list (call2_stmt, &ctor_statements);
  }

  /* If appropriate, call __mf_set_options to pass along read-ignore mode.  */
  if (flag_mudflap_ignore_reads)
    {
      tree arg = mf_build_string ("-ignore-reads");
      tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
      append_to_statement_list (call_stmt, &ctor_statements);
    }

  /* Process all enqueued object decls.  */
  if (deferred_static_decls)
    {
      size_t i;
      tree obj;
      FOR_EACH_VEC_ELT (tree, deferred_static_decls, i, obj)
        {
          gcc_assert (DECL_P (obj));

          if (mf_marked_p (obj))
            continue;

          /* Omit registration for static unaddressed objects.  NB:
             Perform registration for non-static objects regardless of
             TREE_USED or TREE_ADDRESSABLE, because they may be used
             from other compilation units.  */
          if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
            continue;

          /* A decl whose type is still incomplete cannot be sized;
             warn and skip it.  */
          if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
            {
              warning (OPT_Wmudflap,
                       "mudflap cannot track unknown size extern %qE",
                       DECL_NAME (obj));
              continue;
            }

          mudflap_register_call (obj,
                                 size_in_bytes (TREE_TYPE (obj)),
                                 mf_varname_tree (obj));
        }

      VEC_truncate (tree, deferred_static_decls, 0);
    }

  /* Append all the enqueued registration calls.  */
  if (enqueued_call_stmt_chain)
    {
      append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
      enqueued_call_stmt_chain = NULL_TREE;
    }

  /* Emit the accumulated statements as an 'I' (init) constructor at a
     reserved priority (MAX_RESERVED_INIT_PRIORITY-1).  */
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY-1);
}
1370
1371
/* Gate for both mudflap passes: run only when mudflap instrumentation
   was requested (flag_mudflap is nonzero; the value 2 additionally
   selects threaded mode, see flag_mudflap_threads).  */
static bool
gate_mudflap (void)
{
  return flag_mudflap != 0;
}
1377
/* The first mudflap pass: declaration lifetime instrumentation
   (execute_mudflap_function_decls).  Runs on high-level GIMPLE
   (PROP_gimple_any), before lowering destroys BIND_EXPR scoping.  */
struct gimple_opt_pass pass_mudflap_1 =
{
 {
  GIMPLE_PASS,
  "mudflap1",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_decls,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
1396
/* The second mudflap pass: memory-reference instrumentation
   (execute_mudflap_function_ops).  Runs late, on lowered GIMPLE in SSA
   form with a CFG (PROP_ssa | PROP_cfg | PROP_gimple_leh).  */
struct gimple_opt_pass pass_mudflap_2 =
{
 {
  GIMPLE_PASS,
  "mudflap2",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_ops,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_dump_func | TODO_update_ssa    /* todo_flags_finish */
 }
};
1416
1417 #include "gt-tree-mudflap.h"