re PR rtl-optimization/30807 (postreload bug (might be generic in trunk))
[gcc.git] / gcc / tree-mudflap.c
1 /* Mudflap: narrow-pointer bounds-checking by tree rewriting.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Frank Ch. Eigler <fche@redhat.com>
5 and Graydon Hoare <graydon@redhat.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "tm_p.h"
32 #include "basic-block.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "tree-inline.h"
36 #include "gimple.h"
37 #include "tree-iterator.h"
38 #include "tree-flow.h"
39 #include "tree-mudflap.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "hashtab.h"
43 #include "diagnostic.h"
44 #include <demangle.h>
45 #include "langhooks.h"
46 #include "ggc.h"
47 #include "cgraph.h"
48 #include "toplev.h"
49 #include "gimple.h"
50
51 /* Internal function decls */
52
53
54 /* Options. */
55 #define flag_mudflap_threads (flag_mudflap == 2)
56
57 /* Helpers. */
58 static tree mf_build_string (const char *string);
59 static tree mf_varname_tree (tree);
60 static tree mf_file_function_line_tree (location_t);
61
62 /* Indirection-related instrumentation. */
63 static void mf_decl_cache_locals (void);
64 static void mf_decl_clear_locals (void);
65 static void mf_xform_statements (void);
66 static unsigned int execute_mudflap_function_ops (void);
67
68 /* Addressable variables instrumentation. */
69 static void mf_xform_decls (gimple_seq, tree);
70 static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
71 struct walk_stmt_info *);
72 static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
73 static unsigned int execute_mudflap_function_decls (void);
74
75
76 /* ------------------------------------------------------------------------ */
77 /* Some generally helpful functions for mudflap instrumentation. */
78
79 /* Build a reference to a literal string. */
80 static tree
81 mf_build_string (const char *string)
82 {
83 size_t len = strlen (string);
84 tree result = mf_mark (build_string (len + 1, string));
85
86 TREE_TYPE (result) = build_array_type
87 (char_type_node, build_index_type (build_int_cst (NULL_TREE, len)));
88 TREE_CONSTANT (result) = 1;
89 TREE_READONLY (result) = 1;
90 TREE_STATIC (result) = 1;
91
92 result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);
93
94 return mf_mark (result);
95 }
96
/* Create a properly typed STRING_CST node that describes the given
   declaration.  It will be used as an argument for __mf_register().
   Try to construct a helpful string, including file/function/variable
   name.  */

static tree
mf_varname_tree (tree decl)
{
  /* A single pretty-printer is lazily constructed on first call and
     reused thereafter; its output area is cleared before and after
     each use.  */
  static pretty_printer buf_rec;
  static int initialized = 0;
  pretty_printer *buf = & buf_rec;
  const char *buf_contents;
  tree result;

  gcc_assert (decl);

  if (!initialized)
    {
      pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
      initialized = 1;
    }
  pp_clear_output_area (buf);

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  {
    expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
    const char *sourcefile;
    unsigned sourceline = xloc.line;
    unsigned sourcecolumn = 0;
    sourcecolumn = xloc.column;
    sourcefile = xloc.file;
    /* Fall back to the enclosing function's file, then to a
       placeholder, when the decl itself carries no file name.  */
    if (sourcefile == NULL && current_function_decl != NULL_TREE)
      sourcefile = DECL_SOURCE_FILE (current_function_decl);
    if (sourcefile == NULL)
      sourcefile = "<unknown file>";

    pp_string (buf, sourcefile);

    /* Line and column are appended only when known (nonzero).  */
    if (sourceline != 0)
      {
        pp_string (buf, ":");
        pp_decimal_int (buf, sourceline);

        if (sourcecolumn != 0)
          {
            pp_string (buf, ":");
            pp_decimal_int (buf, sourcecolumn);
          }
      }
  }

  if (current_function_decl != NULL_TREE)
    {
      /* Add (FUNCTION) */
      pp_string (buf, " (");
      {
        const char *funcname = NULL;
        if (DECL_NAME (current_function_decl))
          funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
        if (funcname == NULL)
          funcname = "anonymous fn";

        pp_string (buf, funcname);
      }
      pp_string (buf, ") ");
    }
  else
    pp_string (buf, " ");

  /* Add <variable-declaration>, possibly demangled.  */
  {
    const char *declname = NULL;

    if (DECL_NAME (decl) != NULL)
      {
        if (strcmp ("GNU C++", lang_hooks.name) == 0)
          {
            /* The gcc/cp decl_printable_name hook doesn't do as good a job as
               the libiberty demangler.  */
            declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
                                       DMGL_AUTO | DMGL_VERBOSE);
          }
        if (declname == NULL)
          declname = lang_hooks.decl_printable_name (decl, 3);
      }
    if (declname == NULL)
      declname = "<unnamed variable>";

    pp_string (buf, declname);
  }

  /* Return the lot as a new STRING_CST.  */
  buf_contents = pp_base_formatted_text (buf);
  result = mf_build_string (buf_contents);
  pp_clear_output_area (buf);

  return result;
}
195
196
197 /* And another friend, for producing a simpler message. */
198
199 static tree
200 mf_file_function_line_tree (location_t location)
201 {
202 expanded_location xloc = expand_location (location);
203 const char *file = NULL, *colon, *line, *op, *name, *cp;
204 char linecolbuf[30]; /* Enough for two decimal numbers plus a colon. */
205 char *string;
206 tree result;
207
208 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
209 file = xloc.file;
210 if (file == NULL && current_function_decl != NULL_TREE)
211 file = DECL_SOURCE_FILE (current_function_decl);
212 if (file == NULL)
213 file = "<unknown file>";
214
215 if (xloc.line > 0)
216 {
217 if (xloc.column > 0)
218 sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
219 else
220 sprintf (linecolbuf, "%d", xloc.line);
221 colon = ":";
222 line = linecolbuf;
223 }
224 else
225 colon = line = "";
226
227 /* Add (FUNCTION). */
228 name = lang_hooks.decl_printable_name (current_function_decl, 1);
229 if (name)
230 {
231 op = " (";
232 cp = ")";
233 }
234 else
235 op = name = cp = "";
236
237 string = concat (file, colon, line, op, name, cp, NULL);
238 result = mf_build_string (string);
239 free (string);
240
241 return result;
242 }
243
244
/* global tree nodes */

/* Global tree objects for global variables and functions exported by
   mudflap runtime library.  mudflap_init must be called
   before using these.  */

/* uintptr_t (usually "unsigned long") */
static GTY (()) tree mf_uintptr_type;

/* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
static GTY (()) tree mf_cache_struct_type;

/* struct __mf_cache * const */
static GTY (()) tree mf_cache_structptr_type;

/* extern struct __mf_cache __mf_lookup_cache []; */
static GTY (()) tree mf_cache_array_decl;

/* extern unsigned char __mf_lc_shift; */
static GTY (()) tree mf_cache_shift_decl;

/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;

/* Their function-scope local shadows, used in single-threaded mode only.
   Created by mf_decl_cache_locals and reset by mf_decl_clear_locals.  */

/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;

/* auto const uintptr_t __mf_lc_mask_l; */
static GTY (()) tree mf_cache_mask_decl_l;

/* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_check_fndecl;

/* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_register_fndecl;

/* extern void __mf_unregister (void *ptr, size_t sz, int type); */
static GTY (()) tree mf_unregister_fndecl;

/* extern void __mf_init (); */
static GTY (()) tree mf_init_fndecl;

/* extern int __mf_set_options (const char*); */
static GTY (()) tree mf_set_options_fndecl;
292
293 /* Helper for mudflap_init: construct a decl with the given category,
294 name, and type, mark it an external reference, and pushdecl it. */
295 static inline tree
296 mf_make_builtin (enum tree_code category, const char *name, tree type)
297 {
298 tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
299 category, get_identifier (name), type));
300 TREE_PUBLIC (decl) = 1;
301 DECL_EXTERNAL (decl) = 1;
302 lang_hooks.decls.pushdecl (decl);
303 /* The decl was declared by the compiler. */
304 DECL_ARTIFICIAL (decl) = 1;
305 /* And we don't want debug info for it. */
306 DECL_IGNORED_P (decl) = 1;
307 return decl;
308 }
309
310 /* Helper for mudflap_init: construct a tree corresponding to the type
311 struct __mf_cache { uintptr_t low; uintptr_t high; };
312 where uintptr_t is the FIELD_TYPE argument. */
313 static inline tree
314 mf_make_mf_cache_struct_type (tree field_type)
315 {
316 /* There is, abominably, no language-independent way to construct a
317 RECORD_TYPE. So we have to call the basic type construction
318 primitives by hand. */
319 tree fieldlo = build_decl (UNKNOWN_LOCATION,
320 FIELD_DECL, get_identifier ("low"), field_type);
321 tree fieldhi = build_decl (UNKNOWN_LOCATION,
322 FIELD_DECL, get_identifier ("high"), field_type);
323
324 tree struct_type = make_node (RECORD_TYPE);
325 DECL_CONTEXT (fieldlo) = struct_type;
326 DECL_CONTEXT (fieldhi) = struct_type;
327 TREE_CHAIN (fieldlo) = fieldhi;
328 TYPE_FIELDS (struct_type) = fieldlo;
329 TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
330 layout_type (struct_type);
331
332 return struct_type;
333 }
334
/* Shorthands for building the fixed-arity prototypes used by
   mudflap_init below; #undef'd again immediately after it.  */
#define build_function_type_0(rtype)		\
  build_function_type (rtype, void_list_node)
#define build_function_type_1(rtype, arg1)	\
  build_function_type (rtype, tree_cons (0, arg1, void_list_node))
#define build_function_type_3(rtype, arg1, arg2, arg3)			\
  build_function_type (rtype,						\
		       tree_cons (0, arg1,				\
				  tree_cons (0, arg2,			\
					     tree_cons (0, arg3,	\
							void_list_node))))
#define build_function_type_4(rtype, arg1, arg2, arg3, arg4)		\
  build_function_type (rtype,						\
		       tree_cons (0, arg1,				\
				  tree_cons (0, arg2,			\
					     tree_cons (0, arg3,	\
							tree_cons (0, arg4, \
								   void_list_node)))))
352
/* Initialize the global tree nodes that correspond to mf-runtime.h
   declarations.  Idempotent: only the first call has any effect.  */
void
mudflap_init (void)
{
  static bool done = false;
  tree mf_const_string_type;
  tree mf_cache_array_type;
  tree mf_check_register_fntype;
  tree mf_unregister_fntype;
  tree mf_init_fntype;
  tree mf_set_options_fntype;

  if (done)
    return;
  done = true;

  /* uintptr_t: an unsigned integer type as wide as a pointer.  */
  mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
                                                    /*unsignedp=*/true);
  mf_const_string_type
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));

  mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
  mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
  mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
  /* __mf_check and __mf_register share the same signature:
     (void *, size_t, int, const char *).  */
  mf_check_register_fntype =
    build_function_type_4 (void_type_node, ptr_type_node, size_type_node,
                           integer_type_node, mf_const_string_type);
  mf_unregister_fntype =
    build_function_type_3 (void_type_node, ptr_type_node, size_type_node,
                           integer_type_node);
  mf_init_fntype =
    build_function_type_0 (void_type_node);
  mf_set_options_fntype =
    build_function_type_1 (integer_type_node, mf_const_string_type);

  mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
                                         mf_cache_array_type);
  mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
                                         unsigned_char_type_node);
  mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
                                        mf_uintptr_type);
  /* Don't process these in mudflap_enqueue_decl, should they come by
     there for some reason.  */
  mf_mark (mf_cache_array_decl);
  mf_mark (mf_cache_shift_decl);
  mf_mark (mf_cache_mask_decl);
  mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
                                     mf_check_register_fntype);
  mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
                                        mf_check_register_fntype);
  mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
                                          mf_unregister_fntype);
  mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
                                    mf_init_fntype);
  mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
                                           mf_set_options_fntype);
}
#undef build_function_type_4
#undef build_function_type_3
#undef build_function_type_1
#undef build_function_type_0
416
417
418 /* ------------------------------------------------------------------------ */
419 /* This is the second part of the mudflap instrumentation. It works on
420 low-level GIMPLE using the CFG, because we want to run this pass after
421 tree optimizations have been performed, but we have to preserve the CFG
422 for expansion from trees to RTL.
423 Below is the list of transformations performed on statements in the
424 current function.
425
426 1) Memory reference transforms: Perform the mudflap indirection-related
427 tree transforms on memory references.
428
429 2) Mark BUILTIN_ALLOCA calls not inlineable.
430
431 */
432
433 static unsigned int
434 execute_mudflap_function_ops (void)
435 {
436 struct gimplify_ctx gctx;
437
438 /* Don't instrument functions such as the synthetic constructor
439 built during mudflap_finish_file. */
440 if (mf_marked_p (current_function_decl) ||
441 DECL_ARTIFICIAL (current_function_decl))
442 return 0;
443
444 push_gimplify_context (&gctx);
445
446 /* In multithreaded mode, don't cache the lookup cache parameters. */
447 if (! flag_mudflap_threads)
448 mf_decl_cache_locals ();
449
450 mf_xform_statements ();
451
452 if (! flag_mudflap_threads)
453 mf_decl_clear_locals ();
454
455 pop_gimplify_context (NULL);
456 return 0;
457 }
458
459 /* Insert a gimple_seq SEQ on all the outgoing edges out of BB. Note that
460 if BB has more than one edge, STMT will be replicated for each edge.
461 Also, abnormal edges will be ignored. */
462
463 static void
464 insert_edge_copies_seq (gimple_seq seq, basic_block bb)
465 {
466 edge e;
467 edge_iterator ei;
468 unsigned n_copies = -1;
469
470 FOR_EACH_EDGE (e, ei, bb->succs)
471 if (!(e->flags & EDGE_ABNORMAL))
472 n_copies++;
473
474 FOR_EACH_EDGE (e, ei, bb->succs)
475 if (!(e->flags & EDGE_ABNORMAL))
476 gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
477 }
478
479 /* Create and initialize local shadow variables for the lookup cache
480 globals. Put their decls in the *_l globals for use by
481 mf_build_check_statement_for. */
482
483 static void
484 mf_decl_cache_locals (void)
485 {
486 gimple g;
487 gimple_seq seq = gimple_seq_alloc ();
488
489 /* Build the cache vars. */
490 mf_cache_shift_decl_l
491 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl),
492 "__mf_lookup_shift_l"));
493
494 mf_cache_mask_decl_l
495 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl),
496 "__mf_lookup_mask_l"));
497
498 /* Build initialization nodes for the cache vars. We just load the
499 globals into the cache variables. */
500 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
501 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
502 gimple_seq_add_stmt (&seq, g);
503
504 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
505 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
506 gimple_seq_add_stmt (&seq, g);
507
508 insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
509
510 gsi_commit_edge_inserts ();
511 }
512
513
514 static void
515 mf_decl_clear_locals (void)
516 {
517 /* Unset local shadows. */
518 mf_cache_shift_decl_l = NULL_TREE;
519 mf_cache_mask_decl_l = NULL_TREE;
520 }
521
/* Build and insert the cache-lookup / bounds-check code for a single
   memory access.  BASE and LIMIT are trees for the first and last
   bytes of the accessed region.  *INSTR_GSI points at the statement
   being instrumented; the check is placed ahead of it, and on return
   *INSTR_GSI is repositioned at the head of the block now holding
   that statement.  LOCATION is used for the emitted statements'
   source location, and DIRFLAG (integer_zero_node for reads,
   integer_one_node for writes) is passed through to __mf_check.  */
static void
mf_build_check_statement_for (tree base, tree limit,
                              gimple_stmt_iterator *instr_gsi,
                              location_t location, tree dirflag)
{
  gimple_stmt_iterator gsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;
  gimple g;
  gimple_seq seq, stmts;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
  gsi = *instr_gsi;
  gsi_prev (&gsi);
  /* Split before the instrumented statement; if it is the first real
     statement of the block, split after any leading labels instead.  */
  if (! gsi_end_p (gsi))
    e = split_block (cond_bb, gsi_stmt (gsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Build our local variables.  */
  mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem");
  mf_base = make_rename_temp (mf_uintptr_type, "__mf_base");
  mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  seq = gimple_seq_alloc ();
  t = fold_convert (mf_uintptr_type, unshare_expr (base));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_base, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = fold_convert (mf_uintptr_type, unshare_expr (limit));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_limit, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].
     In single-threaded mode the shift/mask come from the function-local
     shadow copies set up by mf_decl_cache_locals.  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              flag_mudflap_threads ? mf_cache_shift_decl
               : mf_cache_shift_decl_l);
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              flag_mudflap_threads ? mf_cache_mask_decl
               : mf_cache_mask_decl_l);
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_elem, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))
        {
          __mf_check ();
          ... and only if single-threaded:
          __mf_lookup_shift_l = ...;
          __mf_lookup_mask_l = ...;
        }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low  > __mf_base'.  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
       1) u <--  '__mf_elem->high'
       2) v <--  '__mf_limit'.

     Then build 'u <-- (u < v).  */

  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TREE_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond");
  g = gimple_build_assign (cond, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
                         NULL_TREE);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))

     The lowered GIMPLE statements representing this code are in the
     sequence 'seq'.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /* Now build up the body of the cache-miss handling:

     __mf_check();
     refresh *_l vars.

     This is the body of the conditional.  */

  seq = gimple_seq_alloc ();
  /* u is a string, so it is already a gimple value.  */
  u = mf_file_function_line_tree (location);
  /* NB: we pass the overall [base..limit] range to mf_check, i.e. its
     size argument is limit - base + 1 bytes.  */
  v = fold_build2 (PLUS_EXPR, mf_uintptr_type,
                   fold_build2 (MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
                   build_int_cst (mf_uintptr_type, 1));
  v = force_gimple_operand (v, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
  gimple_seq_add_stmt (&seq, g);

  if (! flag_mudflap_threads)
    {
      /* If the __mf_check call can end a basic block (e.g. it could
	 throw), flush what we have into then_bb and continue the
	 shadow refreshes in a fresh block after it.  */
      if (stmt_ends_bb_p (g))
	{
	  gsi = gsi_start_bb (then_bb);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	  e = split_block (then_bb, g);
	  then_bb = e->dest;
	  seq = gimple_seq_alloc ();
	}

      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
      gimple_seq_add_stmt (&seq, g);

      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
      gimple_seq_add_stmt (&seq, g);
    }

  /* Insert the check code in the THEN block.  */
  gsi = gsi_start_bb (then_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  *instr_gsi = gsi_start_bb (join_bb);
}
728
729
730 /* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
731 eligible for instrumentation. For the mudflap1 pass, this implies
732 that it should be registered with the libmudflap runtime. For the
733 mudflap2 pass this means instrumenting an indirection operation with
734 respect to the object.
735 */
736 static int
737 mf_decl_eligible_p (tree decl)
738 {
739 return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
740 /* The decl must have its address taken. In the case of
741 arrays, this flag is also set if the indexes are not
742 compile-time known valid constants. */
743 /* XXX: not sufficient: return-by-value structs! */
744 && TREE_ADDRESSABLE (decl)
745 /* The type of the variable must be complete. */
746 && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
747 /* The decl hasn't been decomposed somehow. */
748 && !DECL_HAS_VALUE_EXPR_P (decl));
749 }
750
751
/* Instrument a single memory-referencing tree *TP appearing in the
   statement at *ITER.  DIRFLAG is integer_zero_node for a read and
   integer_one_node for a write; LOCATION is used for the emitted
   check.  Computes a [base..limit] byte range for the access and
   delegates to mf_build_check_statement_for, or returns without
   effect when the access needs no check.  */
static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
        /* If we have a bitfield component reference, we must note the
           innermost addressable object in ELT, from which we will
           construct the byte-addressable bounds of the bitfield.  */
        tree elt = NULL_TREE;
        int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
            if (bitfield_ref_p && elt == NULL_TREE
                && (TREE_CODE (var) == ARRAY_REF
                    || TREE_CODE (var) == COMPONENT_REF))
              elt = var;

            if (TREE_CODE (var) == ARRAY_REF)
              {
                /* An array index anywhere in the chain means this is
                   not a plain member access.  */
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var))
              {
                /* Found a pointer dereference at the root: check from
                   the pointed-to address.  */
                base = TREE_OPERAND (var, 0);
                break;
              }
            else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
              {
                var = TREE_OPERAND (var, 0);
                /* A view of a (non-string) constant cannot be an
                   out-of-bounds access; skip it.  */
                if (CONSTANT_CLASS_P (var)
                    && TREE_CODE (var) != STRING_CST)
                  return;
              }
            else
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
                  {
                    base = build1 (ADDR_EXPR,
                                   build_pointer_type (TREE_TYPE (var)), var);
                    break;
                  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

            if (elt)
              elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
                            elt);
            addr = fold_convert (ptr_type_node, elt ? elt : base);
            addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
                                addr, fold_convert (sizetype,
                                                    byte_position (field)));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        /* limit = addr + size - 1, i.e. the last byte of the access.  */
        limit = fold_build2 (MINUS_EXPR, mf_uintptr_type,
                             fold_build2 (PLUS_EXPR, mf_uintptr_type,
                                          convert (mf_uintptr_type, addr),
                                          size),
                             integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      /* limit = base + size - 1 (the -1 expressed as size_int (-1)
	 applied after the size addition).  */
      limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
                           fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, base,
                                        size),
                           size_int (-1));
      break;

    case TARGET_MEM_REF:
      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
                           fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, base,
                                        size),
                           size_int (-1));
      break;

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
               "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above?  */
      {
        tree ofs, rem, bpu;

        /* If we're not dereferencing something, then the access
           must be ok.  */
        if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
          return;

        /* Convert the bit offset/size into whole-byte address and
           size: ofs = bit-offset / BITS_PER_UNIT, and size covers the
           leftover bits rounded up to a full byte.  */
        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
        rem = size_binop (TRUNC_MOD_EXPR, ofs, bpu);
        ofs = fold_convert (sizetype, size_binop (TRUNC_DIV_EXPR, ofs, bpu));

        size = convert (bitsizetype, TREE_OPERAND (t, 1));
        size = size_binop (PLUS_EXPR, size, rem);
        size = size_binop (CEIL_DIV_EXPR, size, bpu);
        size = convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = convert (ptr_type_node, addr);
        addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, addr, ofs);

        base = addr;
        limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
                             fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
                                          base, size),
                             size_int (-1));
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, location, dirflag);
}
/* Transform
   1) Memory references.
   2) BUILTIN_ALLOCA calls.
*/
static void
mf_xform_statements (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  /* Snapshot the block count before instrumenting: the check-insertion
     helper splits blocks, and blocks created during this walk (holding
     only instrumentation code) must not themselves be rescanned.  */
  int saved_last_basic_block = last_basic_block;
  enum gimple_rhs_class grhs_class;

  bb = ENTRY_BLOCK_PTR ->next_bb;
  do
    {
      /* Capture the successor up front; instrumenting BB may split it.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple s = gsi_stmt (i);

          /* Only a few GIMPLE statements can reference memory.  */
          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
              /* LHS is checked as a write (dirflag integer_one_node),
                 RHS operands as reads (integer_zero_node).  */
              mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
                                 gimple_location (s), integer_one_node);
              mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
                                 gimple_location (s), integer_zero_node);
              /* Only a binary RHS has a second operand to inspect.  */
              grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
              if (grhs_class == GIMPLE_BINARY_RHS)
                mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
                                   gimple_location (s), integer_zero_node);
              break;

            case GIMPLE_RETURN:
              /* A returned value is a read of the return operand.  */
              if (gimple_return_retval (s) != NULL_TREE)
                {
                  mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
                                     gimple_location (s),
                                     integer_zero_node);
                }
              break;

            case GIMPLE_CALL:
              {
                /* Prevent inlining of alloca calls so the runtime's
                   stack-object bookkeeping stays attached to the
                   correct frame.  */
                tree fndecl = gimple_call_fndecl (s);
                if (fndecl && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA))
                  gimple_call_set_cannot_inline (s, true);
              }
              break;

            default:
              ;
            }
        }
      bb = next;
    }
  while (bb && bb->index <= saved_last_basic_block);
}
1002
1003 /* ------------------------------------------------------------------------ */
1004 /* ADDR_EXPR transforms. Perform the declaration-related mudflap tree
1005 transforms on the current function.
1006
1007 This is the first part of the mudflap instrumentation. It works on
1008 high-level GIMPLE because after lowering, all variables are moved out
1009 of their BIND_EXPR binding context, and we lose liveness information
1010 for the declarations we wish to instrument. */
1011
1012 static unsigned int
1013 execute_mudflap_function_decls (void)
1014 {
1015 struct gimplify_ctx gctx;
1016
1017 /* Don't instrument functions such as the synthetic constructor
1018 built during mudflap_finish_file. */
1019 if (mf_marked_p (current_function_decl) ||
1020 DECL_ARTIFICIAL (current_function_decl))
1021 return 0;
1022
1023 push_gimplify_context (&gctx);
1024
1025 mf_xform_decls (gimple_body (current_function_decl),
1026 DECL_ARGUMENTS (current_function_decl));
1027
1028 pop_gimplify_context (NULL);
1029 return 0;
1030 }
1031
/* This struct is passed between mf_xform_decls to store state needed
   during the traversal searching for objects that have their
   addresses taken.  */
struct mf_xform_decls_data
{
  /* Function parameters still awaiting registration; set to NULL_TREE
     once they have been handled at the first BIND seen.  */
  tree param_decls;
};
1039
1040
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.  */

gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  /* __mf_unregister calls, accumulated here and later emitted in a
     FINALLY clause so they run on every scope exit path.  */
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
          tree unregister_fncall_param, register_fncall_param;

          /* Variable-sized objects should have sizes already been
             gimplified when we got here. */
          size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl)));
          gcc_assert (is_gimple_val (size));


          unregister_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
                                                 unregister_fncall_param,
                                                 size,
                                                 /* __MF_TYPE_STACK */
                                                 build_int_cst (NULL_TREE, 3));


          variable_name = mf_varname_tree (decl);
          register_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
             "name") */
          register_fncall = gimple_build_call (mf_register_fndecl, 4,
                                               register_fncall_param,
                                               size,
                                               /* __MF_TYPE_STACK */
                                               build_int_cst (NULL_TREE, 3),
                                               variable_name);


          /* Accumulate the two calls.  */
          gimple_set_location (register_fncall, location);
          gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.  */
          if (gsi_end_p (initially_stmts))
            {
              /* Empty body: there is no statement to insert before, so
                 the variable cannot be tracked -- warn unless it is
                 compiler-generated.  */
              if (!DECL_ARTIFICIAL (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
                         DECL_NAME (decl));
            }
          else
            {
              gsi_insert_before (&initially_stmts, register_fncall,
                                 GSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
            }
          mf_mark (decl);
        }

      decl = TREE_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
    {
      /* Wrap the original body in a TRY_FINALLY carrying the
         accumulated __mf_unregister calls.  */
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = gimple_seq_alloc ();

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}
1133
1134
1135 /* Process every variable mentioned in BIND_EXPRs. */
1136 static tree
1137 mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
1138 bool *handled_operands_p ATTRIBUTE_UNUSED,
1139 struct walk_stmt_info *wi)
1140 {
1141 struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
1142 gimple stmt = gsi_stmt (*gsi);
1143
1144 switch (gimple_code (stmt))
1145 {
1146 case GIMPLE_BIND:
1147 {
1148 /* Process function parameters now (but only once). */
1149 if (d->param_decls)
1150 {
1151 gimple_bind_set_body (stmt,
1152 mx_register_decls (d->param_decls,
1153 gimple_bind_body (stmt),
1154 gimple_location (stmt)));
1155 d->param_decls = NULL_TREE;
1156 }
1157
1158 gimple_bind_set_body (stmt,
1159 mx_register_decls (gimple_bind_vars (stmt),
1160 gimple_bind_body (stmt),
1161 gimple_location (stmt)));
1162 }
1163 break;
1164
1165 default:
1166 break;
1167 }
1168
1169 return NULL_TREE;
1170 }
1171
1172 /* Perform the object lifetime tracking mudflap transform on the given function
1173 tree. The tree is mutated in place, with possibly copied subtree nodes.
1174
1175 For every auto variable declared, if its address is ever taken
1176 within the function, then supply its lifetime to the mudflap
1177 runtime with the __mf_register and __mf_unregister calls.
1178 */
1179
1180 static void
1181 mf_xform_decls (gimple_seq fnbody, tree fnparams)
1182 {
1183 struct mf_xform_decls_data d;
1184 struct walk_stmt_info wi;
1185 struct pointer_set_t *pset = pointer_set_create ();
1186
1187 d.param_decls = fnparams;
1188 memset (&wi, 0, sizeof (wi));
1189 wi.info = (void*) &d;
1190 wi.pset = pset;
1191 walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
1192 pointer_set_destroy (pset);
1193 }
1194
1195
1196 /* ------------------------------------------------------------------------ */
1197 /* Externally visible mudflap functions. */
1198
1199
/* Mark and return the given tree node to prevent further mudflap
   transforms.  The set is keyed by pointer identity and kept alive
   across garbage collections via the GTY marker.  */
static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;
1203
1204 tree
1205 mf_mark (tree t)
1206 {
1207 void **slot;
1208
1209 if (marked_trees == NULL)
1210 marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
1211 NULL);
1212
1213 slot = htab_find_slot (marked_trees, t, INSERT);
1214 *slot = t;
1215 return t;
1216 }
1217
1218 int
1219 mf_marked_p (tree t)
1220 {
1221 void *entry;
1222
1223 if (marked_trees == NULL)
1224 return 0;
1225
1226 entry = htab_find (marked_trees, t);
1227 return (entry != NULL);
1228 }
1229
/* Remember given node as a static of some kind: global data,
   function-scope static, or an anonymous constant.  Its assembler
   label is given.  */

/* A list of globals whose incomplete declarations we encountered.
   Instead of emitting the __mf_register call for them here, it's
   delayed until program finish time.  If they're still incomplete by
   then, warnings are emitted.  */

static GTY (()) VEC(tree,gc) *deferred_static_decls;

/* A list of statements for calling __mf_register() at startup time.
   Drained into the synthetic constructor by mudflap_finish_file.  */
static GTY (()) tree enqueued_call_stmt_chain;
1243
1244 static void
1245 mudflap_register_call (tree obj, tree object_size, tree varname)
1246 {
1247 tree arg, call_stmt;
1248
1249 arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
1250 arg = convert (ptr_type_node, arg);
1251
1252 call_stmt = build_call_expr (mf_register_fndecl, 4,
1253 arg,
1254 convert (size_type_node, object_size),
1255 /* __MF_TYPE_STATIC */
1256 build_int_cst (NULL_TREE, 4),
1257 varname);
1258
1259 append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
1260 }
1261
1262 void
1263 mudflap_enqueue_decl (tree obj)
1264 {
1265 if (mf_marked_p (obj))
1266 return;
1267
1268 /* We don't need to process variable decls that are internally
1269 generated extern. If we did, we'd end up with warnings for them
1270 during mudflap_finish_file (). That would confuse the user,
1271 since the text would refer to variables that don't show up in the
1272 user's source code. */
1273 if (DECL_P (obj) && DECL_EXTERNAL (obj) && DECL_ARTIFICIAL (obj))
1274 return;
1275
1276 VEC_safe_push (tree, gc, deferred_static_decls, obj);
1277 }
1278
1279
1280 void
1281 mudflap_enqueue_constant (tree obj)
1282 {
1283 tree object_size, varname;
1284
1285 if (mf_marked_p (obj))
1286 return;
1287
1288 if (TREE_CODE (obj) == STRING_CST)
1289 object_size = build_int_cst (NULL_TREE, TREE_STRING_LENGTH (obj));
1290 else
1291 object_size = size_in_bytes (TREE_TYPE (obj));
1292
1293 if (TREE_CODE (obj) == STRING_CST)
1294 varname = mf_build_string ("string literal");
1295 else
1296 varname = mf_build_string ("constant");
1297
1298 mudflap_register_call (obj, object_size, varname);
1299 }
1300
1301
/* Emit any file-wide instrumentation.  Builds a static constructor
   that initializes the mudflap runtime, forwards options, and
   registers every deferred static object.  */
void
mudflap_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  /* No need to continue when there were errors.  */
  if (errorcount != 0 || sorrycount != 0)
    return;

  /* Insert a call to __mf_init.  This must come before any
     registration calls appended below.  */
  {
    tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
    append_to_statement_list (call2_stmt, &ctor_statements);
  }

  /* If appropriate, call __mf_set_options to pass along read-ignore mode.  */
  if (flag_mudflap_ignore_reads)
    {
      tree arg = mf_build_string ("-ignore-reads");
      tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
      append_to_statement_list (call_stmt, &ctor_statements);
    }

  /* Process all enqueued object decls.  */
  if (deferred_static_decls)
    {
      size_t i;
      tree obj;
      for (i = 0; VEC_iterate (tree, deferred_static_decls, i, obj); i++)
        {
          gcc_assert (DECL_P (obj));

          if (mf_marked_p (obj))
            continue;

          /* Omit registration for static unaddressed objects.  NB:
             Perform registration for non-static objects regardless of
             TREE_USED or TREE_ADDRESSABLE, because they may be used
             from other compilation units.  */
          if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
            continue;

          /* Still-incomplete types have no knowable size to register;
             warn instead.  */
          if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
            {
              warning (OPT_Wmudflap,
                       "mudflap cannot track unknown size extern %qE",
                       DECL_NAME (obj));
              continue;
            }

          mudflap_register_call (obj,
                                 size_in_bytes (TREE_TYPE (obj)),
                                 mf_varname_tree (obj));
        }

      VEC_truncate (tree, deferred_static_decls, 0);
    }

  /* Append all the enqueued registration calls.  */
  if (enqueued_call_stmt_chain)
    {
      append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
      enqueued_call_stmt_chain = NULL_TREE;
    }

  /* Wrap the accumulated statements in a static constructor at a
     reserved (early) init priority.  */
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY-1);
}
1371
1372
1373 static bool
1374 gate_mudflap (void)
1375 {
1376 return flag_mudflap != 0;
1377 }
1378
/* Pass descriptor for the first mudflap pass: decl lifetime
   registration, run on high GIMPLE before lowering.  */
struct gimple_opt_pass pass_mudflap_1 =
{
 {
  GIMPLE_PASS,
  "mudflap1",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_decls,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
1397
/* Pass descriptor for the second mudflap pass: memory-reference
   check insertion, run after the CFG and SSA form are available.  */
struct gimple_opt_pass pass_mudflap_2 =
{
 {
  GIMPLE_PASS,
  "mudflap2",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_ops,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_dump_func | TODO_update_ssa    /* todo_flags_finish */
 }
};
1417
1418 #include "gt-tree-mudflap.h"