re PR tree-optimization/49309 (ICE with -fmudflap: verify_stmts failed: type mismatch...
[gcc.git] / gcc / tree-mudflap.c
1 /* Mudflap: narrow-pointer bounds-checking by tree rewriting.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Frank Ch. Eigler <fche@redhat.com>
5 and Graydon Hoare <graydon@redhat.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "tm_p.h"
30 #include "basic-block.h"
31 #include "flags.h"
32 #include "function.h"
33 #include "tree-inline.h"
34 #include "gimple.h"
35 #include "tree-iterator.h"
36 #include "tree-flow.h"
37 #include "tree-mudflap.h"
38 #include "tree-dump.h"
39 #include "tree-pass.h"
40 #include "hashtab.h"
41 #include "diagnostic.h"
42 #include "demangle.h"
43 #include "langhooks.h"
44 #include "ggc.h"
45 #include "cgraph.h"
46 #include "gimple.h"
47
48 /* Internal function decls */
49
50
51 /* Options. */
52 #define flag_mudflap_threads (flag_mudflap == 2)
53
54 /* Helpers. */
55 static tree mf_build_string (const char *string);
56 static tree mf_varname_tree (tree);
57 static tree mf_file_function_line_tree (location_t);
58
59 /* Indirection-related instrumentation. */
60 static void mf_decl_cache_locals (void);
61 static void mf_decl_clear_locals (void);
62 static void mf_xform_statements (void);
63 static unsigned int execute_mudflap_function_ops (void);
64
65 /* Addressable variables instrumentation. */
66 static void mf_xform_decls (gimple_seq, tree);
67 static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
68 struct walk_stmt_info *);
69 static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
70 static unsigned int execute_mudflap_function_decls (void);
71
72
73 /* ------------------------------------------------------------------------ */
74 /* Some generally helpful functions for mudflap instrumentation. */
75
76 /* Build a reference to a literal string. */
77 static tree
78 mf_build_string (const char *string)
79 {
80 size_t len = strlen (string);
81 tree result = mf_mark (build_string (len + 1, string));
82
83 TREE_TYPE (result) = build_array_type
84 (char_type_node, build_index_type (size_int (len)));
85 TREE_CONSTANT (result) = 1;
86 TREE_READONLY (result) = 1;
87 TREE_STATIC (result) = 1;
88
89 result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);
90
91 return mf_mark (result);
92 }
93
94 /* Create a properly typed STRING_CST node that describes the given
95 declaration. It will be used as an argument for __mf_register().
96 Try to construct a helpful string, of the rough form
97 "FILE[:LINE[:COLUMN]] (FUNCTION) DECLNAME". */
98
/* NOTE(review): the buffer below is a single static pretty-printer that
   is reused (and cleared) on every call, so this function is neither
   reentrant nor thread-safe -- acceptable for a compiler pass.  */
99 static tree
100 mf_varname_tree (tree decl)
101 {
102 static pretty_printer buf_rec;
103 static int initialized = 0;
104 pretty_printer *buf = & buf_rec;
105 const char *buf_contents;
106 tree result;
107
108 gcc_assert (decl);
109
/* Lazily construct the shared pretty-printer on first use.  */
110 if (!initialized)
111 {
112 pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
113 initialized = 1;
114 }
115 pp_clear_output_area (buf);
116
117 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
118 {
119 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
120 const char *sourcefile;
121 unsigned sourceline = xloc.line;
122 unsigned sourcecolumn = 0;
123 sourcecolumn = xloc.column;
124 sourcefile = xloc.file;
/* Fall back first to the enclosing function's file, then a stub.  */
125 if (sourcefile == NULL && current_function_decl != NULL_TREE)
126 sourcefile = DECL_SOURCE_FILE (current_function_decl);
127 if (sourcefile == NULL)
128 sourcefile = "<unknown file>";
129
130 pp_string (buf, sourcefile);
131
132 if (sourceline != 0)
133 {
134 pp_string (buf, ":");
135 pp_decimal_int (buf, sourceline);
136
137 if (sourcecolumn != 0)
138 {
139 pp_string (buf, ":");
140 pp_decimal_int (buf, sourcecolumn);
141 }
142 }
143 }
144
145 if (current_function_decl != NULL_TREE)
146 {
147 /* Add (FUNCTION) */
148 pp_string (buf, " (");
149 {
150 const char *funcname = NULL;
151 if (DECL_NAME (current_function_decl))
152 funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
153 if (funcname == NULL)
154 funcname = "anonymous fn";
155
156 pp_string (buf, funcname);
157 }
158 pp_string (buf, ") ");
159 }
160 else
161 pp_string (buf, " ");
162
163 /* Add <variable-declaration>, possibly demangled. */
164 {
165 const char *declname = NULL;
166
167 if (DECL_NAME (decl) != NULL)
168 {
169 if (strcmp ("GNU C++", lang_hooks.name) == 0)
170 {
171 /* The gcc/cp decl_printable_name hook doesn't do as good a job as
172 the libiberty demangler. */
173 declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
174 DMGL_AUTO | DMGL_VERBOSE);
175 }
176 if (declname == NULL)
177 declname = lang_hooks.decl_printable_name (decl, 3);
178 }
179 if (declname == NULL)
180 declname = "<unnamed variable>";
181
182 pp_string (buf, declname);
183 }
184
185 /* Return the lot as a new STRING_CST. */
186 buf_contents = pp_base_formatted_text (buf);
187 result = mf_build_string (buf_contents);
/* Clear eagerly so the static buffer does not hold onto the text.  */
188 pp_clear_output_area (buf);
189
190 return result;
191 }
192
193
194 /* And another friend, for producing a simpler message. */
195
196 static tree
197 mf_file_function_line_tree (location_t location)
198 {
199 expanded_location xloc = expand_location (location);
200 const char *file = NULL, *colon, *line, *op, *name, *cp;
201 char linecolbuf[30]; /* Enough for two decimal numbers plus a colon. */
202 char *string;
203 tree result;
204
205 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
206 file = xloc.file;
207 if (file == NULL && current_function_decl != NULL_TREE)
208 file = DECL_SOURCE_FILE (current_function_decl);
209 if (file == NULL)
210 file = "<unknown file>";
211
212 if (xloc.line > 0)
213 {
214 if (xloc.column > 0)
215 sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
216 else
217 sprintf (linecolbuf, "%d", xloc.line);
218 colon = ":";
219 line = linecolbuf;
220 }
221 else
222 colon = line = "";
223
224 /* Add (FUNCTION). */
225 name = lang_hooks.decl_printable_name (current_function_decl, 1);
226 if (name)
227 {
228 op = " (";
229 cp = ")";
230 }
231 else
232 op = name = cp = "";
233
234 string = concat (file, colon, line, op, name, cp, NULL);
235 result = mf_build_string (string);
236 free (string);
237
238 return result;
239 }
240
241
242 /* global tree nodes */
243
244 /* Global tree objects for global variables and functions exported by
245 mudflap runtime library. mf_init_extern_trees must be called
246 before using these. */
247
248 /* uintptr_t (usually "unsigned long") */
249 static GTY (()) tree mf_uintptr_type;
250
251 /* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
252 static GTY (()) tree mf_cache_struct_type;
253
254 /* struct __mf_cache * const */
255 static GTY (()) tree mf_cache_structptr_type;
256
257 /* extern struct __mf_cache __mf_lookup_cache []; */
258 static GTY (()) tree mf_cache_array_decl;
259
260 /* extern unsigned char __mf_lc_shift; */
261 static GTY (()) tree mf_cache_shift_decl;
262
263 /* extern uintptr_t __mf_lc_mask; */
264 static GTY (()) tree mf_cache_mask_decl;
265
266 /* Their function-scope local shadows, used in single-threaded mode only. */
267
268 /* auto const unsigned char __mf_lc_shift_l; */
269 static GTY (()) tree mf_cache_shift_decl_l;
270
271 /* auto const uintptr_t __mf_lc_mask_l; */
272 static GTY (()) tree mf_cache_mask_decl_l;
273
274 /* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
275 static GTY (()) tree mf_check_fndecl;
276
277 /* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
278 static GTY (()) tree mf_register_fndecl;
279
280 /* extern void __mf_unregister (void *ptr, size_t sz, int type); */
281 static GTY (()) tree mf_unregister_fndecl;
282
283 /* extern void __mf_init (); */
284 static GTY (()) tree mf_init_fndecl;
285
286 /* extern int __mf_set_options (const char*); */
287 static GTY (()) tree mf_set_options_fndecl;
288
289
290 /* Helper for mudflap_init: construct a decl with the given category,
291 name, and type, mark it an external reference, and pushdecl it. */
292 static inline tree
293 mf_make_builtin (enum tree_code category, const char *name, tree type)
294 {
295 tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
296 category, get_identifier (name), type));
297 TREE_PUBLIC (decl) = 1;
298 DECL_EXTERNAL (decl) = 1;
299 lang_hooks.decls.pushdecl (decl);
300 /* The decl was declared by the compiler. */
301 DECL_ARTIFICIAL (decl) = 1;
302 /* And we don't want debug info for it. */
303 DECL_IGNORED_P (decl) = 1;
304 return decl;
305 }
306
307 /* Helper for mudflap_init: construct a tree corresponding to the type
308 struct __mf_cache { uintptr_t low; uintptr_t high; };
309 where uintptr_t is the FIELD_TYPE argument. */
310 static inline tree
311 mf_make_mf_cache_struct_type (tree field_type)
312 {
313 /* There is, abominably, no language-independent way to construct a
314 RECORD_TYPE. So we have to call the basic type construction
315 primitives by hand. */
316 tree fieldlo = build_decl (UNKNOWN_LOCATION,
317 FIELD_DECL, get_identifier ("low"), field_type);
318 tree fieldhi = build_decl (UNKNOWN_LOCATION,
319 FIELD_DECL, get_identifier ("high"), field_type);
320
321 tree struct_type = make_node (RECORD_TYPE);
322 DECL_CONTEXT (fieldlo) = struct_type;
323 DECL_CONTEXT (fieldhi) = struct_type;
324 DECL_CHAIN (fieldlo) = fieldhi;
325 TYPE_FIELDS (struct_type) = fieldlo;
326 TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
327 layout_type (struct_type);
328
329 return struct_type;
330 }
331
332 /* Initialize the global tree nodes that correspond to mf-runtime.h
333 declarations. Idempotent: only the first call builds anything. */
334 void
335 mudflap_init (void)
336 {
337 static bool done = false;
338 tree mf_const_string_type;
339 tree mf_cache_array_type;
340 tree mf_check_register_fntype;
341 tree mf_unregister_fntype;
342 tree mf_init_fntype;
343 tree mf_set_options_fntype;
344
345 if (done)
346 return;
347 done = true;
348
/* Build the types first: uintptr_t, const char *, the lookup-cache
   struct, and the function types of the runtime entry points.  */
349 mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
350 /*unsignedp=*/true);
351 mf_const_string_type
352 = build_pointer_type (build_qualified_type
353 (char_type_node, TYPE_QUAL_CONST));
354
355 mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
356 mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
357 mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
/* __mf_check and __mf_register share one signature.  */
358 mf_check_register_fntype =
359 build_function_type_list (void_type_node, ptr_type_node, size_type_node,
360 integer_type_node, mf_const_string_type, NULL_TREE);
361 mf_unregister_fntype =
362 build_function_type_list (void_type_node, ptr_type_node, size_type_node,
363 integer_type_node, NULL_TREE);
364 mf_init_fntype =
365 build_function_type_list (void_type_node, NULL_TREE);
366 mf_set_options_fntype =
367 build_function_type_list (integer_type_node, mf_const_string_type, NULL_TREE);
368
/* Now the runtime's exported variables and functions.  */
369 mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
370 mf_cache_array_type);
371 mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
372 unsigned_char_type_node);
373 mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
374 mf_uintptr_type);
375 /* Don't process these in mudflap_enqueue_decl, should they come by
376 there for some reason. */
377 mf_mark (mf_cache_array_decl);
378 mf_mark (mf_cache_shift_decl);
379 mf_mark (mf_cache_mask_decl);
380 mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
381 mf_check_register_fntype);
382 mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
383 mf_check_register_fntype);
384 mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
385 mf_unregister_fntype);
386 mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
387 mf_init_fntype);
388 mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
389 mf_set_options_fntype);
390 }
391
392
393 /* ------------------------------------------------------------------------ */
394 /* This is the second part of the mudflap instrumentation. It works on
395 low-level GIMPLE using the CFG, because we want to run this pass after
396 tree optimizations have been performed, but we have to preserve the CFG
397 for expansion from trees to RTL.
398 Below is the list of transformations performed on statements in the
399 current function.
400
401 1) Memory reference transforms: Perform the mudflap indirection-related
402 tree transforms on memory references.
403
404 2) Mark BUILTIN_ALLOCA calls not inlineable.
405
406 */
407
408 static unsigned int
409 execute_mudflap_function_ops (void)
410 {
411 struct gimplify_ctx gctx;
412
413 /* Don't instrument functions such as the synthetic constructor
414 built during mudflap_finish_file. */
415 if (mf_marked_p (current_function_decl) ||
416 DECL_ARTIFICIAL (current_function_decl))
417 return 0;
418
419 push_gimplify_context (&gctx);
420
421 /* In multithreaded mode, don't cache the lookup cache parameters. */
422 if (! flag_mudflap_threads)
423 mf_decl_cache_locals ();
424
425 mf_xform_statements ();
426
427 if (! flag_mudflap_threads)
428 mf_decl_clear_locals ();
429
430 pop_gimplify_context (NULL);
431 return 0;
432 }
433
434 /* Insert a gimple_seq SEQ on all the outgoing edges out of BB. Note that
435 if BB has more than one non-abnormal outgoing edge, SEQ is copied for
436 every such edge but the last one, which receives SEQ itself. */
437
438 static void
439 insert_edge_copies_seq (gimple_seq seq, basic_block bb)
440 {
441 edge e;
442 edge_iterator ei;
443 unsigned n_copies = -1;
444
/* Count the eligible (non-abnormal) edges minus one: starting from
   (unsigned)-1, k eligible edges leave n_copies == k - 1.  */
445 FOR_EACH_EDGE (e, ei, bb->succs)
446 if (!(e->flags & EDGE_ABNORMAL))
447 n_copies++;
448
/* The first k-1 eligible edges each get a fresh copy of SEQ; the last
   one (when n_copies has counted down to 0) gets SEQ itself, so no
   unnecessary copy is made.  If there are no eligible edges, this
   loop never runs and the leftover UINT_MAX value is harmless.  */
449 FOR_EACH_EDGE (e, ei, bb->succs)
450 if (!(e->flags & EDGE_ABNORMAL))
451 gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
452 }
453
454 /* Create and initialize local shadow variables for the lookup cache
455 globals. Put their decls in the *_l globals for use by
456 mf_build_check_statement_for. Only used in single-threaded mode. */
457
458 static void
459 mf_decl_cache_locals (void)
460 {
461 gimple g;
462 gimple_seq seq = gimple_seq_alloc ();
463
464 /* Build the cache vars. */
465 mf_cache_shift_decl_l
466 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl),
467 "__mf_lookup_shift_l"))
468
469 mf_cache_mask_decl_l
470 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl),
471 "__mf_lookup_mask_l"));
472
473 /* Build initialization nodes for the cache vars. We just load the
474 globals into the cache variables. */
475 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
476 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
477 gimple_seq_add_stmt (&seq, g);
478
479 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
480 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
481 gimple_seq_add_stmt (&seq, g);
482
/* Place the initializations on every non-abnormal edge leaving the
   entry block, then materialize them immediately.  */
483 insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
484
485 gsi_commit_edge_inserts ();
486 }
487
488
489 static void
490 mf_decl_clear_locals (void)
491 {
492 /* Unset local shadows. */
493 mf_cache_shift_decl_l = NULL_TREE;
494 mf_cache_mask_decl_l = NULL_TREE;
495 }
496
/* Emit the mudflap cache-lookup-and-check code for one memory access.
   BASE and LIMIT are trees for the first and last byte touched.
   INSTR_GSI points at the statement being instrumented; on return it
   is repositioned at the head of the join block following the check.
   LOCATION is attached to every synthesized statement, and DIRFLAG is
   the integer read/write flag forwarded to __mf_check.  The CFG is
   reshaped into:  cond_bb (lookup + compare) -> then_bb (__mf_check)
   -> join_bb (original statement), with a false edge skipping
   then_bb.  */
497 static void
498 mf_build_check_statement_for (tree base, tree limit,
499 gimple_stmt_iterator *instr_gsi,
500 location_t location, tree dirflag)
501 {
502 gimple_stmt_iterator gsi;
503 basic_block cond_bb, then_bb, join_bb;
504 edge e;
505 tree cond, t, u, v;
506 tree mf_base;
507 tree mf_elem;
508 tree mf_limit;
509 gimple g;
510 gimple_seq seq, stmts;
511
512 /* We first need to split the current basic block, and start altering
513 the CFG. This allows us to insert the statements we're about to
514 construct into the right basic blocks. */
515
516 cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
517 gsi = *instr_gsi;
518 gsi_prev (&gsi);
519 if (! gsi_end_p (gsi))
520 e = split_block (cond_bb, gsi_stmt (gsi));
521 else
522 e = split_block_after_labels (cond_bb);
523 cond_bb = e->src;
524 join_bb = e->dest;
525
526 /* A recap at this point: join_bb is the basic block at whose head
527 is the gimple statement for which this check expression is being
528 built. cond_bb is the (possibly new, synthetic) basic block the
529 end of which will contain the cache-lookup code, and a
530 conditional that jumps to the cache-miss code or, much more
531 likely, over to join_bb. */
532
533 /* Create the bb that contains the cache-miss fallback block (mf_check). */
534 then_bb = create_empty_bb (cond_bb);
535 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
536 make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);
537
538 /* Mark the pseudo-fallthrough edge from cond_bb to join_bb. */
539 e = find_edge (cond_bb, join_bb);
540 e->flags = EDGE_FALSE_VALUE;
541 e->count = cond_bb->count;
542 e->probability = REG_BR_PROB_BASE;
543
544 /* Update dominance info. Note that bb_join's data was
545 updated by split_block. */
546 if (dom_info_available_p (CDI_DOMINATORS))
547 {
548 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
549 set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
550 }
551
552 /* Build our local variables. */
553 mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem");
554 mf_base = make_rename_temp (mf_uintptr_type, "__mf_base");
555 mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit");
556
557 /* Build: __mf_base = (uintptr_t) <base address expression>. */
558 seq = gimple_seq_alloc ();
559 t = fold_convert_loc (location, mf_uintptr_type,
560 unshare_expr (base));
561 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
562 gimple_seq_add_seq (&seq, stmts);
563 g = gimple_build_assign (mf_base, t);
564 gimple_set_location (g, location);
565 gimple_seq_add_stmt (&seq, g);
566
567 /* Build: __mf_limit = (uintptr_t) <limit address expression>. */
568 t = fold_convert_loc (location, mf_uintptr_type,
569 unshare_expr (limit));
570 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
571 gimple_seq_add_seq (&seq, stmts);
572 g = gimple_build_assign (mf_limit, t);
573 gimple_set_location (g, location);
574 gimple_seq_add_stmt (&seq, g);
575
576 /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
577 & __mf_mask]. In single-threaded mode the shift/mask are read
578 from their function-local shadows instead of the globals. */
578 t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
579 flag_mudflap_threads ? mf_cache_shift_decl
580 : mf_cache_shift_decl_l);
581 t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
582 flag_mudflap_threads ? mf_cache_mask_decl
583 : mf_cache_mask_decl_l);
584 t = build4 (ARRAY_REF,
585 TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
586 mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
587 t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
588 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
589 gimple_seq_add_seq (&seq, stmts);
590 g = gimple_build_assign (mf_elem, t);
591 gimple_set_location (g, location);
592 gimple_seq_add_stmt (&seq, g);
593
594 /* Quick validity check.
595
596 if (__mf_elem->low > __mf_base
597 || (__mf_elem_high < __mf_limit))
598 {
599 __mf_check ();
600 ... and only if single-threaded:
601 __mf_lookup_shift_1 = f...;
602 __mf_lookup_mask_l = ...;
603 }
604
605 It is expected that this body of code is rarely executed so we mark
606 the edge to the THEN clause of the conditional jump as unlikely. */
607
608 /* Construct t <-- '__mf_elem->low > __mf_base'. */
609 t = build3 (COMPONENT_REF, mf_uintptr_type,
610 build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
611 TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
612 t = build2 (GT_EXPR, boolean_type_node, t, mf_base);
613
614 /* Construct '__mf_elem->high < __mf_limit'.
615
616 First build:
617 1) u <-- '__mf_elem->high'
618 2) v <-- '__mf_limit'.
619
620 Then build 'u <-- (u < v). */
621
622 u = build3 (COMPONENT_REF, mf_uintptr_type,
623 build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
624 DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);
625
626 v = mf_limit;
627
628 u = build2 (LT_EXPR, boolean_type_node, u, v);
629
630 /* Build the composed conditional: t <-- 't || u'. Then store the
631 result of the evaluation of 't' in a temporary variable which we
632 can use as the condition for the conditional jump. */
633 t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
634 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
635 gimple_seq_add_seq (&seq, stmts);
636 cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond");
637 g = gimple_build_assign (cond, t);
638 gimple_set_location (g, location);
639 gimple_seq_add_stmt (&seq, g);
640
641 /* Build the conditional jump. 'cond' is just a temporary so we can
642 simply build a void COND_EXPR. We do need labels in both arms though. */
643 g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
644 NULL_TREE);
645 gimple_set_location (g, location);
646 gimple_seq_add_stmt (&seq, g);
647
648 /* At this point, after so much hard work, we have only constructed
649 the conditional jump,
650
651 if (__mf_elem->low > __mf_base
652 || (__mf_elem_high < __mf_limit))
653
654 The lowered GIMPLE tree representing this code is in the statement
655 list starting at 'head'.
656
657 We can insert this now in the current basic block, i.e. the one that
658 the statement we're instrumenting was originally in. */
659 gsi = gsi_last_bb (cond_bb);
660 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
661
662 /* Now build up the body of the cache-miss handling:
663
664 __mf_check();
665 refresh *_l vars.
666
667 This is the body of the conditional. */
668
669 seq = gimple_seq_alloc ();
670 /* u is a string, so it is already a gimple value. */
671 u = mf_file_function_line_tree (location);
672 /* NB: we pass the overall [base..limit] range to mf_check. */
673 v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
674 fold_build2_loc (location,
675 MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
676 build_int_cst (mf_uintptr_type, 1));
677 v = force_gimple_operand (v, &stmts, true, NULL_TREE);
678 gimple_seq_add_seq (&seq, stmts);
679 g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
680 gimple_seq_add_stmt (&seq, g);
681
682 if (! flag_mudflap_threads)
683 {
/* If the __mf_check call can end a basic block (e.g. it may throw),
   commit what we have so far into then_bb and continue the shadow
   refreshes in a fresh block after it.  */
684 if (stmt_ends_bb_p (g))
685 {
686 gsi = gsi_start_bb (then_bb);
687 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
688 e = split_block (then_bb, g);
689 then_bb = e->dest;
690 seq = gimple_seq_alloc ();
691 }
692
693 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
694 gimple_seq_add_stmt (&seq, g);
695
696 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
697 gimple_seq_add_stmt (&seq, g);
698 }
699
700 /* Insert the check code in the THEN block. */
701 gsi = gsi_start_bb (then_bb);
702 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
703
704 *instr_gsi = gsi_start_bb (join_bb);
705 }
706
707
708 /* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
709 eligible for instrumentation. For the mudflap1 pass, this implies
710 that it should be registered with the libmudflap runtime. For the
711 mudflap2 pass this means instrumenting an indirection operation with
712 respect to the object.
713 */
714 static int
715 mf_decl_eligible_p (tree decl)
716 {
717 return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
718 /* The decl must have its address taken. In the case of
719 arrays, this flag is also set if the indexes are not
720 compile-time known valid constants. */
721 /* XXX: not sufficient: return-by-value structs! */
722 && TREE_ADDRESSABLE (decl)
723 /* The type of the variable must be complete. */
724 && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
725 /* The decl hasn't been decomposed somehow. */
726 && !DECL_HAS_VALUE_EXPR_P (decl));
727 }
728
729
/* Instrument the memory reference *TP, which is part of the statement
   at *ITER.  Compute trees for the first (base) and one-past-last
   (limit) byte the access can touch, then emit the runtime check via
   mf_build_check_statement_for.  DIRFLAG is integer_zero_node for a
   read and integer_one_node for a write.  References that provably
   need no check (marked nodes, ineligible decls, plain component
   accesses of non-addressable objects, unsupported codes) return
   without instrumenting.  */
730 static void
731 mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
732 location_t location, tree dirflag)
733 {
734 tree type, base, limit, addr, size, t;
735
736 /* Don't instrument read operations. */
737 if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
738 return;
739
740 /* Don't instrument marked nodes. */
741 if (mf_marked_p (*tp))
742 return;
743
744 t = *tp;
745 type = TREE_TYPE (t);
746
747 if (type == error_mark_node)
748 return;
749
750 size = TYPE_SIZE_UNIT (type);
751
752 switch (TREE_CODE (t))
753 {
754 case ARRAY_REF:
755 case COMPONENT_REF:
756 {
757 /* This is trickier than it may first appear. The reason is
758 that we are looking at expressions from the "inside out" at
759 this point. We may have a complex nested aggregate/array
760 expression (e.g. "a.b[i].c"), maybe with an indirection as
761 the leftmost operator ("p->a.b.d"), where instrumentation
762 is necessary. Or we may have an innocent "a.b.c"
763 expression that must not be instrumented. We need to
764 recurse all the way down the nesting structure to figure it
765 out: looking just at the outer node is not enough. */
766 tree var;
767 int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
768 /* If we have a bitfield component reference, we must note the
769 innermost addressable object in ELT, from which we will
770 construct the byte-addressable bounds of the bitfield. */
771 tree elt = NULL_TREE;
772 int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
773 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));
774
775 /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
776 containment hierarchy to find the outermost VAR_DECL. */
777 var = TREE_OPERAND (t, 0);
778 while (1)
779 {
780 if (bitfield_ref_p && elt == NULL_TREE
781 && (TREE_CODE (var) == ARRAY_REF
782 || TREE_CODE (var) == COMPONENT_REF))
783 elt = var;
784
785 if (TREE_CODE (var) == ARRAY_REF)
786 {
/* Any array indexing along the way disqualifies the
   "pure component access" shortcut below.  */
787 component_ref_only = 0;
788 var = TREE_OPERAND (var, 0);
789 }
790 else if (TREE_CODE (var) == COMPONENT_REF)
791 var = TREE_OPERAND (var, 0);
792 else if (INDIRECT_REF_P (var)
793 || TREE_CODE (var) == MEM_REF)
794 {
/* An indirection at the root: the checked base is the
   pointer being dereferenced.  */
795 base = TREE_OPERAND (var, 0);
796 break;
797 }
798 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
799 {
800 var = TREE_OPERAND (var, 0);
801 if (CONSTANT_CLASS_P (var)
802 && TREE_CODE (var) != STRING_CST)
803 return;
804 }
805 else
806 {
807 gcc_assert (TREE_CODE (var) == VAR_DECL
808 || TREE_CODE (var) == PARM_DECL
809 || TREE_CODE (var) == RESULT_DECL
810 || TREE_CODE (var) == STRING_CST);
811 /* Don't instrument this access if the underlying
812 variable is not "eligible". This test matches
813 those arrays that have only known-valid indexes,
814 and thus are not labeled TREE_ADDRESSABLE. */
815 if (! mf_decl_eligible_p (var) || component_ref_only)
816 return;
817 else
818 {
819 base = build1 (ADDR_EXPR,
820 build_pointer_type (TREE_TYPE (var)), var);
821 break;
822 }
823 }
824 }
825
826 /* Handle the case of ordinary non-indirection structure
827 accesses. These have only nested COMPONENT_REF nodes (no
828 INDIRECT_REF), but pass through the above filter loop.
829 Note that it's possible for such a struct variable to match
830 the eligible_p test because someone else might take its
831 address sometime. */
832
833 /* We need special processing for bitfield components, because
834 their addresses cannot be taken. */
835 if (bitfield_ref_p)
836 {
837 tree field = TREE_OPERAND (t, 1);
838
839 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
840 size = DECL_SIZE_UNIT (field);
841
/* Bound the bitfield from the innermost addressable
   enclosing object (ELT) when one was found, otherwise
   from the base pointer, offset by the field's byte
   position.  */
842 if (elt)
843 elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
844 elt);
845 addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
846 addr = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
847 addr, fold_convert_loc (location, sizetype,
848 byte_position (field)));
849 }
850 else
851 addr = build1 (ADDR_EXPR, build_pointer_type (type), t);
852
/* limit = addr + size - 1, i.e. the last byte of the access.  */
853 limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
854 fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
855 convert (mf_uintptr_type, addr),
856 size),
857 integer_one_node);
858 }
859 break;

861 case INDIRECT_REF:
862 addr = TREE_OPERAND (t, 0);
863 base = addr;
/* limit = base + size - 1 (size_int (-1) is the -1 offset).  */
864 limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
865 fold_build2_loc (location,
866 POINTER_PLUS_EXPR, ptr_type_node, base,
867 size),
868 size_int (-1));
869 break;

871 case MEM_REF:
/* A MEM_REF bakes a constant offset into operand 1; fold it
   into the checked address.  */
872 addr = fold_build2_loc (location, POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (t, 0)),
873 TREE_OPERAND (t, 0),
874 fold_convert (sizetype, TREE_OPERAND (t, 1)));
875 base = addr;
876 limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
877 fold_build2_loc (location,
878 POINTER_PLUS_EXPR, ptr_type_node, base,
879 size),
880 size_int (-1));
881 break;

883 case TARGET_MEM_REF:
884 addr = tree_mem_ref_addr (ptr_type_node, t);
885 base = addr;
886 limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
887 fold_build2_loc (location,
888 POINTER_PLUS_EXPR, ptr_type_node, base,
889 size),
890 size_int (-1));
891 break;

893 case ARRAY_RANGE_REF:
894 warning (OPT_Wmudflap,
895 "mudflap checking not yet implemented for ARRAY_RANGE_REF");
896 return;

898 case BIT_FIELD_REF:
899 /* ??? merge with COMPONENT_REF code above? */
900 {
901 tree ofs, rem, bpu;
902
903 /* If we're not dereferencing something, then the access
904 must be ok. */
905 if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
906 return;
907
/* Convert the bit offset (operand 2) into a byte offset OFS
   plus a sub-byte remainder REM, then round the bit size
   (operand 1, plus REM) up to whole bytes.  */
908 bpu = bitsize_int (BITS_PER_UNIT);
909 ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
910 rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
911 ofs = fold_convert_loc (location,
912 sizetype,
913 size_binop_loc (location,
914 TRUNC_DIV_EXPR, ofs, bpu));
915
916 size = convert (bitsizetype, TREE_OPERAND (t, 1));
917 size = size_binop_loc (location, PLUS_EXPR, size, rem);
918 size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
919 size = convert (sizetype, size);
920
921 addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
922 addr = convert (ptr_type_node, addr);
923 addr = fold_build2_loc (location, POINTER_PLUS_EXPR,
924 ptr_type_node, addr, ofs);
925
926 base = addr;
927 limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
928 fold_build2_loc (location,
929 POINTER_PLUS_EXPR, ptr_type_node,
930 base, size),
931 size_int (-1));
932 }
933 break;

935 default:
936 return;
937 }

939 mf_build_check_statement_for (base, limit, iter, location, dirflag);
940 }
941 /* Transform
942 1) Memory references.
943 2) BUILTIN_ALLOCA calls.
944 */
945 static void
946 mf_xform_statements (void)
947 {
948 basic_block bb, next;
949 gimple_stmt_iterator i;
950 int saved_last_basic_block = last_basic_block;
951 enum gimple_rhs_class grhs_class;
952
953 bb = ENTRY_BLOCK_PTR ->next_bb;
954 do
955 {
956 next = bb->next_bb;
957 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
958 {
959 gimple s = gsi_stmt (i);
960
961 /* Only a few GIMPLE statements can reference memory. */
962 switch (gimple_code (s))
963 {
964 case GIMPLE_ASSIGN:
965 mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
966 gimple_location (s), integer_one_node);
967 mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
968 gimple_location (s), integer_zero_node);
969 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
970 if (grhs_class == GIMPLE_BINARY_RHS)
971 mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
972 gimple_location (s), integer_zero_node);
973 break;
974
975 case GIMPLE_RETURN:
976 if (gimple_return_retval (s) != NULL_TREE)
977 {
978 mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
979 gimple_location (s),
980 integer_zero_node);
981 }
982 break;
983
984 case GIMPLE_CALL:
985 {
986 tree fndecl = gimple_call_fndecl (s);
987 if (fndecl && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA))
988 gimple_call_set_cannot_inline (s, true);
989 }
990 break;
991
992 default:
993 ;
994 }
995 }
996 bb = next;
997 }
998 while (bb && bb->index <= saved_last_basic_block);
999 }
1000
1001 /* ------------------------------------------------------------------------ */
1002 /* ADDR_EXPR transforms. Perform the declaration-related mudflap tree
1003 transforms on the current function.
1004
1005 This is the first part of the mudflap instrumentation. It works on
1006 high-level GIMPLE because after lowering, all variables are moved out
1007 of their BIND_EXPR binding context, and we lose liveness information
1008 for the declarations we wish to instrument. */
1009
1010 static unsigned int
1011 execute_mudflap_function_decls (void)
1012 {
1013 struct gimplify_ctx gctx;
1014
1015 /* Don't instrument functions such as the synthetic constructor
1016 built during mudflap_finish_file. */
1017 if (mf_marked_p (current_function_decl) ||
1018 DECL_ARTIFICIAL (current_function_decl))
1019 return 0;
1020
1021 push_gimplify_context (&gctx);
1022
1023 mf_xform_decls (gimple_body (current_function_decl),
1024 DECL_ARGUMENTS (current_function_decl));
1025
1026 pop_gimplify_context (NULL);
1027 return 0;
1028 }
1029
/* This struct is passed from mf_xform_decls to its walk callback
   mx_xfn_xform_decls, carrying state needed during the traversal that
   searches for objects that have their addresses taken.  */
struct mf_xform_decls_data
{
  /* Function parameter declarations still awaiting registration;
     set to NULL_TREE once the first GIMPLE_BIND has been processed
     (see mx_xfn_xform_decls).  */
  tree param_decls;
};
1037
1038
1039 /* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
1040 _DECLs if appropriate. Arrange to call the __mf_register function
1041 now, and the __mf_unregister function later for each. Return the
1042 gimple sequence after synthesis. */
1043 gimple_seq
1044 mx_register_decls (tree decl, gimple_seq seq, location_t location)
1045 {
1046 gimple_seq finally_stmts = NULL;
1047 gimple_stmt_iterator initially_stmts = gsi_start (seq);
1048
1049 while (decl != NULL_TREE)
1050 {
1051 if (mf_decl_eligible_p (decl)
1052 /* Not already processed. */
1053 && ! mf_marked_p (decl)
1054 /* Automatic variable. */
1055 && ! DECL_EXTERNAL (decl)
1056 && ! TREE_STATIC (decl))
1057 {
1058 tree size = NULL_TREE, variable_name;
1059 gimple unregister_fncall, register_fncall;
1060 tree unregister_fncall_param, register_fncall_param;
1061
1062 /* Variable-sized objects should have sizes already been
1063 gimplified when we got here. */
1064 size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl)));
1065 gcc_assert (is_gimple_val (size));
1066
1067
1068 unregister_fncall_param =
1069 mf_mark (build1 (ADDR_EXPR,
1070 build_pointer_type (TREE_TYPE (decl)),
1071 decl));
1072 /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
1073 unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
1074 unregister_fncall_param,
1075 size,
1076 integer_three_node);
1077
1078
1079 variable_name = mf_varname_tree (decl);
1080 register_fncall_param =
1081 mf_mark (build1 (ADDR_EXPR,
1082 build_pointer_type (TREE_TYPE (decl)),
1083 decl));
1084 /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
1085 "name") */
1086 register_fncall = gimple_build_call (mf_register_fndecl, 4,
1087 register_fncall_param,
1088 size,
1089 integer_three_node,
1090 variable_name);
1091
1092
1093 /* Accumulate the two calls. */
1094 gimple_set_location (register_fncall, location);
1095 gimple_set_location (unregister_fncall, location);
1096
1097 /* Add the __mf_register call at the current appending point. */
1098 if (gsi_end_p (initially_stmts))
1099 {
1100 if (!DECL_ARTIFICIAL (decl))
1101 warning (OPT_Wmudflap,
1102 "mudflap cannot track %qE in stub function",
1103 DECL_NAME (decl));
1104 }
1105 else
1106 {
1107 gsi_insert_before (&initially_stmts, register_fncall,
1108 GSI_SAME_STMT);
1109
1110 /* Accumulate the FINALLY piece. */
1111 gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
1112 }
1113 mf_mark (decl);
1114 }
1115
1116 decl = DECL_CHAIN (decl);
1117 }
1118
1119 /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
1120 if (finally_stmts != NULL)
1121 {
1122 gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
1123 gimple_seq new_seq = gimple_seq_alloc ();
1124
1125 gimple_seq_add_stmt (&new_seq, stmt);
1126 return new_seq;
1127 }
1128 else
1129 return seq;
1130 }
1131
1132
1133 /* Process every variable mentioned in BIND_EXPRs. */
1134 static tree
1135 mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
1136 bool *handled_operands_p ATTRIBUTE_UNUSED,
1137 struct walk_stmt_info *wi)
1138 {
1139 struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
1140 gimple stmt = gsi_stmt (*gsi);
1141
1142 switch (gimple_code (stmt))
1143 {
1144 case GIMPLE_BIND:
1145 {
1146 /* Process function parameters now (but only once). */
1147 if (d->param_decls)
1148 {
1149 gimple_bind_set_body (stmt,
1150 mx_register_decls (d->param_decls,
1151 gimple_bind_body (stmt),
1152 gimple_location (stmt)));
1153 d->param_decls = NULL_TREE;
1154 }
1155
1156 gimple_bind_set_body (stmt,
1157 mx_register_decls (gimple_bind_vars (stmt),
1158 gimple_bind_body (stmt),
1159 gimple_location (stmt)));
1160 }
1161 break;
1162
1163 default:
1164 break;
1165 }
1166
1167 return NULL_TREE;
1168 }
1169
1170 /* Perform the object lifetime tracking mudflap transform on the given function
1171 tree. The tree is mutated in place, with possibly copied subtree nodes.
1172
1173 For every auto variable declared, if its address is ever taken
1174 within the function, then supply its lifetime to the mudflap
1175 runtime with the __mf_register and __mf_unregister calls.
1176 */
1177
1178 static void
1179 mf_xform_decls (gimple_seq fnbody, tree fnparams)
1180 {
1181 struct mf_xform_decls_data d;
1182 struct walk_stmt_info wi;
1183 struct pointer_set_t *pset = pointer_set_create ();
1184
1185 d.param_decls = fnparams;
1186 memset (&wi, 0, sizeof (wi));
1187 wi.info = (void*) &d;
1188 wi.pset = pset;
1189 walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
1190 pointer_set_destroy (pset);
1191 }
1192
1193
1194 /* ------------------------------------------------------------------------ */
1195 /* Externally visible mudflap functions. */
1196
1197
1198 /* Mark and return the given tree node to prevent further mudflap
1199 transforms. */
1200 static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;
1201
1202 tree
1203 mf_mark (tree t)
1204 {
1205 void **slot;
1206
1207 if (marked_trees == NULL)
1208 marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
1209 NULL);
1210
1211 slot = htab_find_slot (marked_trees, t, INSERT);
1212 *slot = t;
1213 return t;
1214 }
1215
1216 int
1217 mf_marked_p (tree t)
1218 {
1219 void *entry;
1220
1221 if (marked_trees == NULL)
1222 return 0;
1223
1224 entry = htab_find (marked_trees, t);
1225 return (entry != NULL);
1226 }
1227
1228 /* Remember given node as a static of some kind: global data,
1229 function-scope static, or an anonymous constant. Its assembler
1230 label is given. */
1231
1232 /* A list of globals whose incomplete declarations we encountered.
1233 Instead of emitting the __mf_register call for them here, it's
1234 delayed until program finish time. If they're still incomplete by
1235 then, warnings are emitted. */
1236
1237 static GTY (()) VEC(tree,gc) *deferred_static_decls;
1238
1239 /* A list of statements for calling __mf_register() at startup time. */
1240 static GTY (()) tree enqueued_call_stmt_chain;
1241
1242 static void
1243 mudflap_register_call (tree obj, tree object_size, tree varname)
1244 {
1245 tree arg, call_stmt;
1246
1247 arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
1248 arg = convert (ptr_type_node, arg);
1249
1250 call_stmt = build_call_expr (mf_register_fndecl, 4,
1251 arg,
1252 convert (size_type_node, object_size),
1253 /* __MF_TYPE_STATIC */
1254 build_int_cst (integer_type_node, 4),
1255 varname);
1256
1257 append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
1258 }
1259
1260 void
1261 mudflap_enqueue_decl (tree obj)
1262 {
1263 if (mf_marked_p (obj))
1264 return;
1265
1266 /* We don't need to process variable decls that are internally
1267 generated extern. If we did, we'd end up with warnings for them
1268 during mudflap_finish_file (). That would confuse the user,
1269 since the text would refer to variables that don't show up in the
1270 user's source code. */
1271 if (DECL_P (obj) && DECL_EXTERNAL (obj) && DECL_ARTIFICIAL (obj))
1272 return;
1273
1274 VEC_safe_push (tree, gc, deferred_static_decls, obj);
1275 }
1276
1277
1278 void
1279 mudflap_enqueue_constant (tree obj)
1280 {
1281 tree object_size, varname;
1282
1283 if (mf_marked_p (obj))
1284 return;
1285
1286 if (TREE_CODE (obj) == STRING_CST)
1287 object_size = size_int (TREE_STRING_LENGTH (obj));
1288 else
1289 object_size = size_in_bytes (TREE_TYPE (obj));
1290
1291 if (TREE_CODE (obj) == STRING_CST)
1292 varname = mf_build_string ("string literal");
1293 else
1294 varname = mf_build_string ("constant");
1295
1296 mudflap_register_call (obj, object_size, varname);
1297 }
1298
1299
/* Emit any file-wide instrumentation: build a static constructor that
   calls __mf_init, optionally __mf_set_options, and __mf_register for
   every deferred static decl and enqueued constant.  */
void
mudflap_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  /* No need to continue when there were errors.  */
  if (seen_error ())
    return;

  /* Insert a call to __mf_init.  */
  {
    tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
    append_to_statement_list (call2_stmt, &ctor_statements);
  }

  /* If appropriate, call __mf_set_options to pass along read-ignore mode.  */
  if (flag_mudflap_ignore_reads)
    {
      tree arg = mf_build_string ("-ignore-reads");
      tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
      append_to_statement_list (call_stmt, &ctor_statements);
    }

  /* Process all enqueued object decls.  */
  if (deferred_static_decls)
    {
      size_t i;
      tree obj;
      FOR_EACH_VEC_ELT (tree, deferred_static_decls, i, obj)
        {
          gcc_assert (DECL_P (obj));

          if (mf_marked_p (obj))
            continue;

          /* Omit registration for static unaddressed objects.  NB:
             Perform registration for non-static objects regardless of
             TREE_USED or TREE_ADDRESSABLE, because they may be used
             from other compilation units.  */
          if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
            continue;

          /* Objects of unknown size cannot be registered; warn and
             skip them.  */
          if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
            {
              warning (OPT_Wmudflap,
                       "mudflap cannot track unknown size extern %qE",
                       DECL_NAME (obj));
              continue;
            }

          mudflap_register_call (obj,
                                 size_in_bytes (TREE_TYPE (obj)),
                                 mf_varname_tree (obj));
        }

      VEC_truncate (tree, deferred_static_decls, 0);
    }

  /* Append all the enqueued registration calls.  */
  if (enqueued_call_stmt_chain)
    {
      append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
      enqueued_call_stmt_chain = NULL_TREE;
    }

  /* Emit the accumulated statements as an 'I'nit-priority static
     constructor.  */
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY-1);
}
1369
1370
1371 static bool
1372 gate_mudflap (void)
1373 {
1374 return flag_mudflap != 0;
1375 }
1376
/* First mudflap pass: declaration lifetime registration.  Runs on
   high-level GIMPLE (PROP_gimple_any), before lowering.  */
struct gimple_opt_pass pass_mudflap_1 =
{
 {
  GIMPLE_PASS,
  "mudflap1",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_decls,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
1395
/* Second mudflap pass: memory-reference instrumentation.  Requires
   SSA, CFG, and lowered EH; verifies flow/stmts and updates SSA
   afterwards.  */
struct gimple_opt_pass pass_mudflap_2 =
{
 {
  GIMPLE_PASS,
  "mudflap2",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_ops,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_update_ssa                     /* todo_flags_finish */
 }
};
1415
1416 #include "gt-tree-mudflap.h"