re PR libmudflap/26864 (multithreaded mudflap not working)
[gcc.git] / gcc / tree-mudflap.c
1 /* Mudflap: narrow-pointer bounds-checking by tree rewriting.
2 Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Frank Ch. Eigler <fche@redhat.com>
4 and Graydon Hoare <graydon@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "tm_p.h"
32 #include "basic-block.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "tree-inline.h"
36 #include "tree-gimple.h"
37 #include "tree-flow.h"
38 #include "tree-mudflap.h"
39 #include "tree-dump.h"
40 #include "tree-pass.h"
41 #include "hashtab.h"
42 #include "diagnostic.h"
43 #include <demangle.h>
44 #include "langhooks.h"
45 #include "ggc.h"
46 #include "cgraph.h"
47 #include "toplev.h"
48
49 /* Internal function decls */
50
51
52 /* Options. */
53 #define flag_mudflap_threads (flag_mudflap == 2)
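/* Note: flag_mudflap is 1 for plain -fmudflap and 2 for -fmudflapth
   (the threaded runtime); in the threaded case the function-local
   shadow copies of the lookup-cache parameters below are not used.  */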
54
55 /* Helpers. */
56 static tree mf_build_string (const char *string);
57 static tree mf_varname_tree (tree);
58 static tree mf_file_function_line_tree (location_t);
59
60 /* Indirection-related instrumentation. */
61 static void mf_decl_cache_locals (void);
62 static void mf_decl_clear_locals (void);
63 static void mf_xform_derefs (void);
64 static unsigned int execute_mudflap_function_ops (void);
65
66 /* Addressable variables instrumentation. */
67 static void mf_xform_decls (tree, tree);
68 static tree mx_xfn_xform_decls (tree *, int *, void *);
69 static void mx_register_decls (tree, tree *);
70 static unsigned int execute_mudflap_function_decls (void);
71
72
73 /* ------------------------------------------------------------------------ */
74 /* Some generally helpful functions for mudflap instrumentation. */
75
76 /* Build a reference to a literal string. */
77 static tree
78 mf_build_string (const char *string)
79 {
80 size_t len = strlen (string);
81 tree result = mf_mark (build_string (len + 1, string));
82
83 TREE_TYPE (result) = build_array_type
84 (char_type_node, build_index_type (build_int_cst (NULL_TREE, len)));
85 TREE_CONSTANT (result) = 1;
86 TREE_INVARIANT (result) = 1;
87 TREE_READONLY (result) = 1;
88 TREE_STATIC (result) = 1;
89
90 result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);
91
92 return mf_mark (result);
93 }
94
95 /* Create a properly typed STRING_CST node that describes the given
96 declaration. It will be used as an argument for __mf_register().
97 Try to construct a helpful string, including file/function/variable
98 name. */
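/* For illustration: for a local variable `k' declared at line 3 of a.c
   inside `main', the string produced below is roughly "a.c:3 (main) k"
   (the exact column and demangling details vary).  */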
99
100 static tree
101 mf_varname_tree (tree decl)
102 {
103 static pretty_printer buf_rec;
104 static int initialized = 0;
105 pretty_printer *buf = & buf_rec;
106 const char *buf_contents;
107 tree result;
108
109 gcc_assert (decl);
110
111 if (!initialized)
112 {
113 pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
114 initialized = 1;
115 }
116 pp_clear_output_area (buf);
117
118 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
119 {
120 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
121 const char *sourcefile;
122 unsigned sourceline = xloc.line;
123 unsigned sourcecolumn = 0;
124 #ifdef USE_MAPPED_LOCATION
125 sourcecolumn = xloc.column;
126 #endif
127 sourcefile = xloc.file;
128 if (sourcefile == NULL && current_function_decl != NULL_TREE)
129 sourcefile = DECL_SOURCE_FILE (current_function_decl);
130 if (sourcefile == NULL)
131 sourcefile = "<unknown file>";
132
133 pp_string (buf, sourcefile);
134
135 if (sourceline != 0)
136 {
137 pp_string (buf, ":");
138 pp_decimal_int (buf, sourceline);
139
140 if (sourcecolumn != 0)
141 {
142 pp_string (buf, ":");
143 pp_decimal_int (buf, sourcecolumn);
144 }
145 }
146 }
147
148 if (current_function_decl != NULL_TREE)
149 {
150 /* Add (FUNCTION) */
151 pp_string (buf, " (");
152 {
153 const char *funcname = NULL;
154 if (DECL_NAME (current_function_decl))
155 funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
156 if (funcname == NULL)
157 funcname = "anonymous fn";
158
159 pp_string (buf, funcname);
160 }
161 pp_string (buf, ") ");
162 }
163 else
164 pp_string (buf, " ");
165
166 /* Add <variable-declaration>, possibly demangled. */
167 {
168 const char *declname = NULL;
169
170 if (DECL_NAME (decl) != NULL)
171 {
172 if (strcmp ("GNU C++", lang_hooks.name) == 0)
173 {
174 /* The gcc/cp decl_printable_name hook doesn't do as good a job as
175 the libiberty demangler. */
176 declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
177 DMGL_AUTO | DMGL_VERBOSE);
178 }
179 if (declname == NULL)
180 declname = lang_hooks.decl_printable_name (decl, 3);
181 }
182 if (declname == NULL)
183 declname = "<unnamed variable>";
184
185 pp_string (buf, declname);
186 }
187
188 /* Return the lot as a new STRING_CST. */
189 buf_contents = pp_base_formatted_text (buf);
190 result = mf_build_string (buf_contents);
191 pp_clear_output_area (buf);
192
193 return result;
194 }
195
196
197 /* And another friend, for producing a simpler message. */
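/* For illustration: a check emitted at line 7, column 3 of a.c inside
   `main' gets a string roughly like "a.c:7:3 (main)"; the column part
   appears only with mapped locations.  */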
198
199 static tree
200 mf_file_function_line_tree (location_t location)
201 {
202 expanded_location xloc = expand_location (location);
203 const char *file = NULL, *colon, *line, *op, *name, *cp;
204 char linecolbuf[30]; /* Enough for two decimal numbers plus a colon. */
205 char *string;
206 tree result;
207
208 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
209 file = xloc.file;
210 if (file == NULL && current_function_decl != NULL_TREE)
211 file = DECL_SOURCE_FILE (current_function_decl);
212 if (file == NULL)
213 file = "<unknown file>";
214
215 if (xloc.line > 0)
216 {
217 #ifdef USE_MAPPED_LOCATION
218 if (xloc.column > 0)
219 sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
220 else
221 #endif
222 sprintf (linecolbuf, "%d", xloc.line);
223 colon = ":";
224 line = linecolbuf;
225 }
226 else
227 colon = line = "";
228
229 /* Add (FUNCTION). */
230 name = lang_hooks.decl_printable_name (current_function_decl, 1);
231 if (name)
232 {
233 op = " (";
234 cp = ")";
235 }
236 else
237 op = name = cp = "";
238
239 string = concat (file, colon, line, op, name, cp, NULL);
240 result = mf_build_string (string);
241 free (string);
242
243 return result;
244 }
245
246
247 /* global tree nodes */
248
249 /* Global tree objects for global variables and functions exported by
250 the mudflap runtime library. mudflap_init must be called
251 before using these. */
252
253 /* uintptr_t (usually "unsigned long") */
254 static GTY (()) tree mf_uintptr_type;
255
256 /* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
257 static GTY (()) tree mf_cache_struct_type;
258
259 /* struct __mf_cache * const */
260 static GTY (()) tree mf_cache_structptr_type;
261
262 /* extern struct __mf_cache __mf_lookup_cache []; */
263 static GTY (()) tree mf_cache_array_decl;
264
265 /* extern unsigned char __mf_lc_shift; */
266 static GTY (()) tree mf_cache_shift_decl;
267
268 /* extern uintptr_t __mf_lc_mask; */
269 static GTY (()) tree mf_cache_mask_decl;
270
271 /* Their function-scope local shadows, used in single-threaded mode only. */
272
273 /* auto const unsigned char __mf_lc_shift_l; */
274 static GTY (()) tree mf_cache_shift_decl_l;
275
276 /* auto const uintptr_t __mf_lc_mask_l; */
277 static GTY (()) tree mf_cache_mask_decl_l;
278
279 /* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
280 static GTY (()) tree mf_check_fndecl;
281
282 /* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
283 static GTY (()) tree mf_register_fndecl;
284
285 /* extern void __mf_unregister (void *ptr, size_t sz, int type); */
286 static GTY (()) tree mf_unregister_fndecl;
287
288 /* extern void __mf_init (); */
289 static GTY (()) tree mf_init_fndecl;
290
291 /* extern int __mf_set_options (const char*); */
292 static GTY (()) tree mf_set_options_fndecl;
293
294
295 /* Helper for mudflap_init: construct a decl with the given category,
296 name, and type, mark it an external reference, and pushdecl it. */
297 static inline tree
298 mf_make_builtin (enum tree_code category, const char *name, tree type)
299 {
300 tree decl = mf_mark (build_decl (category, get_identifier (name), type));
301 TREE_PUBLIC (decl) = 1;
302 DECL_EXTERNAL (decl) = 1;
303 lang_hooks.decls.pushdecl (decl);
304 return decl;
305 }
306
307 /* Helper for mudflap_init: construct a tree corresponding to the type
308 struct __mf_cache { uintptr_t low; uintptr_t high; };
309 where uintptr_t is the FIELD_TYPE argument. */
310 static inline tree
311 mf_make_mf_cache_struct_type (tree field_type)
312 {
313 /* There is, abominably, no language-independent way to construct a
314 RECORD_TYPE. So we have to call the basic type construction
315 primitives by hand. */
316 tree fieldlo = build_decl (FIELD_DECL, get_identifier ("low"), field_type);
317 tree fieldhi = build_decl (FIELD_DECL, get_identifier ("high"), field_type);
318
319 tree struct_type = make_node (RECORD_TYPE);
320 DECL_CONTEXT (fieldlo) = struct_type;
321 DECL_CONTEXT (fieldhi) = struct_type;
322 TREE_CHAIN (fieldlo) = fieldhi;
323 TYPE_FIELDS (struct_type) = fieldlo;
324 TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
325 layout_type (struct_type);
326
327 return struct_type;
328 }
329
330 #define build_function_type_0(rtype) \
331 build_function_type (rtype, void_list_node)
332 #define build_function_type_1(rtype, arg1) \
333 build_function_type (rtype, tree_cons (0, arg1, void_list_node))
334 #define build_function_type_3(rtype, arg1, arg2, arg3) \
335 build_function_type (rtype, tree_cons (0, arg1, tree_cons (0, arg2, \
336 tree_cons (0, arg3, void_list_node))))
337 #define build_function_type_4(rtype, arg1, arg2, arg3, arg4) \
338 build_function_type (rtype, tree_cons (0, arg1, tree_cons (0, arg2, \
339 tree_cons (0, arg3, tree_cons (0, arg4, \
340 void_list_node)))))
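/* For illustration, with these helpers
     build_function_type_3 (void_type_node, ptr_type_node,
                            size_type_node, integer_type_node)
   builds the FUNCTION_TYPE of `void (void *, size_t, int)', used for
   mf_unregister_fntype below.  */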
341
342 /* Initialize the global tree nodes that correspond to mf-runtime.h
343 declarations. */
344 void
345 mudflap_init (void)
346 {
347 static bool done = false;
348 tree mf_const_string_type;
349 tree mf_cache_array_type;
350 tree mf_check_register_fntype;
351 tree mf_unregister_fntype;
352 tree mf_init_fntype;
353 tree mf_set_options_fntype;
354
355 if (done)
356 return;
357 done = true;
358
359 mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
360 /*unsignedp=*/true);
361 mf_const_string_type
362 = build_pointer_type (build_qualified_type
363 (char_type_node, TYPE_QUAL_CONST));
364
365 mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
366 mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
367 mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
368 mf_check_register_fntype =
369 build_function_type_4 (void_type_node, ptr_type_node, size_type_node,
370 integer_type_node, mf_const_string_type);
371 mf_unregister_fntype =
372 build_function_type_3 (void_type_node, ptr_type_node, size_type_node,
373 integer_type_node);
374 mf_init_fntype =
375 build_function_type_0 (void_type_node);
376 mf_set_options_fntype =
377 build_function_type_1 (integer_type_node, mf_const_string_type);
378
379 mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
380 mf_cache_array_type);
381 mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
382 unsigned_char_type_node);
383 mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
384 mf_uintptr_type);
385 /* Don't process these in mudflap_enqueue_decl, should they come by
386 there for some reason. */
387 mf_mark (mf_cache_array_decl);
388 mf_mark (mf_cache_shift_decl);
389 mf_mark (mf_cache_mask_decl);
390 mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
391 mf_check_register_fntype);
392 mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
393 mf_check_register_fntype);
394 mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
395 mf_unregister_fntype);
396 mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
397 mf_init_fntype);
398 mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
399 mf_set_options_fntype);
400 }
401 #undef build_function_type_4
402 #undef build_function_type_3
403 #undef build_function_type_1
404 #undef build_function_type_0
405
406
407 /* ------------------------------------------------------------------------ */
408 /* Memory reference transforms. Perform the mudflap indirection-related
409 tree transforms on the current function.
410
411 This is the second part of the mudflap instrumentation. It works on
412 low-level GIMPLE using the CFG, because we want to run this pass after
413 tree optimizations have been performed, but we have to preserve the CFG
414 for expansion from trees to RTL. */
415
416 static unsigned int
417 execute_mudflap_function_ops (void)
418 {
419 /* Don't instrument functions such as the synthetic constructor
420 built during mudflap_finish_file. */
421 if (mf_marked_p (current_function_decl) ||
422 DECL_ARTIFICIAL (current_function_decl))
423 return 0;
424
425 push_gimplify_context ();
426
427 /* In multithreaded mode, don't cache the lookup cache parameters. */
428 if (! flag_mudflap_threads)
429 mf_decl_cache_locals ();
430
431 mf_xform_derefs ();
432
433 if (! flag_mudflap_threads)
434 mf_decl_clear_locals ();
435
436 pop_gimplify_context (NULL);
437 return 0;
438 }
439
440 /* Create and initialize local shadow variables for the lookup cache
441 globals. Put their decls in the *_l globals for use by
442 mf_build_check_statement_for. */
443
444 static void
445 mf_decl_cache_locals (void)
446 {
447 tree t, shift_init_stmts, mask_init_stmts;
448 tree_stmt_iterator tsi;
449
450 /* Build the cache vars. */
451 mf_cache_shift_decl_l
452 = mf_mark (create_tmp_var (TREE_TYPE (mf_cache_shift_decl),
453 "__mf_lookup_shift_l"));
454
455 mf_cache_mask_decl_l
456 = mf_mark (create_tmp_var (TREE_TYPE (mf_cache_mask_decl),
457 "__mf_lookup_mask_l"));
458
459 /* Build initialization nodes for the cache vars. We just load the
460 globals into the cache variables. */
461 t = build2 (MODIFY_EXPR, TREE_TYPE (mf_cache_shift_decl_l),
462 mf_cache_shift_decl_l, mf_cache_shift_decl);
463 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (current_function_decl));
464 gimplify_to_stmt_list (&t);
465 shift_init_stmts = t;
466
467 t = build2 (MODIFY_EXPR, TREE_TYPE (mf_cache_mask_decl_l),
468 mf_cache_mask_decl_l, mf_cache_mask_decl);
469 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (current_function_decl));
470 gimplify_to_stmt_list (&t);
471 mask_init_stmts = t;
472
473 /* Anticipating multiple entry points, we insert the cache vars
474 initializers in each successor of the ENTRY_BLOCK_PTR. */
475 for (tsi = tsi_start (shift_init_stmts);
476 ! tsi_end_p (tsi);
477 tsi_next (&tsi))
478 insert_edge_copies (tsi_stmt (tsi), ENTRY_BLOCK_PTR);
479
480 for (tsi = tsi_start (mask_init_stmts);
481 ! tsi_end_p (tsi);
482 tsi_next (&tsi))
483 insert_edge_copies (tsi_stmt (tsi), ENTRY_BLOCK_PTR);
484 bsi_commit_edge_inserts ();
485 }
486
487
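/* Forget the function-local shadow decls built by mf_decl_cache_locals.  */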
488 static void
489 mf_decl_clear_locals (void)
490 {
491 /* Unset local shadows. */
492 mf_cache_shift_decl_l = NULL_TREE;
493 mf_cache_mask_decl_l = NULL_TREE;
494 }
495
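/* Emit, ahead of the statement at *INSTR_BSI, a lookup-cache check for
   the inclusive byte range [BASE, LIMIT].  DIRFLAG distinguishes reads
   from writes and LOCUS gives the source location.  Roughly, the
   generated code is:

     __mf_base = (uintptr_t) BASE;
     __mf_limit = (uintptr_t) LIMIT;
     __mf_elem = & __mf_lookup_cache [(__mf_base >> __mf_lc_shift)
                                      & __mf_lc_mask];
     if (__mf_elem->low > __mf_base || __mf_elem->high < __mf_limit)
       __mf_check ((void *) __mf_base, __mf_limit - __mf_base + 1,
                   DIRFLAG, "<location string>");

   (a sketch only; in single-threaded mode the *_l shadow variables are
   used for the shift/mask and are refreshed in the cache-miss arm).  */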
496 static void
497 mf_build_check_statement_for (tree base, tree limit,
498 block_stmt_iterator *instr_bsi,
499 location_t *locus, tree dirflag)
500 {
501 tree_stmt_iterator head, tsi;
502 block_stmt_iterator bsi;
503 basic_block cond_bb, then_bb, join_bb;
504 edge e;
505 tree cond, t, u, v;
506 tree mf_base;
507 tree mf_elem;
508 tree mf_limit;
509
510 /* We first need to split the current basic block, and start altering
511 the CFG. This allows us to insert the statements we're about to
512 construct into the right basic blocks. */
513
514 cond_bb = bb_for_stmt (bsi_stmt (*instr_bsi));
515 bsi = *instr_bsi;
516 bsi_prev (&bsi);
517 if (! bsi_end_p (bsi))
518 e = split_block (cond_bb, bsi_stmt (bsi));
519 else
520 e = split_block_after_labels (cond_bb);
521 cond_bb = e->src;
522 join_bb = e->dest;
523
524 /* A recap at this point: join_bb is the basic block at whose head
525 is the gimple statement for which this check expression is being
526 built. cond_bb is the (possibly new, synthetic) basic block the
527 end of which will contain the cache-lookup code, and a
528 conditional that jumps to the cache-miss code or, much more
529 likely, over to join_bb. */
530
531 /* Create the bb that contains the cache-miss fallback block (mf_check). */
532 then_bb = create_empty_bb (cond_bb);
533 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
534 make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);
535
536 /* Mark the pseudo-fallthrough edge from cond_bb to join_bb. */
537 e = find_edge (cond_bb, join_bb);
538 e->flags = EDGE_FALSE_VALUE;
539 e->count = cond_bb->count;
540 e->probability = REG_BR_PROB_BASE;
541
542 /* Update dominance info. Note that join_bb's data was
543 updated by split_block. */
544 if (dom_info_available_p (CDI_DOMINATORS))
545 {
546 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
547 set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
548 }
549
550 /* Build our local variables. */
551 mf_elem = create_tmp_var (mf_cache_structptr_type, "__mf_elem");
552 mf_base = create_tmp_var (mf_uintptr_type, "__mf_base");
553 mf_limit = create_tmp_var (mf_uintptr_type, "__mf_limit");
554
555 /* Build: __mf_base = (uintptr_t) <base address expression>. */
556 t = build2 (MODIFY_EXPR, void_type_node, mf_base,
557 convert (mf_uintptr_type, unshare_expr (base)));
558 SET_EXPR_LOCUS (t, locus);
559 gimplify_to_stmt_list (&t);
560 head = tsi_start (t);
561 tsi = tsi_last (t);
562
563 /* Build: __mf_limit = (uintptr_t) <limit address expression>. */
564 t = build2 (MODIFY_EXPR, void_type_node, mf_limit,
565 convert (mf_uintptr_type, unshare_expr (limit)));
566 SET_EXPR_LOCUS (t, locus);
567 gimplify_to_stmt_list (&t);
568 tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
569
570 /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
571 & __mf_mask]. */
572 t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
573 (flag_mudflap_threads ? mf_cache_shift_decl : mf_cache_shift_decl_l));
574 t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
575 (flag_mudflap_threads ? mf_cache_mask_decl : mf_cache_mask_decl_l));
576 t = build4 (ARRAY_REF,
577 TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
578 mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
579 t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
580 t = build2 (MODIFY_EXPR, void_type_node, mf_elem, t);
581 SET_EXPR_LOCUS (t, locus);
582 gimplify_to_stmt_list (&t);
583 tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
584
585 /* Quick validity check.
586
587 if (__mf_elem->low > __mf_base
588 || (__mf_elem->high < __mf_limit))
589 {
590 __mf_check ();
591 ... and only if single-threaded:
592 __mf_lookup_shift_l = ...;
593 __mf_lookup_mask_l = ...;
594 }
595
596 It is expected that this body of code is rarely executed so we mark
597 the edge to the THEN clause of the conditional jump as unlikely. */
598
599 /* Construct t <-- '__mf_elem->low > __mf_base'. */
600 t = build3 (COMPONENT_REF, mf_uintptr_type,
601 build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
602 TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
603 t = build2 (GT_EXPR, boolean_type_node, t, mf_base);
604
605 /* Construct '__mf_elem->high < __mf_limit'.
606
607 First build:
608 1) u <-- '__mf_elem->high'
609 2) v <-- '__mf_limit'.
610
611 Then build 'u <-- (u < v)'. */
612
613 u = build3 (COMPONENT_REF, mf_uintptr_type,
614 build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
615 TREE_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);
616
617 v = mf_limit;
618
619 u = build2 (LT_EXPR, boolean_type_node, u, v);
620
621 /* Build the composed conditional: t <-- 't || u'. Then store the
622 result of the evaluation of 't' in a temporary variable which we
623 can use as the condition for the conditional jump. */
624 t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
625 cond = create_tmp_var (boolean_type_node, "__mf_unlikely_cond");
626 t = build2 (MODIFY_EXPR, boolean_type_node, cond, t);
627 gimplify_to_stmt_list (&t);
628 tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
629
630 /* Build the conditional jump. 'cond' is just a temporary so we can
631 simply build a void COND_EXPR. We do need labels in both arms though. */
632 t = build3 (COND_EXPR, void_type_node, cond,
633 build1 (GOTO_EXPR, void_type_node, tree_block_label (then_bb)),
634 build1 (GOTO_EXPR, void_type_node, tree_block_label (join_bb)));
635 SET_EXPR_LOCUS (t, locus);
636 tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
637
638 /* At this point, after so much hard work, we have only constructed
639 the conditional jump,
640
641 if (__mf_elem->low > __mf_base
642 || (__mf_elem->high < __mf_limit))
643
644 The lowered GIMPLE tree representing this code is in the statement
645 list starting at 'head'.
646
647 We can insert this now in the current basic block, i.e. the one that
648 the statement we're instrumenting was originally in. */
649 bsi = bsi_last (cond_bb);
650 for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi))
651 bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_CONTINUE_LINKING);
652
653 /* Now build up the body of the cache-miss handling:
654
655 __mf_check();
656 refresh *_l vars.
657
658 This is the body of the conditional. */
659
660 u = tree_cons (NULL_TREE,
661 mf_file_function_line_tree (locus == NULL ? UNKNOWN_LOCATION
662 : *locus),
663 NULL_TREE);
664 u = tree_cons (NULL_TREE, dirflag, u);
665 /* NB: we pass the overall [base..limit] range to mf_check. */
666 u = tree_cons (NULL_TREE,
667 fold_build2 (PLUS_EXPR, integer_type_node,
668 fold_build2 (MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
669 integer_one_node),
670 u);
671 u = tree_cons (NULL_TREE, mf_base, u);
672 t = build_function_call_expr (mf_check_fndecl, u);
673 gimplify_to_stmt_list (&t);
674 head = tsi_start (t);
675 tsi = tsi_last (t);
676
677 if (! flag_mudflap_threads)
678 {
679 t = build2 (MODIFY_EXPR, void_type_node,
680 mf_cache_shift_decl_l, mf_cache_shift_decl);
681 tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
682
683 t = build2 (MODIFY_EXPR, void_type_node,
684 mf_cache_mask_decl_l, mf_cache_mask_decl);
685 tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
686 }
687
688 /* Insert the check code in the THEN block. */
689 bsi = bsi_start (then_bb);
690 for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi))
691 bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_CONTINUE_LINKING);
692
693 *instr_bsi = bsi_start (join_bb);
694 bsi_next (instr_bsi);
695 }
696
697
698 /* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
699 eligible for instrumentation. For the mudflap1 pass, this implies
700 that it should be registered with the libmudflap runtime. For the
701 mudflap2 pass this means instrumenting an indirection operation with
702 respect to the object.
703 */
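/* For example, given `int a[10]; ... f (&a[i]);', A is marked
   TREE_ADDRESSABLE (its index is not a known-valid constant) and so is
   eligible, whereas a scalar whose address is never taken is not.
   (An illustration; the exact tests are below.)  */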
704 static int
705 mf_decl_eligible_p (tree decl)
706 {
707 return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
708 /* The decl must have its address taken. In the case of
709 arrays, this flag is also set if the indexes are not
710 compile-time known valid constants. */
711 && TREE_ADDRESSABLE (decl) /* XXX: not sufficient: return-by-value structs! */
712 /* The type of the variable must be complete. */
713 && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
714 /* The decl hasn't been decomposed somehow. */
715 && !DECL_HAS_VALUE_EXPR_P (decl));
716 }
717
718
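/* Instrument the memory reference *TP, found in the statement at *ITER.
   DIRFLAG is integer_zero_node for a read and integer_one_node for a
   write; LOCUS is the statement's source location.  */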
719 static void
720 mf_xform_derefs_1 (block_stmt_iterator *iter, tree *tp,
721 location_t *locus, tree dirflag)
722 {
723 tree type, base, limit, addr, size, t;
724
725 /* Don't instrument read operations. */
726 if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
727 return;
728
729 /* Don't instrument marked nodes. */
730 if (mf_marked_p (*tp))
731 return;
732
733 t = *tp;
734 type = TREE_TYPE (t);
735
736 if (type == error_mark_node)
737 return;
738
739 size = TYPE_SIZE_UNIT (type);
740
741 switch (TREE_CODE (t))
742 {
743 case ARRAY_REF:
744 case COMPONENT_REF:
745 {
746 /* This is trickier than it may first appear. The reason is
747 that we are looking at expressions from the "inside out" at
748 this point. We may have a complex nested aggregate/array
749 expression (e.g. "a.b[i].c"), maybe with an indirection as
750 the leftmost operator ("p->a.b.d"), where instrumentation
751 is necessary. Or we may have an innocent "a.b.c"
752 expression that must not be instrumented. We need to
753 recurse all the way down the nesting structure to figure it
754 out: looking just at the outer node is not enough. */
755 tree var;
756 int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
757 /* If we have a bitfield component reference, we must note the
758 innermost addressable object in ELT, from which we will
759 construct the byte-addressable bounds of the bitfield. */
760 tree elt = NULL_TREE;
761 int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
762 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));
763
764 /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
765 containment hierarchy to find the outermost VAR_DECL. */
766 var = TREE_OPERAND (t, 0);
767 while (1)
768 {
769 if (bitfield_ref_p && elt == NULL_TREE
770 && (TREE_CODE (var) == ARRAY_REF || TREE_CODE (var) == COMPONENT_REF))
771 elt = var;
772
773 if (TREE_CODE (var) == ARRAY_REF)
774 {
775 component_ref_only = 0;
776 var = TREE_OPERAND (var, 0);
777 }
778 else if (TREE_CODE (var) == COMPONENT_REF)
779 var = TREE_OPERAND (var, 0);
780 else if (INDIRECT_REF_P (var))
781 {
782 base = TREE_OPERAND (var, 0);
783 break;
784 }
785 else
786 {
787 gcc_assert (TREE_CODE (var) == VAR_DECL
788 || TREE_CODE (var) == PARM_DECL
789 || TREE_CODE (var) == RESULT_DECL
790 || TREE_CODE (var) == STRING_CST);
791 /* Don't instrument this access if the underlying
792 variable is not "eligible". This test matches
793 those arrays that have only known-valid indexes,
794 and thus are not labeled TREE_ADDRESSABLE. */
795 if (! mf_decl_eligible_p (var) || component_ref_only)
796 return;
797 else
798 {
799 base = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (var)), var);
800 break;
801 }
802 }
803 }
804
805 /* Handle the case of ordinary non-indirection structure
806 accesses. These have only nested COMPONENT_REF nodes (no
807 INDIRECT_REF), but pass through the above filter loop.
808 Note that it's possible for such a struct variable to match
809 the eligible_p test because someone else might take its
810 address sometime. */
811
812 /* We need special processing for bitfield components, because
813 their addresses cannot be taken. */
814 if (bitfield_ref_p)
815 {
816 tree field = TREE_OPERAND (t, 1);
817
818 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
819 size = DECL_SIZE_UNIT (field);
820
821 if (elt)
822 elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)), elt);
823 addr = fold_convert (ptr_type_node, elt ? elt : base);
824 addr = fold_build2 (PLUS_EXPR, ptr_type_node,
825 addr, fold_convert (ptr_type_node,
826 byte_position (field)));
827 }
828 else
829 addr = build1 (ADDR_EXPR, build_pointer_type (type), t);
830
831 limit = fold_build2 (MINUS_EXPR, mf_uintptr_type,
832 fold_build2 (PLUS_EXPR, mf_uintptr_type,
833 convert (mf_uintptr_type, addr),
834 size),
835 integer_one_node);
836 }
837 break;
838
839 case INDIRECT_REF:
840 addr = TREE_OPERAND (t, 0);
841 base = addr;
842 limit = fold_build2 (MINUS_EXPR, ptr_type_node,
843 fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
844 integer_one_node);
845 break;
846
847 case TARGET_MEM_REF:
848 addr = tree_mem_ref_addr (ptr_type_node, t);
849 base = addr;
850 limit = fold_build2 (MINUS_EXPR, ptr_type_node,
851 fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
852 build_int_cst (ptr_type_node, 1));
853 break;
854
855 case ARRAY_RANGE_REF:
856 warning (0, "mudflap checking not yet implemented for ARRAY_RANGE_REF");
857 return;
858
859 case BIT_FIELD_REF:
860 /* ??? merge with COMPONENT_REF code above? */
861 {
862 tree ofs, rem, bpu;
863
864 /* If we're not dereferencing something, then the access
865 must be ok. */
866 if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
867 return;
868
869 bpu = bitsize_int (BITS_PER_UNIT);
870 ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
871 rem = size_binop (TRUNC_MOD_EXPR, ofs, bpu);
872 ofs = size_binop (TRUNC_DIV_EXPR, ofs, bpu);
873
874 size = convert (bitsizetype, TREE_OPERAND (t, 1));
875 size = size_binop (PLUS_EXPR, size, rem);
876 size = size_binop (CEIL_DIV_EXPR, size, bpu);
877 size = convert (sizetype, size);
878
879 addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
880 addr = convert (ptr_type_node, addr);
881 addr = fold_build2 (PLUS_EXPR, ptr_type_node, addr, ofs);
882
883 base = addr;
884 limit = fold_build2 (MINUS_EXPR, ptr_type_node,
885 fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
886 integer_one_node);
887 }
888 break;
889
890 default:
891 return;
892 }
893
894 mf_build_check_statement_for (base, limit, iter, locus, dirflag);
895 }
896
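/* Walk the basic blocks that existed before instrumentation began and
   instrument the memory references found in MODIFY_EXPR and RETURN_EXPR
   statements.  */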
897 static void
898 mf_xform_derefs (void)
899 {
900 basic_block bb, next;
901 block_stmt_iterator i;
902 int saved_last_basic_block = last_basic_block;
903
904 bb = ENTRY_BLOCK_PTR->next_bb;
905 do
906 {
907 next = bb->next_bb;
908 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
909 {
910 tree s = bsi_stmt (i);
911
912 /* Only a few GIMPLE statements can reference memory. */
913 switch (TREE_CODE (s))
914 {
915 case MODIFY_EXPR:
916 mf_xform_derefs_1 (&i, &TREE_OPERAND (s, 0), EXPR_LOCUS (s),
917 integer_one_node);
918 mf_xform_derefs_1 (&i, &TREE_OPERAND (s, 1), EXPR_LOCUS (s),
919 integer_zero_node);
920 break;
921
922 case RETURN_EXPR:
923 if (TREE_OPERAND (s, 0) != NULL_TREE)
924 {
925 if (TREE_CODE (TREE_OPERAND (s, 0)) == MODIFY_EXPR)
926 mf_xform_derefs_1 (&i, &TREE_OPERAND (TREE_OPERAND (s, 0), 1),
927 EXPR_LOCUS (s), integer_zero_node);
928 else
929 mf_xform_derefs_1 (&i, &TREE_OPERAND (s, 0), EXPR_LOCUS (s),
930 integer_zero_node);
931 }
932 break;
933
934 default:
935 ;
936 }
937 }
938 bb = next;
939 }
940 while (bb && bb->index <= saved_last_basic_block);
941 }
942
943 /* ------------------------------------------------------------------------ */
944 /* ADDR_EXPR transforms. Perform the declaration-related mudflap tree
945 transforms on the current function.
946
947 This is the first part of the mudflap instrumentation. It works on
948 high-level GIMPLE because after lowering, all variables are moved out
949 of their BIND_EXPR binding context, and we lose liveness information
950 for the declarations we wish to instrument. */
951
952 static unsigned int
953 execute_mudflap_function_decls (void)
954 {
955 /* Don't instrument functions such as the synthetic constructor
956 built during mudflap_finish_file. */
957 if (mf_marked_p (current_function_decl) ||
958 DECL_ARTIFICIAL (current_function_decl))
959 return 0;
960
961 push_gimplify_context ();
962
963 mf_xform_decls (DECL_SAVED_TREE (current_function_decl),
964 DECL_ARGUMENTS (current_function_decl));
965
966 pop_gimplify_context (NULL);
967 return 0;
968 }
969
970 /* This struct is passed from mf_xform_decls to its walk_tree callback
971 mx_xfn_xform_decls, carrying the state needed while searching for
972 objects that have their addresses taken. */
973 struct mf_xform_decls_data
974 {
975 tree param_decls;
976 };
977
978
979 /* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
980 _DECLs if appropriate. Arrange to call the __mf_register function
981 now, and the __mf_unregister function later for each. */
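/* For an eligible local X, the effect is roughly

     { int x;
       __mf_register (&x, sizeof (x), __MF_TYPE_STACK, "file:line (fn) x");
       try { ... original body ... }
       finally { __mf_unregister (&x, sizeof (x), __MF_TYPE_STACK); } }

   (a sketch: the actual result is a TRY_FINALLY_EXPR wrapping the
   BIND_EXPR body, with the calls built as trees below).  */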
982 static void
983 mx_register_decls (tree decl, tree *stmt_list)
984 {
985 tree finally_stmts = NULL_TREE;
986 tree_stmt_iterator initially_stmts = tsi_start (*stmt_list);
987
988 while (decl != NULL_TREE)
989 {
990 if (mf_decl_eligible_p (decl)
991 /* Not already processed. */
992 && ! mf_marked_p (decl)
993 /* Automatic variable. */
994 && ! DECL_EXTERNAL (decl)
995 && ! TREE_STATIC (decl))
996 {
997 tree size = NULL_TREE, variable_name;
998 tree unregister_fncall, unregister_fncall_params;
999 tree register_fncall, register_fncall_params;
1000
1001 size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl)));
1002
1003 /* (& VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
1004 unregister_fncall_params =
1005 tree_cons (NULL_TREE,
1006 convert (ptr_type_node,
1007 mf_mark (build1 (ADDR_EXPR,
1008 build_pointer_type (TREE_TYPE (decl)),
1009 decl))),
1010 tree_cons (NULL_TREE,
1011 size,
1012 tree_cons (NULL_TREE,
1013 /* __MF_TYPE_STACK */
1014 build_int_cst (NULL_TREE, 3),
1015 NULL_TREE)));
1016 /* __mf_unregister (...) */
1017 unregister_fncall = build_function_call_expr (mf_unregister_fndecl,
1018 unregister_fncall_params);
1019
1020 /* (& VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK, "name") */
1021 variable_name = mf_varname_tree (decl);
1022 register_fncall_params =
1023 tree_cons (NULL_TREE,
1024 convert (ptr_type_node,
1025 mf_mark (build1 (ADDR_EXPR,
1026 build_pointer_type (TREE_TYPE (decl)),
1027 decl))),
1028 tree_cons (NULL_TREE,
1029 size,
1030 tree_cons (NULL_TREE,
1031 /* __MF_TYPE_STACK */
1032 build_int_cst (NULL_TREE, 3),
1033 tree_cons (NULL_TREE,
1034 variable_name,
1035 NULL_TREE))));
1036
1037 /* __mf_register (...) */
1038 register_fncall = build_function_call_expr (mf_register_fndecl,
1039 register_fncall_params);
1040
1041 /* Accumulate the two calls. */
1042 /* ??? Set EXPR_LOCATION. */
1043 gimplify_stmt (&register_fncall);
1044 gimplify_stmt (&unregister_fncall);
1045
1046 /* Add the __mf_register call at the current appending point. */
1047 if (tsi_end_p (initially_stmts))
1048 warning (0, "mudflap cannot track %qs in stub function",
1049 IDENTIFIER_POINTER (DECL_NAME (decl)));
1050 else
1051 {
1052 tsi_link_before (&initially_stmts, register_fncall, TSI_SAME_STMT);
1053
1054 /* Accumulate the FINALLY piece. */
1055 append_to_statement_list (unregister_fncall, &finally_stmts);
1056 }
1057 mf_mark (decl);
1058 }
1059
1060 decl = TREE_CHAIN (decl);
1061 }
1062
1063 /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
1064 if (finally_stmts != NULL_TREE)
1065 {
1066 tree t = build2 (TRY_FINALLY_EXPR, void_type_node,
1067 *stmt_list, finally_stmts);
1068 *stmt_list = NULL;
1069 append_to_statement_list (t, stmt_list);
1070 }
1071 }
1072
1073
1074 /* Process every variable mentioned in BIND_EXPRs. */
1075 static tree
1076 mx_xfn_xform_decls (tree *t, int *continue_p, void *data)
1077 {
1078 struct mf_xform_decls_data* d = (struct mf_xform_decls_data*) data;
1079
1080 if (*t == NULL_TREE || *t == error_mark_node)
1081 {
1082 *continue_p = 0;
1083 return NULL_TREE;
1084 }
1085
1086 *continue_p = 1;
1087
1088 switch (TREE_CODE (*t))
1089 {
1090 case BIND_EXPR:
1091 {
1092 /* Process function parameters now (but only once). */
1093 mx_register_decls (d->param_decls, &BIND_EXPR_BODY (*t));
1094 d->param_decls = NULL_TREE;
1095
1096 mx_register_decls (BIND_EXPR_VARS (*t), &BIND_EXPR_BODY (*t));
1097 }
1098 break;
1099
1100 default:
1101 break;
1102 }
1103
1104 return NULL_TREE;
1105 }
1106
1107 /* Perform the object lifetime tracking mudflap transform on the given function
1108 tree. The tree is mutated in place, with possibly copied subtree nodes.
1109
1110 For every auto variable declared, if its address is ever taken
1111 within the function, then supply its lifetime to the mudflap
1112 runtime with the __mf_register and __mf_unregister calls.
1113 */
1114
1115 static void
1116 mf_xform_decls (tree fnbody, tree fnparams)
1117 {
1118 struct mf_xform_decls_data d;
1119 d.param_decls = fnparams;
1120 walk_tree_without_duplicates (&fnbody, mx_xfn_xform_decls, &d);
1121 }
1122
1123
1124 /* ------------------------------------------------------------------------ */
1125 /* Externally visible mudflap functions. */
1126
1127
1128 /* Mark and return the given tree node to prevent further mudflap
1129 transforms. */
1130 static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;
1131
1132 tree
1133 mf_mark (tree t)
1134 {
1135 void **slot;
1136
1137 if (marked_trees == NULL)
1138 marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer, NULL);
1139
1140 slot = htab_find_slot (marked_trees, t, INSERT);
1141 *slot = t;
1142 return t;
1143 }
1144
1145 int
1146 mf_marked_p (tree t)
1147 {
1148 void *entry;
1149
1150 if (marked_trees == NULL)
1151 return 0;
1152
1153 entry = htab_find (marked_trees, t);
1154 return (entry != NULL);
1155 }
1156
1157 /* Remember the given node as a static of some kind: global data, a
1158 function-scope static, or an anonymous constant. Its registration
1159 call is queued for emission from the file-scope constructor. */
1160
1161 /* A list of globals whose incomplete declarations we encountered.
1162 Instead of emitting the __mf_register call for them here, it's
1163 delayed until program finish time. If they're still incomplete by
1164 then, warnings are emitted. */
1165
1166 static GTY (()) VEC(tree,gc) *deferred_static_decls;
1167
1168 /* A list of statements for calling __mf_register() at startup time. */
1169 static GTY (()) tree enqueued_call_stmt_chain;
1170
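/* Queue a call
     __mf_register (&OBJ, OBJECT_SIZE, __MF_TYPE_STATIC, VARNAME)
   on enqueued_call_stmt_chain; it is emitted from the file-scope
   constructor built in mudflap_finish_file.  */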
1171 static void
1172 mudflap_register_call (tree obj, tree object_size, tree varname)
1173 {
1174 tree arg, args, call_stmt;
1175
1176 args = tree_cons (NULL_TREE, varname, NULL_TREE);
1177
1178 arg = build_int_cst (NULL_TREE, 4); /* __MF_TYPE_STATIC */
1179 args = tree_cons (NULL_TREE, arg, args);
1180
1181 arg = convert (size_type_node, object_size);
1182 args = tree_cons (NULL_TREE, arg, args);
1183
1184 arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
1185 arg = convert (ptr_type_node, arg);
1186 args = tree_cons (NULL_TREE, arg, args);
1187
1188 call_stmt = build_function_call_expr (mf_register_fndecl, args);
1189
1190 append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
1191 }
1192
1193 void
1194 mudflap_enqueue_decl (tree obj)
1195 {
1196 if (mf_marked_p (obj))
1197 return;
1198
1199 /* We don't need to process variable decls that are internally
1200 generated extern. If we did, we'd end up with warnings for them
1201 during mudflap_finish_file (). That would confuse the user,
1202 since the text would refer to variables that don't show up in the
1203 user's source code. */
1204 if (DECL_P (obj) && DECL_EXTERNAL (obj) && DECL_ARTIFICIAL (obj))
1205 return;
1206
1207 VEC_safe_push (tree, gc, deferred_static_decls, obj);
1208 }
1209
1210
1211 void
1212 mudflap_enqueue_constant (tree obj)
1213 {
1214 tree object_size, varname;
1215
1216 if (mf_marked_p (obj))
1217 return;
1218
1219 if (TREE_CODE (obj) == STRING_CST)
1220 object_size = build_int_cst (NULL_TREE, TREE_STRING_LENGTH (obj));
1221 else
1222 object_size = size_in_bytes (TREE_TYPE (obj));
1223
1224 if (TREE_CODE (obj) == STRING_CST)
1225 varname = mf_build_string ("string literal");
1226 else
1227 varname = mf_build_string ("constant");
1228
1229 mudflap_register_call (obj, object_size, varname);
1230 }
1231
1232
1233 /* Emit any file-wide instrumentation. */
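/* The synthesized static constructor roughly does

     __mf_init ();
     __mf_set_options ("-ignore-reads");   (only with -fmudflapir)
     __mf_register (&obj, sizeof (obj), __MF_TYPE_STATIC, "... obj ...");
     ... one call per surviving queued decl or constant ...

   (a sketch of the body handed to cgraph_build_static_cdtor).  */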
1234 void
1235 mudflap_finish_file (void)
1236 {
1237 tree ctor_statements = NULL_TREE;
1238
1239 /* No need to continue when there were errors. */
1240 if (errorcount != 0 || sorrycount != 0)
1241 return;
1242
1243 /* Insert a call to __mf_init. */
1244 {
1245 tree call2_stmt = build_function_call_expr (mf_init_fndecl, NULL_TREE);
1246 append_to_statement_list (call2_stmt, &ctor_statements);
1247 }
1248
1249 /* If appropriate, call __mf_set_options to pass along read-ignore mode. */
1250 if (flag_mudflap_ignore_reads)
1251 {
1252 tree arg = tree_cons (NULL_TREE,
1253 mf_build_string ("-ignore-reads"), NULL_TREE);
1254 tree call_stmt = build_function_call_expr (mf_set_options_fndecl, arg);
1255 append_to_statement_list (call_stmt, &ctor_statements);
1256 }
1257
1258 /* Process all enqueued object decls. */
1259 if (deferred_static_decls)
1260 {
1261 size_t i;
1262 tree obj;
1263 for (i = 0; VEC_iterate (tree, deferred_static_decls, i, obj); i++)
1264 {
1265 gcc_assert (DECL_P (obj));
1266
1267 if (mf_marked_p (obj))
1268 continue;
1269
1270 /* Omit registration for static unaddressed objects. NB:
1271 Perform registration for non-static objects regardless of
1272 TREE_USED or TREE_ADDRESSABLE, because they may be used
1273 from other compilation units. */
1274 if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
1275 continue;
1276
1277 if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
1278 {
1279 warning (0, "mudflap cannot track unknown size extern %qs",
1280 IDENTIFIER_POINTER (DECL_NAME (obj)));
1281 continue;
1282 }
1283
1284 mudflap_register_call (obj,
1285 size_in_bytes (TREE_TYPE (obj)),
1286 mf_varname_tree (obj));
1287 }
1288
1289 VEC_truncate (tree, deferred_static_decls, 0);
1290 }
1291
1292 /* Append all the enqueued registration calls. */
1293 if (enqueued_call_stmt_chain)
1294 {
1295 append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
1296 enqueued_call_stmt_chain = NULL_TREE;
1297 }
1298
1299 cgraph_build_static_cdtor ('I', ctor_statements,
1300 MAX_RESERVED_INIT_PRIORITY-1);
1301 }
1302
1303
1304 static bool
1305 gate_mudflap (void)
1306 {
1307 return flag_mudflap != 0;
1308 }
1309
1310 struct tree_opt_pass pass_mudflap_1 =
1311 {
1312 "mudflap1", /* name */
1313 gate_mudflap, /* gate */
1314 execute_mudflap_function_decls, /* execute */
1315 NULL, /* sub */
1316 NULL, /* next */
1317 0, /* static_pass_number */
1318 0, /* tv_id */
1319 PROP_gimple_any, /* properties_required */
1320 0, /* properties_provided */
1321 0, /* properties_destroyed */
1322 0, /* todo_flags_start */
1323 TODO_dump_func, /* todo_flags_finish */
1324 0 /* letter */
1325 };
1326
1327 struct tree_opt_pass pass_mudflap_2 =
1328 {
1329 "mudflap2", /* name */
1330 gate_mudflap, /* gate */
1331 execute_mudflap_function_ops, /* execute */
1332 NULL, /* sub */
1333 NULL, /* next */
1334 0, /* static_pass_number */
1335 0, /* tv_id */
1336 PROP_gimple_leh, /* properties_required */
1337 0, /* properties_provided */
1338 0, /* properties_destroyed */
1339 0, /* todo_flags_start */
1340 TODO_verify_flow | TODO_verify_stmts
1341 | TODO_dump_func, /* todo_flags_finish */
1342 0 /* letter */
1343 };
1344
1345 #include "gt-tree-mudflap.h"