builtins.c, [...]: Use fold_buildN instead of fold (buildN (...)).
[gcc.git] / gcc / tree-mudflap.c
1 /* Mudflap: narrow-pointer bounds-checking by tree rewriting.
2 Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Frank Ch. Eigler <fche@redhat.com>
4 and Graydon Hoare <graydon@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "tm_p.h"
32 #include "basic-block.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "tree-inline.h"
36 #include "tree-gimple.h"
37 #include "tree-flow.h"
38 #include "tree-mudflap.h"
39 #include "tree-dump.h"
40 #include "tree-pass.h"
41 #include "hashtab.h"
42 #include "diagnostic.h"
43 #include <demangle.h>
44 #include "langhooks.h"
45 #include "ggc.h"
46 #include "cgraph.h"
47 #include "toplev.h"
48
49 /* Internal function decls */
50
51 /* Helpers. */
52 static tree mf_build_string (const char *string);
53 static tree mf_varname_tree (tree);
54 static tree mf_file_function_line_tree (location_t);
55
56 /* Indirection-related instrumentation. */
57 static void mf_decl_cache_locals (void);
58 static void mf_decl_clear_locals (void);
59 static void mf_xform_derefs (void);
60 static void execute_mudflap_function_ops (void);
61
62 /* Addressable variables instrumentation. */
63 static void mf_xform_decls (tree, tree);
64 static tree mx_xfn_xform_decls (tree *, int *, void *);
65 static void mx_register_decls (tree, tree *);
66 static void execute_mudflap_function_decls (void);
67
68
69 /* ------------------------------------------------------------------------ */
70 /* Some generally helpful functions for mudflap instrumentation. */
71
72 /* Build a reference to a literal string. */
73 static tree
74 mf_build_string (const char *string)
75 {
76 size_t len = strlen (string);
77 tree result = mf_mark (build_string (len + 1, string));
78
79 TREE_TYPE (result) = build_array_type
80 (char_type_node, build_index_type (build_int_cst (NULL_TREE, len)));
81 TREE_CONSTANT (result) = 1;
82 TREE_INVARIANT (result) = 1;
83 TREE_READONLY (result) = 1;
84 TREE_STATIC (result) = 1;
85
86 result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);
87
88 return mf_mark (result);
89 }
90
/* Create a properly typed STRING_CST node that describes the given
   declaration.  It will be used as an argument for __mf_register().
   Try to construct a helpful string, including file/function/variable
   name.  The overall shape is "FILE[:LINE[:COL]] [(FUNCTION)] NAME".  */

static tree
mf_varname_tree (tree decl)
{
  /* A single pretty-printer instance is reused across calls; it is
     constructed lazily on first use and its output area cleared on
     entry and exit.  Not reentrant/thread-safe, which is fine for a
     compiler pass.  */
  static pretty_printer buf_rec;
  static int initialized = 0;
  pretty_printer *buf = & buf_rec;
  const char *buf_contents;
  tree result;

  gcc_assert (decl);

  if (!initialized)
    {
      pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
      initialized = 1;
    }
  pp_clear_output_area (buf);

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  {
    expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
    const char *sourcefile;
    unsigned sourceline = xloc.line;
    unsigned sourcecolumn = 0;
#ifdef USE_MAPPED_LOCATION
    /* Column information is only available with mapped locations.  */
    sourcecolumn = xloc.column;
#endif
    sourcefile = xloc.file;
    /* Fall back to the enclosing function's file, then a placeholder,
       if the decl itself carries no file name.  */
    if (sourcefile == NULL && current_function_decl != NULL_TREE)
      sourcefile = DECL_SOURCE_FILE (current_function_decl);
    if (sourcefile == NULL)
      sourcefile = "<unknown file>";

    pp_string (buf, sourcefile);

    if (sourceline != 0)
      {
        pp_string (buf, ":");
        pp_decimal_int (buf, sourceline);

        if (sourcecolumn != 0)
          {
            pp_string (buf, ":");
            pp_decimal_int (buf, sourcecolumn);
          }
      }
  }

  if (current_function_decl != NULL_TREE)
    {
      /* Add (FUNCTION) */
      pp_string (buf, " (");
      {
        const char *funcname = NULL;
        if (DECL_NAME (current_function_decl))
          funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
        if (funcname == NULL)
          funcname = "anonymous fn";

        pp_string (buf, funcname);
      }
      pp_string (buf, ") ");
    }
  else
    pp_string (buf, " ");

  /* Add <variable-declaration>, possibly demangled.  */
  {
    const char *declname = NULL;

    if (DECL_NAME (decl) != NULL)
      {
        if (strcmp ("GNU C++", lang_hooks.name) == 0)
          {
            /* The gcc/cp decl_printable_name hook doesn't do as good a job as
               the libiberty demangler.  */
            declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
                                       DMGL_AUTO | DMGL_VERBOSE);
          }
        /* Demangling may fail (or not apply); fall back to the
           language hook's verbose printable name.  */
        if (declname == NULL)
          declname = lang_hooks.decl_printable_name (decl, 3);
      }
    if (declname == NULL)
      declname = "<unnamed variable>";

    pp_string (buf, declname);
  }

  /* Return the lot as a new STRING_CST.  */
  buf_contents = pp_base_formatted_text (buf);
  result = mf_build_string (buf_contents);
  pp_clear_output_area (buf);

  return result;
}
191
192
193 /* And another friend, for producing a simpler message. */
194
195 static tree
196 mf_file_function_line_tree (location_t location)
197 {
198 expanded_location xloc = expand_location (location);
199 const char *file = NULL, *colon, *line, *op, *name, *cp;
200 char linecolbuf[30]; /* Enough for two decimal numbers plus a colon. */
201 char *string;
202 tree result;
203
204 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
205 file = xloc.file;
206 if (file == NULL && current_function_decl != NULL_TREE)
207 file = DECL_SOURCE_FILE (current_function_decl);
208 if (file == NULL)
209 file = "<unknown file>";
210
211 if (xloc.line > 0)
212 {
213 #ifdef USE_MAPPED_LOCATION
214 if (xloc.column > 0)
215 sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
216 else
217 #endif
218 sprintf (linecolbuf, "%d", xloc.line);
219 colon = ":";
220 line = linecolbuf;
221 }
222 else
223 colon = line = "";
224
225 /* Add (FUNCTION). */
226 name = lang_hooks.decl_printable_name (current_function_decl, 1);
227 if (name)
228 {
229 op = " (";
230 cp = ")";
231 }
232 else
233 op = name = cp = "";
234
235 string = concat (file, colon, line, op, name, cp, NULL);
236 result = mf_build_string (string);
237 free (string);
238
239 return result;
240 }
241
242
/* global tree nodes */

/* Global tree objects for global variables and functions exported by
   the mudflap runtime library.  mudflap_init must be called before
   using these.  */

/* uintptr_t (usually "unsigned long") */
static GTY (()) tree mf_uintptr_type;

/* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
static GTY (()) tree mf_cache_struct_type;

/* struct __mf_cache * const */
static GTY (()) tree mf_cache_structptr_type;

/* extern struct __mf_cache __mf_lookup_cache []; */
static GTY (()) tree mf_cache_array_decl;

/* extern unsigned char __mf_lc_shift; */
static GTY (()) tree mf_cache_shift_decl;

/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;

/* Their function-scope local shadows, used in single-threaded mode only.
   NULL_TREE outside instrumentation (see mf_decl_clear_locals).  */

/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;

/* auto const uintptr_t __mf_lc_mask_l; */
static GTY (()) tree mf_cache_mask_decl_l;

/* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_check_fndecl;

/* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_register_fndecl;

/* extern void __mf_unregister (void *ptr, size_t sz, int type); */
static GTY (()) tree mf_unregister_fndecl;

/* extern void __mf_init (); */
static GTY (()) tree mf_init_fndecl;

/* extern int __mf_set_options (const char*); */
static GTY (()) tree mf_set_options_fndecl;
289
290
291 /* Helper for mudflap_init: construct a decl with the given category,
292 name, and type, mark it an external reference, and pushdecl it. */
293 static inline tree
294 mf_make_builtin (enum tree_code category, const char *name, tree type)
295 {
296 tree decl = mf_mark (build_decl (category, get_identifier (name), type));
297 TREE_PUBLIC (decl) = 1;
298 DECL_EXTERNAL (decl) = 1;
299 lang_hooks.decls.pushdecl (decl);
300 return decl;
301 }
302
303 /* Helper for mudflap_init: construct a tree corresponding to the type
304 struct __mf_cache { uintptr_t low; uintptr_t high; };
305 where uintptr_t is the FIELD_TYPE argument. */
306 static inline tree
307 mf_make_mf_cache_struct_type (tree field_type)
308 {
309 /* There is, abominably, no language-independent way to construct a
310 RECORD_TYPE. So we have to call the basic type construction
311 primitives by hand. */
312 tree fieldlo = build_decl (FIELD_DECL, get_identifier ("low"), field_type);
313 tree fieldhi = build_decl (FIELD_DECL, get_identifier ("high"), field_type);
314
315 tree struct_type = make_node (RECORD_TYPE);
316 DECL_CONTEXT (fieldlo) = struct_type;
317 DECL_CONTEXT (fieldhi) = struct_type;
318 TREE_CHAIN (fieldlo) = fieldhi;
319 TYPE_FIELDS (struct_type) = fieldlo;
320 TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
321 layout_type (struct_type);
322
323 return struct_type;
324 }
325
/* Convenience wrappers for building the TREE_LIST argument chains that
   build_function_type expects; #undef'd immediately after mudflap_init
   to keep them file-local in effect.  */
#define build_function_type_0(rtype)            \
  build_function_type (rtype, void_list_node)
#define build_function_type_1(rtype, arg1)                               \
  build_function_type (rtype, tree_cons (0, arg1, void_list_node))
#define build_function_type_3(rtype, arg1, arg2, arg3)                  \
  build_function_type (rtype, tree_cons (0, arg1, tree_cons (0, arg2,   \
      tree_cons (0, arg3, void_list_node))))
#define build_function_type_4(rtype, arg1, arg2, arg3, arg4)            \
  build_function_type (rtype, tree_cons (0, arg1, tree_cons (0, arg2,   \
      tree_cons (0, arg3, tree_cons (0, arg4,                           \
                                     void_list_node)))))

/* Initialize the global tree nodes that correspond to mf-runtime.h
   declarations.  Idempotent: only the first call does any work.  */
void
mudflap_init (void)
{
  static bool done = false;
  tree mf_const_string_type;
  tree mf_cache_array_type;
  tree mf_check_register_fntype;
  tree mf_unregister_fntype;
  tree mf_init_fntype;
  tree mf_set_options_fntype;

  if (done)
    return;
  done = true;

  /* uintptr_t: the unsigned integer type with the width of ptr_mode.  */
  mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
                                                    /*unsignedp=*/true);
  mf_const_string_type
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));

  mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
  mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
  mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
  /* __mf_check and __mf_register share this signature.  */
  mf_check_register_fntype =
    build_function_type_4 (void_type_node, ptr_type_node, size_type_node,
                           integer_type_node, mf_const_string_type);
  mf_unregister_fntype =
    build_function_type_3 (void_type_node, ptr_type_node, size_type_node,
                           integer_type_node);
  mf_init_fntype =
    build_function_type_0 (void_type_node);
  mf_set_options_fntype =
    build_function_type_1 (integer_type_node, mf_const_string_type);

  mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
                                         mf_cache_array_type);
  mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
                                         unsigned_char_type_node);
  mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
                                        mf_uintptr_type);
  /* Don't process these in mudflap_enqueue_decl, should they come by
     there for some reason.  */
  mf_mark (mf_cache_array_decl);
  mf_mark (mf_cache_shift_decl);
  mf_mark (mf_cache_mask_decl);
  mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
                                     mf_check_register_fntype);
  mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
                                        mf_check_register_fntype);
  mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
                                          mf_unregister_fntype);
  mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
                                    mf_init_fntype);
  mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
                                           mf_set_options_fntype);
}
#undef build_function_type_4
#undef build_function_type_3
#undef build_function_type_1
#undef build_function_type_0
401
402
403 /* ------------------------------------------------------------------------ */
404 /* Memory reference transforms. Perform the mudflap indirection-related
405 tree transforms on the current function.
406
407 This is the second part of the mudflap instrumentation. It works on
408 low-level GIMPLE using the CFG, because we want to run this pass after
409 tree optimizations have been performed, but we have to preserve the CFG
410 for expansion from trees to RTL. */
411
412 static void
413 execute_mudflap_function_ops (void)
414 {
415 /* Don't instrument functions such as the synthetic constructor
416 built during mudflap_finish_file. */
417 if (mf_marked_p (current_function_decl) ||
418 DECL_ARTIFICIAL (current_function_decl))
419 return;
420
421 push_gimplify_context ();
422
423 /* In multithreaded mode, don't cache the lookup cache parameters. */
424 if (! flag_mudflap_threads)
425 mf_decl_cache_locals ();
426
427 mf_xform_derefs ();
428
429 if (! flag_mudflap_threads)
430 mf_decl_clear_locals ();
431
432 pop_gimplify_context (NULL);
433 }
434
/* Create and initialize local shadow variables for the lookup cache
   globals.  Put their decls in the *_l globals for use by
   mf_build_check_statement_for.  */

static void
mf_decl_cache_locals (void)
{
  tree t, shift_init_stmts, mask_init_stmts;
  tree_stmt_iterator tsi;

  /* Build the cache vars.  */
  mf_cache_shift_decl_l
    = mf_mark (create_tmp_var (TREE_TYPE (mf_cache_shift_decl),
                               "__mf_lookup_shift_l"));

  mf_cache_mask_decl_l
    = mf_mark (create_tmp_var (TREE_TYPE (mf_cache_mask_decl),
                               "__mf_lookup_mask_l"));

  /* Build initialization nodes for the cache vars.  We just load the
     globals into the cache variables.  */
  t = build (MODIFY_EXPR, TREE_TYPE (mf_cache_shift_decl_l),
             mf_cache_shift_decl_l, mf_cache_shift_decl);
  SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (current_function_decl));
  gimplify_to_stmt_list (&t);
  shift_init_stmts = t;

  t = build (MODIFY_EXPR, TREE_TYPE (mf_cache_mask_decl_l),
             mf_cache_mask_decl_l, mf_cache_mask_decl);
  SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (current_function_decl));
  gimplify_to_stmt_list (&t);
  mask_init_stmts = t;

  /* Anticipating multiple entry points, we insert the cache vars
     initializers in each successor of the ENTRY_BLOCK_PTR.
     insert_edge_copies queues the copies; they are flushed into the
     CFG by bsi_commit_edge_inserts below.  */
  for (tsi = tsi_start (shift_init_stmts);
       ! tsi_end_p (tsi);
       tsi_next (&tsi))
    insert_edge_copies (tsi_stmt (tsi), ENTRY_BLOCK_PTR);

  for (tsi = tsi_start (mask_init_stmts);
       ! tsi_end_p (tsi);
       tsi_next (&tsi))
    insert_edge_copies (tsi_stmt (tsi), ENTRY_BLOCK_PTR);
  bsi_commit_edge_inserts ();
}
481
482
483 static void
484 mf_decl_clear_locals (void)
485 {
486 /* Unset local shadows. */
487 mf_cache_shift_decl_l = NULL_TREE;
488 mf_cache_mask_decl_l = NULL_TREE;
489 }
490
/* Insert, before the statement at *INSTR_BSI, a libmudflap lookup-cache
   probe for the address range [BASE, LIMIT] (both inclusive), followed
   by a conditional call to __mf_check on a cache miss.  DIRFLAG
   distinguishes writes (integer_one_node) from reads; LOCUS, if
   non-null, supplies the source location for diagnostics.  The
   containing basic block is split and new blocks/edges are created.
   On return, *INSTR_BSI points at the original statement again.  */
static void
mf_build_check_statement_for (tree base, tree limit,
                              block_stmt_iterator *instr_bsi,
                              location_t *locus, tree dirflag)
{
  tree_stmt_iterator head, tsi;
  block_stmt_iterator bsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = bb_for_stmt (bsi_stmt (*instr_bsi));
  bsi = *instr_bsi;
  bsi_prev (&bsi);
  if (! bsi_end_p (bsi))
    e = split_block (cond_bb, bsi_stmt (bsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Build our local variables.  */
  mf_elem = create_tmp_var (mf_cache_structptr_type, "__mf_elem");
  mf_base = create_tmp_var (mf_uintptr_type, "__mf_base");
  mf_limit = create_tmp_var (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  t = build (MODIFY_EXPR, void_type_node, mf_base,
             convert (mf_uintptr_type, unshare_expr (base)));
  SET_EXPR_LOCUS (t, locus);
  gimplify_to_stmt_list (&t);
  /* HEAD anchors the whole statement list being accumulated; TSI is
     kept at its tail for further appends.  */
  head = tsi_start (t);
  tsi = tsi_last (t);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = build (MODIFY_EXPR, void_type_node, mf_limit,
             convert (mf_uintptr_type, unshare_expr (limit)));
  SET_EXPR_LOCUS (t, locus);
  gimplify_to_stmt_list (&t);
  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].  */
  t = build (RSHIFT_EXPR, mf_uintptr_type, mf_base,
             (flag_mudflap_threads ? mf_cache_shift_decl : mf_cache_shift_decl_l));
  t = build (BIT_AND_EXPR, mf_uintptr_type, t,
             (flag_mudflap_threads ? mf_cache_mask_decl : mf_cache_mask_decl_l));
  t = build (ARRAY_REF,
             TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
             mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = build (MODIFY_EXPR, void_type_node, mf_elem, t);
  SET_EXPR_LOCUS (t, locus);
  gimplify_to_stmt_list (&t);
  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem_high < __mf_limit))
        {
          __mf_check ();
          ... and only if single-threaded:
          __mf_lookup_shift_1 = f...;
          __mf_lookup_mask_l = ...;
        }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low  > __mf_base'.  The "low" field is
     the first field of the cache struct (TYPE_FIELDS head).  */
  t = build (COMPONENT_REF, mf_uintptr_type,
             build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
             TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
       1) u <--  '__mf_elem->high'
       2) v <--  '__mf_limit'.

     Then build 'u <-- (u < v).  */

  /* "high" is the second field, reached via TREE_CHAIN.  */
  u = build (COMPONENT_REF, mf_uintptr_type,
             build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
             TREE_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build (TRUTH_OR_EXPR, boolean_type_node, t, u);
  cond = create_tmp_var (boolean_type_node, "__mf_unlikely_cond");
  t = build (MODIFY_EXPR, boolean_type_node, cond, t);
  gimplify_to_stmt_list (&t);
  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  t = build (COND_EXPR, void_type_node, cond,
             build (GOTO_EXPR, void_type_node, tree_block_label (then_bb)),
             build (GOTO_EXPR, void_type_node, tree_block_label (join_bb)));
  SET_EXPR_LOCUS (t, locus);
  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem_high < __mf_limit))

     The lowered GIMPLE tree representing this code is in the statement
     list starting at 'head'.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  bsi = bsi_last (cond_bb);
  for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi))
    bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_CONTINUE_LINKING);

  /* Now build up the body of the cache-miss handling:

     __mf_check();
     refresh *_l vars.

     This is the body of the conditional.  */

  /* Arguments are built innermost-first as a TREE_LIST, so the chain
     ends up ordered (base, size, dirflag, location-string).  */
  u = tree_cons (NULL_TREE,
                 mf_file_function_line_tree (locus == NULL ? UNKNOWN_LOCATION
                                             : *locus),
                 NULL_TREE);
  u = tree_cons (NULL_TREE, dirflag, u);
  /* NB: we pass the overall [base..limit] range to mf_check.  */
  u = tree_cons (NULL_TREE,
                 fold_build2 (PLUS_EXPR, integer_type_node,
                              fold_build2 (MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
                              integer_one_node),
                 u);
  u = tree_cons (NULL_TREE, mf_base, u);
  t = build_function_call_expr (mf_check_fndecl, u);
  gimplify_to_stmt_list (&t);
  head = tsi_start (t);
  tsi = tsi_last (t);

  if (! flag_mudflap_threads)
    {
      /* Single-threaded mode: refresh the local shadows of the cache
         parameters, which __mf_check may have changed.  */
      t = build (MODIFY_EXPR, void_type_node,
                 mf_cache_shift_decl_l, mf_cache_shift_decl);
      tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

      t = build (MODIFY_EXPR, void_type_node,
                 mf_cache_mask_decl_l, mf_cache_mask_decl);
      tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
    }

  /* Insert the check code in the THEN block.  */
  bsi = bsi_start (then_bb);
  for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi))
    bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_CONTINUE_LINKING);

  /* Re-point the caller's iterator at the instrumented statement,
     which now heads join_bb (past any labels).  */
  *instr_bsi = bsi_start (join_bb);
  bsi_next (instr_bsi);
}
691
692
693 /* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
694 eligible for instrumentation. For the mudflap1 pass, this implies
695 that it should be registered with the libmudflap runtime. For the
696 mudflap2 pass this means instrumenting an indirection operation with
697 respect to the object.
698 */
699 static int
700 mf_decl_eligible_p (tree decl)
701 {
702 return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
703 /* The decl must have its address taken. In the case of
704 arrays, this flag is also set if the indexes are not
705 compile-time known valid constants. */
706 && TREE_ADDRESSABLE (decl) /* XXX: not sufficient: return-by-value structs! */
707 /* The type of the variable must be complete. */
708 && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
709 /* The decl hasn't been decomposed somehow. */
710 && !DECL_HAS_VALUE_EXPR_P (decl));
711 }
712
713
714 static void
715 mf_xform_derefs_1 (block_stmt_iterator *iter, tree *tp,
716 location_t *locus, tree dirflag)
717 {
718 tree type, base, limit, addr, size, t;
719
720 /* Don't instrument read operations. */
721 if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
722 return;
723
724 /* Don't instrument marked nodes. */
725 if (mf_marked_p (*tp))
726 return;
727
728 t = *tp;
729 type = TREE_TYPE (t);
730 size = TYPE_SIZE_UNIT (type);
731
732 switch (TREE_CODE (t))
733 {
734 case ARRAY_REF:
735 case COMPONENT_REF:
736 {
737 /* This is trickier than it may first appear. The reason is
738 that we are looking at expressions from the "inside out" at
739 this point. We may have a complex nested aggregate/array
740 expression (e.g. "a.b[i].c"), maybe with an indirection as
741 the leftmost operator ("p->a.b.d"), where instrumentation
742 is necessary. Or we may have an innocent "a.b.c"
743 expression that must not be instrumented. We need to
744 recurse all the way down the nesting structure to figure it
745 out: looking just at the outer node is not enough. */
746 tree var;
747 int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
748 /* If we have a bitfield component reference, we must note the
749 innermost addressable object in ELT, from which we will
750 construct the byte-addressable bounds of the bitfield. */
751 tree elt = NULL_TREE;
752 int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
753 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));
754
755 /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
756 containment hierarchy to find the outermost VAR_DECL. */
757 var = TREE_OPERAND (t, 0);
758 while (1)
759 {
760 if (bitfield_ref_p && elt == NULL_TREE
761 && (TREE_CODE (var) == ARRAY_REF || TREE_CODE (var) == COMPONENT_REF))
762 elt = var;
763
764 if (TREE_CODE (var) == ARRAY_REF)
765 {
766 component_ref_only = 0;
767 var = TREE_OPERAND (var, 0);
768 }
769 else if (TREE_CODE (var) == COMPONENT_REF)
770 var = TREE_OPERAND (var, 0);
771 else if (INDIRECT_REF_P (var))
772 {
773 base = TREE_OPERAND (var, 0);
774 break;
775 }
776 else
777 {
778 gcc_assert (TREE_CODE (var) == VAR_DECL
779 || TREE_CODE (var) == PARM_DECL
780 || TREE_CODE (var) == RESULT_DECL
781 || TREE_CODE (var) == STRING_CST);
782 /* Don't instrument this access if the underlying
783 variable is not "eligible". This test matches
784 those arrays that have only known-valid indexes,
785 and thus are not labeled TREE_ADDRESSABLE. */
786 if (! mf_decl_eligible_p (var) || component_ref_only)
787 return;
788 else
789 {
790 base = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (var)), var);
791 break;
792 }
793 }
794 }
795
796 /* Handle the case of ordinary non-indirection structure
797 accesses. These have only nested COMPONENT_REF nodes (no
798 INDIRECT_REF), but pass through the above filter loop.
799 Note that it's possible for such a struct variable to match
800 the eligible_p test because someone else might take its
801 address sometime. */
802
803 /* We need special processing for bitfield components, because
804 their addresses cannot be taken. */
805 if (bitfield_ref_p)
806 {
807 tree field = TREE_OPERAND (t, 1);
808
809 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
810 size = DECL_SIZE_UNIT (field);
811
812 if (elt)
813 elt = build1 (ADDR_EXPR, build_pointer_type TREE_TYPE (elt), elt);
814 addr = fold_convert (ptr_type_node, elt ? elt : base);
815 addr = fold_build2 (PLUS_EXPR, ptr_type_node,
816 addr, fold_convert (ptr_type_node,
817 byte_position (field)));
818 }
819 else
820 addr = build1 (ADDR_EXPR, build_pointer_type (type), t);
821
822 limit = fold_build2 (MINUS_EXPR, mf_uintptr_type,
823 fold_build2 (PLUS_EXPR, mf_uintptr_type,
824 convert (mf_uintptr_type, addr),
825 size),
826 integer_one_node);
827 }
828 break;
829
830 case INDIRECT_REF:
831 addr = TREE_OPERAND (t, 0);
832 base = addr;
833 limit = fold_build2 (MINUS_EXPR, ptr_type_node,
834 fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
835 integer_one_node);
836 break;
837
838 case TARGET_MEM_REF:
839 addr = tree_mem_ref_addr (ptr_type_node, t);
840 base = addr;
841 limit = fold_build2 (MINUS_EXPR, ptr_type_node,
842 fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
843 build_int_cst_type (ptr_type_node, 1));
844 break;
845
846 case ARRAY_RANGE_REF:
847 warning (0, "mudflap checking not yet implemented for ARRAY_RANGE_REF");
848 return;
849
850 case BIT_FIELD_REF:
851 /* ??? merge with COMPONENT_REF code above? */
852 {
853 tree ofs, rem, bpu;
854
855 /* If we're not dereferencing something, then the access
856 must be ok. */
857 if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
858 return;
859
860 bpu = bitsize_int (BITS_PER_UNIT);
861 ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
862 rem = size_binop (TRUNC_MOD_EXPR, ofs, bpu);
863 ofs = size_binop (TRUNC_DIV_EXPR, ofs, bpu);
864
865 size = convert (bitsizetype, TREE_OPERAND (t, 1));
866 size = size_binop (PLUS_EXPR, size, rem);
867 size = size_binop (CEIL_DIV_EXPR, size, bpu);
868 size = convert (sizetype, size);
869
870 addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
871 addr = convert (ptr_type_node, addr);
872 addr = fold_build2 (PLUS_EXPR, ptr_type_node, addr, ofs);
873
874 base = addr;
875 limit = fold_build2 (MINUS_EXPR, ptr_type_node,
876 fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
877 integer_one_node);
878 }
879 break;
880
881 default:
882 return;
883 }
884
885 mf_build_check_statement_for (base, limit, iter, locus, dirflag);
886 }
887
/* Walk every pre-existing basic block of the current function and
   instrument the memory-referencing operands of each statement that
   can touch memory (MODIFY_EXPR and RETURN_EXPR).  The iteration is
   bounded by the basic-block indices recorded before instrumentation
   starts, so blocks created during instrumentation are not revisited.  */
static void
mf_xform_derefs (void)
{
  basic_block bb, next;
  block_stmt_iterator i;
  int saved_last_basic_block = last_basic_block;

  bb = ENTRY_BLOCK_PTR ->next_bb;
  do
    {
      next = bb->next_bb;
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree s = bsi_stmt (i);

          /* Only a few GIMPLE statements can reference memory.  */
          switch (TREE_CODE (s))
            {
            case MODIFY_EXPR:
              /* The LHS is a store (dirflag 1), the RHS a load
                 (dirflag 0).  */
              mf_xform_derefs_1 (&i, &TREE_OPERAND (s, 0), EXPR_LOCUS (s),
                                 integer_one_node);
              mf_xform_derefs_1 (&i, &TREE_OPERAND (s, 1), EXPR_LOCUS (s),
                                 integer_zero_node);
              break;

            case RETURN_EXPR:
              if (TREE_OPERAND (s, 0) != NULL_TREE)
                {
                  /* A return value may be wrapped in a MODIFY_EXPR
                     assigning to the result decl; in that case check
                     only the value being read.  */
                  if (TREE_CODE (TREE_OPERAND (s, 0)) == MODIFY_EXPR)
                    mf_xform_derefs_1 (&i, &TREE_OPERAND (TREE_OPERAND (s, 0), 1),
                                       EXPR_LOCUS (s), integer_zero_node);
                  else
                    mf_xform_derefs_1 (&i, &TREE_OPERAND (s, 0), EXPR_LOCUS (s),
                                       integer_zero_node);
                }
              break;

            default:
              ;
            }
        }
      bb = next;
    }
  while (bb && bb->index <= saved_last_basic_block);
}
933
934 /* ------------------------------------------------------------------------ */
935 /* ADDR_EXPR transforms. Perform the declaration-related mudflap tree
936 transforms on the current function.
937
938 This is the first part of the mudflap instrumentation. It works on
939 high-level GIMPLE because after lowering, all variables are moved out
940 of their BIND_EXPR binding context, and we lose liveness information
941 for the declarations we wish to instrument. */
942
943 static void
944 execute_mudflap_function_decls (void)
945 {
946 /* Don't instrument functions such as the synthetic constructor
947 built during mudflap_finish_file. */
948 if (mf_marked_p (current_function_decl) ||
949 DECL_ARTIFICIAL (current_function_decl))
950 return;
951
952 push_gimplify_context ();
953
954 mf_xform_decls (DECL_SAVED_TREE (current_function_decl),
955 DECL_ARGUMENTS (current_function_decl));
956
957 pop_gimplify_context (NULL);
958 }
959
/* This struct is passed between mf_xform_decls to store state needed
   during the traversal searching for objects that have their
   addresses taken.  */
struct mf_xform_decls_data
{
  /* Chain of the current function's PARM_DECLs (DECL_ARGUMENTS) —
     see the call in execute_mudflap_function_decls.  */
  tree param_decls;
};
967
968
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  */
static void
mx_register_decls (tree decl, tree *stmt_list)
{
  /* Statements to run on scope exit (the __mf_unregister calls).  */
  tree finally_stmts = NULL_TREE;
  /* Insertion point for the __mf_register calls: the start of the
     existing statement list, i.e. scope entry.  */
  tree_stmt_iterator initially_stmts = tsi_start (*stmt_list);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          tree unregister_fncall, unregister_fncall_params;
          tree register_fncall, register_fncall_params;

          /* Object size in bytes, as a size_t.  */
          size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl)));

          /* NOTE(review): the ADDR_EXPR argument is built separately for
             the register and unregister calls below; sharing one tree node
             between two statements could create unwanted tree sharing --
             confirm before factoring the duplication out.  */

          /* (& VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall_params =
            tree_cons (NULL_TREE,
                       convert (ptr_type_node,
                                mf_mark (build1 (ADDR_EXPR,
                                                 build_pointer_type (TREE_TYPE (decl)),
                                                 decl))),
                       tree_cons (NULL_TREE,
                                  size,
                                  tree_cons (NULL_TREE,
                                             /* __MF_TYPE_STACK */
                                             build_int_cst (NULL_TREE, 3),
                                             NULL_TREE)));
          /* __mf_unregister (...) */
          unregister_fncall = build_function_call_expr (mf_unregister_fndecl,
                                                        unregister_fncall_params);

          /* (& VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK, "name") */
          variable_name = mf_varname_tree (decl);
          register_fncall_params =
            tree_cons (NULL_TREE,
                       convert (ptr_type_node,
                                mf_mark (build1 (ADDR_EXPR,
                                                 build_pointer_type (TREE_TYPE (decl)),
                                                 decl))),
                       tree_cons (NULL_TREE,
                                  size,
                                  tree_cons (NULL_TREE,
                                             /* __MF_TYPE_STACK */
                                             build_int_cst (NULL_TREE, 3),
                                             tree_cons (NULL_TREE,
                                                        variable_name,
                                                        NULL_TREE))));

          /* __mf_register (...) */
          register_fncall = build_function_call_expr (mf_register_fndecl,
                                                      register_fncall_params);

          /* Accumulate the two calls.  */
          /* ??? Set EXPR_LOCATION.  */
          gimplify_stmt (&register_fncall);
          gimplify_stmt (&unregister_fncall);

          /* Add the __mf_register call at the current appending point.  */
          if (tsi_end_p (initially_stmts))
            /* An empty statement list gives us nowhere to anchor the
               registration; warn instead of instrumenting.  */
            warning (0, "mudflap cannot track %qs in stub function",
                     IDENTIFIER_POINTER (DECL_NAME (decl)));
          else
            {
              tsi_link_before (&initially_stmts, register_fncall, TSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              append_to_statement_list (unregister_fncall, &finally_stmts);
            }
          mf_mark (decl);
        }

      decl = TREE_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL_TREE)
    {
      /* Wrap the whole original body so the unregister calls run on
         every exit path from the scope.  */
      tree t = build (TRY_FINALLY_EXPR, void_type_node,
                      *stmt_list, finally_stmts);
      *stmt_list = NULL;
      append_to_statement_list (t, stmt_list);
    }
}
1062
1063
1064 /* Process every variable mentioned in BIND_EXPRs. */
1065 static tree
1066 mx_xfn_xform_decls (tree *t, int *continue_p, void *data)
1067 {
1068 struct mf_xform_decls_data* d = (struct mf_xform_decls_data*) data;
1069
1070 if (*t == NULL_TREE || *t == error_mark_node)
1071 {
1072 *continue_p = 0;
1073 return NULL_TREE;
1074 }
1075
1076 *continue_p = 1;
1077
1078 switch (TREE_CODE (*t))
1079 {
1080 case BIND_EXPR:
1081 {
1082 /* Process function parameters now (but only once). */
1083 mx_register_decls (d->param_decls, &BIND_EXPR_BODY (*t));
1084 d->param_decls = NULL_TREE;
1085
1086 mx_register_decls (BIND_EXPR_VARS (*t), &BIND_EXPR_BODY (*t));
1087 }
1088 break;
1089
1090 default:
1091 break;
1092 }
1093
1094 return NULL_TREE;
1095 }
1096
1097 /* Perform the object lifetime tracking mudflap transform on the given function
1098 tree. The tree is mutated in place, with possibly copied subtree nodes.
1099
1100 For every auto variable declared, if its address is ever taken
1101 within the function, then supply its lifetime to the mudflap
1102 runtime with the __mf_register and __mf_unregister calls.
1103 */
1104
1105 static void
1106 mf_xform_decls (tree fnbody, tree fnparams)
1107 {
1108 struct mf_xform_decls_data d;
1109 d.param_decls = fnparams;
1110 walk_tree_without_duplicates (&fnbody, mx_xfn_xform_decls, &d);
1111 }
1112
1113
1114 /* ------------------------------------------------------------------------ */
1115 /* Externally visible mudflap functions. */
1116
1117
/* Mark and return the given tree node to prevent further mudflap
   transforms.  */
/* Pointer-identity hash table of already-processed trees; GC-allocated
   and created lazily by mf_mark.  */
static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;
1121
1122 tree
1123 mf_mark (tree t)
1124 {
1125 void **slot;
1126
1127 if (marked_trees == NULL)
1128 marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer, NULL);
1129
1130 slot = htab_find_slot (marked_trees, t, INSERT);
1131 *slot = t;
1132 return t;
1133 }
1134
1135 int
1136 mf_marked_p (tree t)
1137 {
1138 void *entry;
1139
1140 if (marked_trees == NULL)
1141 return 0;
1142
1143 entry = htab_find (marked_trees, t);
1144 return (entry != NULL);
1145 }
1146
/* Remember given node as a static of some kind: global data,
   function-scope static, or an anonymous constant.  Its assembler
   label is given.  */

/* A list of globals whose incomplete declarations we encountered.
   Instead of emitting the __mf_register call for them here, it's
   delayed until program finish time.  If they're still incomplete by
   then, warnings are emitted.  */

/* GC-rooted so the deferred decls survive until mudflap_finish_file.  */
static GTY (()) VEC(tree,gc) *deferred_static_decls;

/* A list of statements for calling __mf_register() at startup time.  */
static GTY (()) tree enqueued_call_stmt_chain;
1160
1161 static void
1162 mudflap_register_call (tree obj, tree object_size, tree varname)
1163 {
1164 tree arg, args, call_stmt;
1165
1166 args = tree_cons (NULL_TREE, varname, NULL_TREE);
1167
1168 arg = build_int_cst (NULL_TREE, 4); /* __MF_TYPE_STATIC */
1169 args = tree_cons (NULL_TREE, arg, args);
1170
1171 arg = convert (size_type_node, object_size);
1172 args = tree_cons (NULL_TREE, arg, args);
1173
1174 arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
1175 arg = convert (ptr_type_node, arg);
1176 args = tree_cons (NULL_TREE, arg, args);
1177
1178 call_stmt = build_function_call_expr (mf_register_fndecl, args);
1179
1180 append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
1181 }
1182
1183 void
1184 mudflap_enqueue_decl (tree obj)
1185 {
1186 if (mf_marked_p (obj))
1187 return;
1188
1189 /* We don't need to process variable decls that are internally
1190 generated extern. If we did, we'd end up with warnings for them
1191 during mudflap_finish_file (). That would confuse the user,
1192 since the text would refer to variables that don't show up in the
1193 user's source code. */
1194 if (DECL_P (obj) && DECL_EXTERNAL (obj) && DECL_ARTIFICIAL (obj))
1195 return;
1196
1197 VEC_safe_push (tree, gc, deferred_static_decls, obj);
1198 }
1199
1200
1201 void
1202 mudflap_enqueue_constant (tree obj)
1203 {
1204 tree object_size, varname;
1205
1206 if (mf_marked_p (obj))
1207 return;
1208
1209 if (TREE_CODE (obj) == STRING_CST)
1210 object_size = build_int_cst (NULL_TREE, TREE_STRING_LENGTH (obj));
1211 else
1212 object_size = size_in_bytes (TREE_TYPE (obj));
1213
1214 if (TREE_CODE (obj) == STRING_CST)
1215 varname = mf_build_string ("string literal");
1216 else
1217 varname = mf_build_string ("constant");
1218
1219 mudflap_register_call (obj, object_size, varname);
1220 }
1221
1222
1223 /* Emit any file-wide instrumentation. */
1224 void
1225 mudflap_finish_file (void)
1226 {
1227 tree ctor_statements = NULL_TREE;
1228
1229 /* Insert a call to __mf_init. */
1230 {
1231 tree call2_stmt = build_function_call_expr (mf_init_fndecl, NULL_TREE);
1232 append_to_statement_list (call2_stmt, &ctor_statements);
1233 }
1234
1235 /* If appropriate, call __mf_set_options to pass along read-ignore mode. */
1236 if (flag_mudflap_ignore_reads)
1237 {
1238 tree arg = tree_cons (NULL_TREE,
1239 mf_build_string ("-ignore-reads"), NULL_TREE);
1240 tree call_stmt = build_function_call_expr (mf_set_options_fndecl, arg);
1241 append_to_statement_list (call_stmt, &ctor_statements);
1242 }
1243
1244 /* Process all enqueued object decls. */
1245 if (deferred_static_decls)
1246 {
1247 size_t i;
1248 tree obj;
1249 for (i = 0; VEC_iterate (tree, deferred_static_decls, i, obj); i++)
1250 {
1251 gcc_assert (DECL_P (obj));
1252
1253 if (mf_marked_p (obj))
1254 continue;
1255
1256 /* Omit registration for static unaddressed objects. NB:
1257 Perform registration for non-static objects regardless of
1258 TREE_USED or TREE_ADDRESSABLE, because they may be used
1259 from other compilation units. */
1260 if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
1261 continue;
1262
1263 if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
1264 {
1265 warning (0, "mudflap cannot track unknown size extern %qs",
1266 IDENTIFIER_POINTER (DECL_NAME (obj)));
1267 continue;
1268 }
1269
1270 mudflap_register_call (obj,
1271 size_in_bytes (TREE_TYPE (obj)),
1272 mf_varname_tree (obj));
1273 }
1274
1275 VEC_truncate (tree, deferred_static_decls, 0);
1276 }
1277
1278 /* Append all the enqueued registration calls. */
1279 if (enqueued_call_stmt_chain)
1280 {
1281 append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
1282 enqueued_call_stmt_chain = NULL_TREE;
1283 }
1284
1285 cgraph_build_static_cdtor ('I', ctor_statements,
1286 MAX_RESERVED_INIT_PRIORITY-1);
1287 }
1288
1289
1290 static bool
1291 gate_mudflap (void)
1292 {
1293 return flag_mudflap != 0;
1294 }
1295
/* The first mudflap pass: runs execute_mudflap_function_decls on
   high-level GIMPLE (PROP_gimple_any) to register declaration
   lifetimes before lowering discards binding context.  */
struct tree_opt_pass pass_mudflap_1 =
{
  "mudflap1",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_decls,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
1312
/* The second mudflap pass: runs execute_mudflap_function_ops (defined
   elsewhere in this file) after EH lowering (PROP_gimple_leh) --
   presumably the pointer-dereference instrumentation; verify against
   its definition.  */
struct tree_opt_pass pass_mudflap_2 =
{
  "mudflap2",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_ops,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_gimple_leh,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_dump_func,                     /* todo_flags_finish */
  0                                     /* letter */
};
1330
1331 #include "gt-tree-mudflap.h"