i386.c (legitimize_tls_address): Generate tls_initial_exec_64_sun only when !TARGET_X32.
[gcc.git] / gcc / tree-mudflap.c
1 /* Mudflap: narrow-pointer bounds-checking by tree rewriting.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2012
3 Free Software Foundation, Inc.
4 Contributed by Frank Ch. Eigler <fche@redhat.com>
5 and Graydon Hoare <graydon@redhat.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "tm_p.h"
30 #include "basic-block.h"
31 #include "flags.h"
32 #include "function.h"
33 #include "tree-inline.h"
34 #include "gimple.h"
35 #include "tree-iterator.h"
36 #include "tree-flow.h"
37 #include "tree-mudflap.h"
38 #include "tree-dump.h"
39 #include "tree-pass.h"
40 #include "hashtab.h"
41 #include "diagnostic.h"
42 #include "demangle.h"
43 #include "langhooks.h"
44 #include "ggc.h"
45 #include "cgraph.h"
46 #include "gimple.h"
47
48 extern void add_bb_to_loop (basic_block, struct loop *);
49
50 /* Internal function decls */
51
52
53 /* Options. */
54 #define flag_mudflap_threads (flag_mudflap == 2)
55
56 /* Helpers. */
57 static tree mf_build_string (const char *string);
58 static tree mf_varname_tree (tree);
59 static tree mf_file_function_line_tree (location_t);
60
61 /* Indirection-related instrumentation. */
62 static void mf_decl_cache_locals (void);
63 static void mf_decl_clear_locals (void);
64 static void mf_xform_statements (void);
65 static unsigned int execute_mudflap_function_ops (void);
66
67 /* Addressable variables instrumentation. */
68 static void mf_xform_decls (gimple_seq, tree);
69 static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
70 struct walk_stmt_info *);
71 static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
72 static unsigned int execute_mudflap_function_decls (void);
73
74 /* Return true if DECL is artificial stub that shouldn't be instrumented by
75 mf. We should instrument clones of non-artificial functions. */
76 static inline bool
77 mf_artificial (const_tree decl)
78 {
79 return DECL_ARTIFICIAL (DECL_ORIGIN (decl));
80 }
81
82 /* ------------------------------------------------------------------------ */
83 /* Some generally helpful functions for mudflap instrumentation. */
84
85 /* Build a reference to a literal string. */
86 static tree
87 mf_build_string (const char *string)
88 {
89 size_t len = strlen (string);
90 tree result = mf_mark (build_string (len + 1, string));
91
92 TREE_TYPE (result) = build_array_type
93 (char_type_node, build_index_type (size_int (len)));
94 TREE_CONSTANT (result) = 1;
95 TREE_READONLY (result) = 1;
96 TREE_STATIC (result) = 1;
97
98 result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);
99
100 return mf_mark (result);
101 }
102
/* Create a properly typed STRING_CST node that describes the given
   declaration.  It will be used as an argument for __mf_register().
   Try to construct a helpful string, including file/function/variable
   name.  The result has the form
     "FILE[:LINE[:COL]] [(FUNCTION)] DECLNAME".  */

static tree
mf_varname_tree (tree decl)
{
  /* One pretty-printer is constructed lazily and reused across calls;
     its output area is cleared on entry and exit.  */
  static pretty_printer buf_rec;
  static int initialized = 0;
  pretty_printer *buf = & buf_rec;
  const char *buf_contents;
  tree result;

  gcc_assert (decl);

  if (!initialized)
    {
      pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
      initialized = 1;
    }
  pp_clear_output_area (buf);

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  {
    expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
    const char *sourcefile;
    unsigned sourceline = xloc.line;
    unsigned sourcecolumn = 0;
    sourcecolumn = xloc.column;
    sourcefile = xloc.file;
    /* Fall back to the current function's file, then a placeholder,
       when the decl carries no usable location.  */
    if (sourcefile == NULL && current_function_decl != NULL_TREE)
      sourcefile = DECL_SOURCE_FILE (current_function_decl);
    if (sourcefile == NULL)
      sourcefile = "<unknown file>";

    pp_string (buf, sourcefile);

    /* Line and column are only emitted when known (non-zero).  */
    if (sourceline != 0)
      {
        pp_string (buf, ":");
        pp_decimal_int (buf, sourceline);

        if (sourcecolumn != 0)
          {
            pp_string (buf, ":");
            pp_decimal_int (buf, sourcecolumn);
          }
      }
  }

  if (current_function_decl != NULL_TREE)
    {
      /* Add (FUNCTION) */
      pp_string (buf, " (");
      {
        const char *funcname = NULL;
        if (DECL_NAME (current_function_decl))
          funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
        if (funcname == NULL)
          funcname = "anonymous fn";

        pp_string (buf, funcname);
      }
      pp_string (buf, ") ");
    }
  else
    pp_string (buf, " ");

  /* Add <variable-declaration>, possibly demangled.  */
  {
    const char *declname = NULL;

    if (DECL_NAME (decl) != NULL)
      {
        if (strcmp ("GNU C++", lang_hooks.name) == 0)
          {
            /* The gcc/cp decl_printable_name hook doesn't do as good a job as
               the libiberty demangler.  */
            declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
                                       DMGL_AUTO | DMGL_VERBOSE);
          }
        if (declname == NULL)
          declname = lang_hooks.decl_printable_name (decl, 3);
      }
    if (declname == NULL)
      declname = "<unnamed variable>";

    pp_string (buf, declname);
  }

  /* Return the lot as a new STRING_CST.  */
  buf_contents = pp_base_formatted_text (buf);
  result = mf_build_string (buf_contents);
  pp_clear_output_area (buf);

  return result;
}
201
202
/* And another friend, for producing a simpler message: build a
   "FILE[:LINE[:COL]] (FUNCTION)" string describing LOCATION, returned
   as a mudflap-marked string address (see mf_build_string).  Used as
   the location argument of __mf_check calls.  */

static tree
mf_file_function_line_tree (location_t location)
{
  expanded_location xloc = expand_location (location);
  const char *file = NULL, *colon, *line, *op, *name, *cp;
  char linecolbuf[30]; /* Enough for two decimal numbers plus a colon.  */
  char *string;
  tree result;

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  file = xloc.file;
  if (file == NULL && current_function_decl != NULL_TREE)
    file = DECL_SOURCE_FILE (current_function_decl);
  if (file == NULL)
    file = "<unknown file>";

  if (xloc.line > 0)
    {
      if (xloc.column > 0)
        sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
      else
        sprintf (linecolbuf, "%d", xloc.line);
      colon = ":";
      line = linecolbuf;
    }
  else
    colon = line = "";

  /* Add (FUNCTION).  */
  /* NOTE(review): unlike mf_varname_tree, current_function_decl is not
     checked against NULL_TREE before invoking the decl_printable_name
     hook -- presumably this is only reached while compiling a function
     body; confirm with callers.  */
  name = lang_hooks.decl_printable_name (current_function_decl, 1);
  if (name)
    {
      op = " (";
      cp = ")";
    }
  else
    op = name = cp = "";

  /* concat allocates; mf_build_string copies, so free the scratch.  */
  string = concat (file, colon, line, op, name, cp, NULL);
  result = mf_build_string (string);
  free (string);

  return result;
}
249
250
/* global tree nodes */

/* Global tree objects for global variables and functions exported by
   the mudflap runtime library.  mudflap_init must be called before
   using these.  */

/* uintptr_t (usually "unsigned long") */
static GTY (()) tree mf_uintptr_type;

/* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
static GTY (()) tree mf_cache_struct_type;

/* struct __mf_cache * const */
static GTY (()) tree mf_cache_structptr_type;

/* extern struct __mf_cache __mf_lookup_cache []; */
static GTY (()) tree mf_cache_array_decl;

/* extern unsigned char __mf_lc_shift; */
static GTY (()) tree mf_cache_shift_decl;

/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;

/* Their function-scope local shadows, used in single-threaded mode only.
   Set up by mf_decl_cache_locals and reset by mf_decl_clear_locals.  */

/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;

/* auto const uintptr_t __mf_lc_mask_l; */
static GTY (()) tree mf_cache_mask_decl_l;

/* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_check_fndecl;

/* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_register_fndecl;

/* extern void __mf_unregister (void *ptr, size_t sz, int type); */
static GTY (()) tree mf_unregister_fndecl;

/* extern void __mf_init (); */
static GTY (()) tree mf_init_fndecl;

/* extern int __mf_set_options (const char*); */
static GTY (()) tree mf_set_options_fndecl;
297
298
299 /* Helper for mudflap_init: construct a decl with the given category,
300 name, and type, mark it an external reference, and pushdecl it. */
301 static inline tree
302 mf_make_builtin (enum tree_code category, const char *name, tree type)
303 {
304 tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
305 category, get_identifier (name), type));
306 TREE_PUBLIC (decl) = 1;
307 DECL_EXTERNAL (decl) = 1;
308 lang_hooks.decls.pushdecl (decl);
309 /* The decl was declared by the compiler. */
310 DECL_ARTIFICIAL (decl) = 1;
311 /* And we don't want debug info for it. */
312 DECL_IGNORED_P (decl) = 1;
313 return decl;
314 }
315
316 /* Helper for mudflap_init: construct a tree corresponding to the type
317 struct __mf_cache { uintptr_t low; uintptr_t high; };
318 where uintptr_t is the FIELD_TYPE argument. */
319 static inline tree
320 mf_make_mf_cache_struct_type (tree field_type)
321 {
322 /* There is, abominably, no language-independent way to construct a
323 RECORD_TYPE. So we have to call the basic type construction
324 primitives by hand. */
325 tree fieldlo = build_decl (UNKNOWN_LOCATION,
326 FIELD_DECL, get_identifier ("low"), field_type);
327 tree fieldhi = build_decl (UNKNOWN_LOCATION,
328 FIELD_DECL, get_identifier ("high"), field_type);
329
330 tree struct_type = make_node (RECORD_TYPE);
331 DECL_CONTEXT (fieldlo) = struct_type;
332 DECL_CONTEXT (fieldhi) = struct_type;
333 DECL_CHAIN (fieldlo) = fieldhi;
334 TYPE_FIELDS (struct_type) = fieldlo;
335 TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
336 layout_type (struct_type);
337
338 return struct_type;
339 }
340
/* Initialize the global tree nodes that correspond to mf-runtime.h
   declarations.  Idempotent: only the first call does any work.  */
void
mudflap_init (void)
{
  static bool done = false;
  tree mf_const_string_type;
  tree mf_cache_array_type;
  tree mf_check_register_fntype;
  tree mf_unregister_fntype;
  tree mf_init_fntype;
  tree mf_set_options_fntype;

  if (done)
    return;
  done = true;

  /* uintptr_t: an unsigned integer wide enough to hold a pointer.  */
  mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
                                                    /*unsignedp=*/true);
  /* const char *, for the location-string arguments.  */
  mf_const_string_type
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));

  /* Types must be built before the decls that use them.  */
  mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
  mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
  mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
  /* __mf_check and __mf_register share a signature.  */
  mf_check_register_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, mf_const_string_type, NULL_TREE);
  mf_unregister_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, NULL_TREE);
  mf_init_fntype =
    build_function_type_list (void_type_node, NULL_TREE);
  mf_set_options_fntype =
    build_function_type_list (integer_type_node, mf_const_string_type, NULL_TREE);

  /* Extern decls for the runtime's lookup-cache globals.  */
  mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
                                         mf_cache_array_type);
  mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
                                         unsigned_char_type_node);
  mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
                                        mf_uintptr_type);
  /* Don't process these in mudflap_enqueue_decl, should they come by
     there for some reason.  */
  mf_mark (mf_cache_array_decl);
  mf_mark (mf_cache_shift_decl);
  mf_mark (mf_cache_mask_decl);
  /* Extern decls for the runtime's entry points.  */
  mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
                                     mf_check_register_fntype);
  mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
                                        mf_check_register_fntype);
  mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
                                          mf_unregister_fntype);
  mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
                                    mf_init_fntype);
  mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
                                           mf_set_options_fntype);
}
400
401
402 /* ------------------------------------------------------------------------ */
403 /* This is the second part of the mudflap instrumentation. It works on
404 low-level GIMPLE using the CFG, because we want to run this pass after
405 tree optimizations have been performed, but we have to preserve the CFG
406 for expansion from trees to RTL.
407 Below is the list of transformations performed on statements in the
408 current function.
409
410 1) Memory reference transforms: Perform the mudflap indirection-related
411 tree transforms on memory references.
412
413 2) Mark BUILTIN_ALLOCA calls not inlineable.
414
415 */
416
417 static unsigned int
418 execute_mudflap_function_ops (void)
419 {
420 struct gimplify_ctx gctx;
421
422 /* Don't instrument functions such as the synthetic constructor
423 built during mudflap_finish_file. */
424 if (mf_marked_p (current_function_decl)
425 || mf_artificial (current_function_decl))
426 return 0;
427
428 push_gimplify_context (&gctx);
429
430 /* In multithreaded mode, don't cache the lookup cache parameters. */
431 if (! flag_mudflap_threads)
432 mf_decl_cache_locals ();
433
434 mf_xform_statements ();
435
436 if (! flag_mudflap_threads)
437 mf_decl_clear_locals ();
438
439 pop_gimplify_context (NULL);
440 return 0;
441 }
442
443 /* Insert a gimple_seq SEQ on all the outgoing edges out of BB. Note that
444 if BB has more than one edge, STMT will be replicated for each edge.
445 Also, abnormal edges will be ignored. */
446
447 static void
448 insert_edge_copies_seq (gimple_seq seq, basic_block bb)
449 {
450 edge e;
451 edge_iterator ei;
452 unsigned n_copies = -1;
453
454 FOR_EACH_EDGE (e, ei, bb->succs)
455 if (!(e->flags & EDGE_ABNORMAL))
456 n_copies++;
457
458 FOR_EACH_EDGE (e, ei, bb->succs)
459 if (!(e->flags & EDGE_ABNORMAL))
460 gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
461 }
462
463 /* Create and initialize local shadow variables for the lookup cache
464 globals. Put their decls in the *_l globals for use by
465 mf_build_check_statement_for. */
466
467 static void
468 mf_decl_cache_locals (void)
469 {
470 gimple g;
471 gimple_seq seq = NULL;
472
473 /* Build the cache vars. */
474 mf_cache_shift_decl_l
475 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl),
476 "__mf_lookup_shift_l"));
477
478 mf_cache_mask_decl_l
479 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl),
480 "__mf_lookup_mask_l"));
481
482 /* Build initialization nodes for the cache vars. We just load the
483 globals into the cache variables. */
484 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
485 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
486 gimple_seq_add_stmt (&seq, g);
487
488 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
489 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
490 gimple_seq_add_stmt (&seq, g);
491
492 insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
493
494 gsi_commit_edge_inserts ();
495 }
496
497
498 static void
499 mf_decl_clear_locals (void)
500 {
501 /* Unset local shadows. */
502 mf_cache_shift_decl_l = NULL_TREE;
503 mf_cache_mask_decl_l = NULL_TREE;
504 }
505
/* Insert an inline lookup-cache check covering the byte range
   [BASE, LIMIT] before the statement at *INSTR_GSI, with a slow-path
   call to __mf_check on cache miss.  DIRFLAG distinguishes reads (0)
   from writes (1) and LOCATION is used for the synthesized statements.
   On return, *INSTR_GSI points at the start of the block holding the
   original statement.  */
static void
mf_build_check_statement_for (tree base, tree limit,
                              gimple_stmt_iterator *instr_gsi,
                              location_t location, tree dirflag)
{
  gimple_stmt_iterator gsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;
  gimple g;
  gimple_seq seq, stmts;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
  gsi = *instr_gsi;
  gsi_prev (&gsi);
  /* Split before the instrumented statement; if it is the first real
     statement, split just after any leading labels instead.  */
  if (! gsi_end_p (gsi))
    e = split_block (cond_bb, gsi_stmt (gsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (then_bb, cond_bb->loop_father);

  /* Build our local variables.  */
  mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem");
  mf_base = make_rename_temp (mf_uintptr_type, "__mf_base");
  mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  seq = NULL;
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (base));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_base, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (limit));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_limit, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].
     In single-threaded mode the shift/mask come from the function-local
     shadows set up by mf_decl_cache_locals.  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              flag_mudflap_threads ? mf_cache_shift_decl
               : mf_cache_shift_decl_l);
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              flag_mudflap_threads ? mf_cache_mask_decl
               : mf_cache_mask_decl_l);
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_elem, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))
        {
          __mf_check ();
          ... and only if single-threaded:
          __mf_lookup_shift_l = ...;
          __mf_lookup_mask_l = ...;
        }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low > __mf_base'.  The "low" field is
     the first in the cache struct (TYPE_FIELDS).  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
        1) u <--  '__mf_elem->high' (second field, via DECL_CHAIN)
        2) v <--  '__mf_limit'.

     Then build 'u <-- (u < v).  */

  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond");
  g = gimple_build_assign (cond, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
                         NULL_TREE);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))

     The lowered GIMPLE tree representing this code is in the statement
     list starting at 'head'.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /*  Now build up the body of the cache-miss handling:

     __mf_check();
     refresh *_l vars.

     This is the body of the conditional.  */

  seq = NULL;
  /* u is a string, so it is already a gimple value.  */
  u = mf_file_function_line_tree (location);
  /* NB: we pass the overall [base..limit] range to mf_check,
     as a size: limit - base + 1.  */
  v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
                       fold_build2_loc (location,
                                        MINUS_EXPR, mf_uintptr_type,
                                        mf_limit, mf_base),
                       build_int_cst (mf_uintptr_type, 1));
  v = force_gimple_operand (v, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
  gimple_seq_add_stmt (&seq, g);

  if (! flag_mudflap_threads)
    {
      /* If the __mf_check call can end a basic block (e.g. it may
         throw), split after it so the shadow refreshes land in a
         fallthrough block of their own.  */
      if (stmt_ends_bb_p (g))
        {
          gsi = gsi_start_bb (then_bb);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
          e = split_block (then_bb, g);
          then_bb = e->dest;
          seq = NULL;
        }

      /* Reload the shadows: __mf_check may have changed the globals.  */
      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
      gimple_seq_add_stmt (&seq, g);

      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
      gimple_seq_add_stmt (&seq, g);
    }

  /* Insert the check code in the THEN block.  */
  gsi = gsi_start_bb (then_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  *instr_gsi = gsi_start_bb (join_bb);
}
719
720
721 /* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
722 eligible for instrumentation. For the mudflap1 pass, this implies
723 that it should be registered with the libmudflap runtime. For the
724 mudflap2 pass this means instrumenting an indirection operation with
725 respect to the object.
726 */
727 static int
728 mf_decl_eligible_p (tree decl)
729 {
730 return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
731 /* The decl must have its address taken. In the case of
732 arrays, this flag is also set if the indexes are not
733 compile-time known valid constants. */
734 /* XXX: not sufficient: return-by-value structs! */
735 && TREE_ADDRESSABLE (decl)
736 /* The type of the variable must be complete. */
737 && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
738 /* The decl hasn't been decomposed somehow. */
739 && !DECL_HAS_VALUE_EXPR_P (decl));
740 }
741
742
/* Examine the memory reference *TP inside the statement at *ITER and,
   when it is an instrumentable dereference, compute its [BASE, LIMIT]
   byte range and emit a cache check via mf_build_check_statement_for.
   DIRFLAG is integer_zero_node for reads and integer_one_node for
   writes; LOCATION is used for the synthesized statements.  */
static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  /* Byte size of the accessed object; may be narrowed for bitfields
     below.  */
  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
        /* If we have a bitfield component reference, we must note the
           innermost addressable object in ELT, from which we will
           construct the byte-addressable bounds of the bitfield.  */
        tree elt = NULL_TREE;
        int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
            /* Remember the innermost addressable node for a bitfield.  */
            if (bitfield_ref_p && elt == NULL_TREE
                && (TREE_CODE (var) == ARRAY_REF
                    || TREE_CODE (var) == COMPONENT_REF))
              elt = var;

            if (TREE_CODE (var) == ARRAY_REF)
              {
                /* An array index makes this more than a plain "a.b.c"
                   access, so it stays eligible for instrumentation.  */
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var)
                     || TREE_CODE (var) == MEM_REF)
              {
                /* Found an indirection at the root: its pointer operand
                   is the base address.  */
                base = TREE_OPERAND (var, 0);
                break;
              }
            else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
              {
                var = TREE_OPERAND (var, 0);
                /* View-converted non-string constants need no check.  */
                if (CONSTANT_CLASS_P (var)
                    && TREE_CODE (var) != STRING_CST)
                  return;
              }
            else
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
                  {
                    base = build1 (ADDR_EXPR,
                                   build_pointer_type (TREE_TYPE (var)), var);
                    break;
                  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

            if (elt)
              elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
                            elt);
            addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
            addr = fold_build_pointer_plus_loc (location,
                                                addr, byte_position (field));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        /* limit = addr + size - 1, computed in uintptr_t.  */
        limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                                 fold_build2_loc (location, PLUS_EXPR,
                                                  mf_uintptr_type,
                                                  fold_convert (mf_uintptr_type,
                                                                addr),
                                                  size),
                                 integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case MEM_REF:
      /* Fold the constant MEM_REF offset into the base address.  */
      addr = fold_build_pointer_plus_loc (location, TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1));
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc (location,
                                               fold_build_pointer_plus_loc (location,
                                                                            base,
                                                                            size),
                                               -1);
      break;

    case TARGET_MEM_REF:
      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc (location,
                                               fold_build_pointer_plus_loc (location,
                                                                            base,
                                                                            size),
                                               -1);
      break;

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
               "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above?  */
      {
        tree ofs, rem, bpu;

        /* If we're not dereferencing something, then the access
           must be ok.  */
        if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
          return;

        /* Convert the bit offset/size into a byte-aligned range:
           ofs = bit-offset / BITS_PER_UNIT (bytes),
           size = ceil ((bit-size + bit-offset % BITS_PER_UNIT)
                        / BITS_PER_UNIT).  */
        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = fold_convert (bitsizetype, TREE_OPERAND (t, 2));
        rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
        ofs = size_binop_loc (location, TRUNC_DIV_EXPR, ofs, bpu);

        size = fold_convert (bitsizetype, TREE_OPERAND (t, 1));
        size = size_binop_loc (location, PLUS_EXPR, size, rem);
        size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
        size = fold_convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = fold_convert (ptr_type_node, addr);
        addr = fold_build_pointer_plus_loc (location, addr, ofs);

        base = addr;
        limit = fold_build_pointer_plus_hwi_loc (location,
                                                 fold_build_pointer_plus_loc (location,
                                                                              base,
                                                                              size),
                                                 -1);
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, location, dirflag);
}
/* Transform
   1) Memory references.
   Walk every original basic block of the current function and run
   mf_xform_derefs_1 over the memory operands of assignments and
   return statements.  */
static void
mf_xform_statements (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  /* Instrumentation splits blocks and appends new ones; remember the
     pre-pass high-water mark so only original blocks are visited.  */
  int saved_last_basic_block = last_basic_block;
  enum gimple_rhs_class grhs_class;

  bb = ENTRY_BLOCK_PTR ->next_bb;
  do
    {
      /* Capture the successor now: instrumenting BB may split it.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple s = gsi_stmt (i);

          /* Only a few GIMPLE statements can reference memory.  */
          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
              /* LHS is a write (dirflag 1), RHS operands are reads.  */
              mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
                                 gimple_location (s), integer_one_node);
              mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
                                 gimple_location (s), integer_zero_node);
              grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
              if (grhs_class == GIMPLE_BINARY_RHS)
                mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
                                   gimple_location (s), integer_zero_node);
              break;

            case GIMPLE_RETURN:
              /* A returned value is a read.  */
              if (gimple_return_retval (s) != NULL_TREE)
                {
                  mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
                                     gimple_location (s),
                                     integer_zero_node);
                }
              break;

            default:
              ;
            }
        }
      bb = next;
    }
  while (bb && bb->index <= saved_last_basic_block);
}
989
990 /* ------------------------------------------------------------------------ */
991 /* ADDR_EXPR transforms. Perform the declaration-related mudflap tree
992 transforms on the current function.
993
994 This is the first part of the mudflap instrumentation. It works on
995 high-level GIMPLE because after lowering, all variables are moved out
996 of their BIND_EXPR binding context, and we lose liveness information
997 for the declarations we wish to instrument. */
998
999 static unsigned int
1000 execute_mudflap_function_decls (void)
1001 {
1002 struct gimplify_ctx gctx;
1003
1004 /* Don't instrument functions such as the synthetic constructor
1005 built during mudflap_finish_file. */
1006 if (mf_marked_p (current_function_decl)
1007 || mf_artificial (current_function_decl))
1008 return 0;
1009
1010 push_gimplify_context (&gctx);
1011
1012 mf_xform_decls (gimple_body (current_function_decl),
1013 DECL_ARGUMENTS (current_function_decl));
1014
1015 pop_gimplify_context (NULL);
1016 return 0;
1017 }
1018
/* This struct is passed from mf_xform_decls to the mx_xfn_xform_decls
   callback (via walk_stmt_info.info) to carry state needed during the
   traversal searching for objects that have their addresses taken.  */
struct mf_xform_decls_data
{
  tree param_decls;  /* Chain of parameter decls still awaiting
                        registration; reset to NULL_TREE once the
                        outermost GIMPLE_BIND has been processed.  */
};
1026
1027
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.

   DECL heads a DECL_CHAIN of variables belonging to a binding scope;
   SEQ is that scope's statement sequence; LOCATION is stamped onto the
   synthesized calls.  */
gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
          tree unregister_fncall_param, register_fncall_param;

          /* Variable-sized objects should have sizes already been
             gimplified when we got here. */
          size = fold_convert (size_type_node,
                               TYPE_SIZE_UNIT (TREE_TYPE (decl)));
          gcc_assert (is_gimple_val (size));


          unregister_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
                                                 unregister_fncall_param,
                                                 size,
                                                 integer_three_node);


          variable_name = mf_varname_tree (decl);
          /* A fresh ADDR_EXPR is built (and mf_mark'ed) for each call
             rather than sharing one tree between the two.  */
          register_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
                            "name") */
          register_fncall = gimple_build_call (mf_register_fndecl, 4,
                                               register_fncall_param,
                                               size,
                                               integer_three_node,
                                               variable_name);


          /* Accumulate the two calls.  */
          gimple_set_location (register_fncall, location);
          gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.  */
          if (gsi_end_p (initially_stmts))
            {
              /* Empty scope body: nowhere to insert the registration,
                 so just warn (unless the decl is compiler-generated).  */
              if (!mf_artificial (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
                         DECL_NAME (decl));
            }
          else
            {
              gsi_insert_before (&initially_stmts, register_fncall,
                                 GSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
            }
          /* Prevent this decl from being instrumented again.  */
          mf_mark (decl);
        }

      decl = DECL_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
    {
      /* Wrap the scope body in a TRY_FINALLY so the __mf_unregister
         calls run on every exit path from the scope.  */
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = NULL;

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}
1121
1122
1123 /* Process every variable mentioned in BIND_EXPRs. */
1124 static tree
1125 mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
1126 bool *handled_operands_p ATTRIBUTE_UNUSED,
1127 struct walk_stmt_info *wi)
1128 {
1129 struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
1130 gimple stmt = gsi_stmt (*gsi);
1131
1132 switch (gimple_code (stmt))
1133 {
1134 case GIMPLE_BIND:
1135 {
1136 /* Process function parameters now (but only once). */
1137 if (d->param_decls)
1138 {
1139 gimple_bind_set_body (stmt,
1140 mx_register_decls (d->param_decls,
1141 gimple_bind_body (stmt),
1142 gimple_location (stmt)));
1143 d->param_decls = NULL_TREE;
1144 }
1145
1146 gimple_bind_set_body (stmt,
1147 mx_register_decls (gimple_bind_vars (stmt),
1148 gimple_bind_body (stmt),
1149 gimple_location (stmt)));
1150 }
1151 break;
1152
1153 default:
1154 break;
1155 }
1156
1157 return NULL_TREE;
1158 }
1159
1160 /* Perform the object lifetime tracking mudflap transform on the given function
1161 tree. The tree is mutated in place, with possibly copied subtree nodes.
1162
1163 For every auto variable declared, if its address is ever taken
1164 within the function, then supply its lifetime to the mudflap
1165 runtime with the __mf_register and __mf_unregister calls.
1166 */
1167
1168 static void
1169 mf_xform_decls (gimple_seq fnbody, tree fnparams)
1170 {
1171 struct mf_xform_decls_data d;
1172 struct walk_stmt_info wi;
1173 struct pointer_set_t *pset = pointer_set_create ();
1174
1175 d.param_decls = fnparams;
1176 memset (&wi, 0, sizeof (wi));
1177 wi.info = (void*) &d;
1178 wi.pset = pset;
1179 walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
1180 pointer_set_destroy (pset);
1181 }
1182
1183
1184 /* ------------------------------------------------------------------------ */
1185 /* Externally visible mudflap functions. */
1186
1187
/* Mark and return the given tree node to prevent further mudflap
   transforms.  Marked nodes are recorded in this pointer-keyed hash
   table, created lazily and kept alive across collections via GTY.  */
static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;
1191
1192 tree
1193 mf_mark (tree t)
1194 {
1195 void **slot;
1196
1197 if (marked_trees == NULL)
1198 marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
1199 NULL);
1200
1201 slot = htab_find_slot (marked_trees, t, INSERT);
1202 *slot = t;
1203 return t;
1204 }
1205
1206 int
1207 mf_marked_p (tree t)
1208 {
1209 void *entry;
1210
1211 if (marked_trees == NULL)
1212 return 0;
1213
1214 entry = htab_find (marked_trees, t);
1215 return (entry != NULL);
1216 }
1217
1218 /* Remember given node as a static of some kind: global data,
1219 function-scope static, or an anonymous constant. Its assembler
1220 label is given. */
1221
/* A list of globals whose incomplete declarations we encountered.
   Instead of emitting the __mf_register call for them here, it's
   delayed until program finish time.  If they're still incomplete by
   then, warnings are emitted.  */

static GTY (()) VEC(tree,gc) *deferred_static_decls;

/* A list of statements for calling __mf_register() at startup time.
   Accumulated by mudflap_register_call and flushed into the synthetic
   constructor by mudflap_finish_file.  */
static GTY (()) tree enqueued_call_stmt_chain;
1231
1232 static void
1233 mudflap_register_call (tree obj, tree object_size, tree varname)
1234 {
1235 tree arg, call_stmt;
1236
1237 arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
1238 arg = fold_convert (ptr_type_node, arg);
1239
1240 call_stmt = build_call_expr (mf_register_fndecl, 4,
1241 arg,
1242 fold_convert (size_type_node, object_size),
1243 /* __MF_TYPE_STATIC */
1244 build_int_cst (integer_type_node, 4),
1245 varname);
1246
1247 append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
1248 }
1249
1250 void
1251 mudflap_enqueue_decl (tree obj)
1252 {
1253 if (mf_marked_p (obj))
1254 return;
1255
1256 /* We don't need to process variable decls that are internally
1257 generated extern. If we did, we'd end up with warnings for them
1258 during mudflap_finish_file (). That would confuse the user,
1259 since the text would refer to variables that don't show up in the
1260 user's source code. */
1261 if (DECL_P (obj) && DECL_EXTERNAL (obj) && mf_artificial (obj))
1262 return;
1263
1264 VEC_safe_push (tree, gc, deferred_static_decls, obj);
1265 }
1266
1267
1268 void
1269 mudflap_enqueue_constant (tree obj)
1270 {
1271 tree object_size, varname;
1272
1273 if (mf_marked_p (obj))
1274 return;
1275
1276 if (TREE_CODE (obj) == STRING_CST)
1277 object_size = size_int (TREE_STRING_LENGTH (obj));
1278 else
1279 object_size = size_in_bytes (TREE_TYPE (obj));
1280
1281 if (TREE_CODE (obj) == STRING_CST)
1282 varname = mf_build_string ("string literal");
1283 else
1284 varname = mf_build_string ("constant");
1285
1286 mudflap_register_call (obj, object_size, varname);
1287 }
1288
1289
/* Emit any file-wide instrumentation: build a static constructor that
   calls __mf_init, optionally forwards the read-ignore option, and
   registers all deferred static decls and enqueued constants.  */
void
mudflap_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  /* No need to continue when there were errors. */
  if (seen_error ())
    return;

  /* Insert a call to __mf_init.  It must come first in the ctor.  */
  {
    tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
    append_to_statement_list (call2_stmt, &ctor_statements);
  }

  /* If appropriate, call __mf_set_options to pass along read-ignore mode. */
  if (flag_mudflap_ignore_reads)
    {
      tree arg = mf_build_string ("-ignore-reads");
      tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
      append_to_statement_list (call_stmt, &ctor_statements);
    }

  /* Process all enqueued object decls. */
  if (deferred_static_decls)
    {
      size_t i;
      tree obj;
      FOR_EACH_VEC_ELT (tree, deferred_static_decls, i, obj)
        {
          gcc_assert (DECL_P (obj));

          if (mf_marked_p (obj))
            continue;

          /* Omit registration for static unaddressed objects.  NB:
             Perform registration for non-static objects regardless of
             TREE_USED or TREE_ADDRESSABLE, because they may be used
             from other compilation units.  */
          if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
            continue;

          /* The decl's type never became complete, so its size is
             unknowable; warn rather than register.  */
          if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
            {
              warning (OPT_Wmudflap,
                       "mudflap cannot track unknown size extern %qE",
                       DECL_NAME (obj));
              continue;
            }

          mudflap_register_call (obj,
                                 size_in_bytes (TREE_TYPE (obj)),
                                 mf_varname_tree (obj));
        }

      VEC_truncate (tree, deferred_static_decls, 0);
    }

  /* Append all the enqueued registration calls.  */
  if (enqueued_call_stmt_chain)
    {
      append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
      enqueued_call_stmt_chain = NULL_TREE;
    }

  /* 'I' priority with MAX_RESERVED_INIT_PRIORITY-1 — presumably so
     registration precedes ordinary user constructors; confirm against
     cgraph_build_static_cdtor.  */
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY-1);
}
1359
1360
/* Gate shared by both mudflap passes: run only when -fmudflap (or
   -fmudflapth, flag_mudflap == 2) is in effect.  */
static bool
gate_mudflap (void)
{
  return flag_mudflap != 0;
}
1366
/* Pass descriptor for the first mudflap pass: runs early on high
   GIMPLE (PROP_gimple_any) and instruments declaration lifetimes via
   execute_mudflap_function_decls.  */
struct gimple_opt_pass pass_mudflap_1 =
{
 {
  GIMPLE_PASS,
  "mudflap1",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_decls,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
1385
/* Pass descriptor for the second mudflap pass: requires SSA/CFG/EH
   lowering and instruments memory references via
   execute_mudflap_function_ops, updating SSA form afterwards.  */
struct gimple_opt_pass pass_mudflap_2 =
{
 {
  GIMPLE_PASS,
  "mudflap2",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_ops,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_update_ssa                     /* todo_flags_finish */
 }
};
1405
1406 #include "gt-tree-mudflap.h"