/* Callgraph handling code.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2010
   Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "cgraph.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "hashtab.h"
#include "ggc.h"
#include "timevar.h"
#include "debug.h"
#include "target.h"
#include "output.h"
#include "gimple.h"
#include "tree-flow.h"
#include "flags.h"

/* This file contains the basic routines for manipulating the variable pool.

   The varpool acts as the interface between the front end and the middle
   end and drives the decision process about which variables are going to
   be compiled and when.

   The varpool nodes are allocated lazily for declarations, either by the
   front end or at callgraph construction time.  All variables that are
   supposed to be output into the final file need to be explicitly marked
   by the front end via the VARPOOL_FINALIZE_DECL function.  */
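
/* A minimal usage sketch of the flow described above, as a front end or
   test harness might drive it (illustrative only; how the VAR_DECL itself
   gets built is front-end specific, and the declaration below is merely a
   hypothetical placeholder):

     tree decl = ...;    a TREE_STATIC (or DECL_EXTERNAL) VAR_DECL
     struct varpool_node *node = varpool_node (decl);
     varpool_finalize_decl (decl);

   varpool_finalize_decl decides whether the variable is needed, and once
   the whole compilation unit is known, varpool_assemble_pending_decls
   emits the queued variables.  */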

/* Hash table used to convert declarations into nodes.  */
static GTY((param_is (struct varpool_node))) htab_t varpool_hash;

/* The linked list of cgraph varpool nodes.
   Linked via node->next pointer.  */
struct varpool_node *varpool_nodes;

/* Queue of varpool nodes scheduled to be lowered and output.
   The queue is maintained via varpool_mark_needed_node and linked through
   the node->next_needed pointer.

   LAST_NEEDED_NODE points to the end of the queue, so the queue can be
   maintained in forward order.  GTY is needed to make it friendly to PCH.

   During compilation we construct the queue of needed variables twice:
   the first time during cgraph construction, and the second time at the
   end of compilation in VARPOOL_REMOVE_UNREFERENCED_DECLS, so that we
   avoid outputting variables that have been optimized out.

   Each variable is thus first analyzed and later possibly output.
   FIRST_UNANALYZED_NODE points to the first node in the queue that has not
   been analyzed yet and is advanced by VARPOOL_ANALYZE_PENDING_DECLS.  */

struct varpool_node *varpool_nodes_queue;
static GTY(()) struct varpool_node *varpool_last_needed_node;
static GTY(()) struct varpool_node *varpool_first_unanalyzed_node;
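
/* An illustrative shape of the queue declared above (N1..N4 stand for
   varpool nodes linked through next_needed; this is only a sketch):

     varpool_nodes_queue            -> N1 -> N2 -> N3 -> N4 -> NULL
     varpool_first_unanalyzed_node  -> N3
     varpool_last_needed_node       -> N4

   Here N1 and N2 have already been analyzed, while N3 and N4 still wait
   for varpool_analyze_pending_decls to process them.  */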

/* Lists all assembled variables to be sent to debugger output later on.  */
static GTY(()) struct varpool_node *varpool_assembled_nodes_queue;

/* Return name of the node used in debug output.  */
const char *
varpool_node_name (struct varpool_node *node)
{
  return lang_hooks.decl_printable_name (node->decl, 2);
}

/* Returns a hash code for P.  */
static hashval_t
hash_varpool_node (const void *p)
{
  const struct varpool_node *n = (const struct varpool_node *) p;
  return (hashval_t) DECL_UID (n->decl);
}

/* Returns nonzero if P1 and P2 are equal.  */
static int
eq_varpool_node (const void *p1, const void *p2)
{
  const struct varpool_node *n1 = (const struct varpool_node *) p1;
  const struct varpool_node *n2 = (const struct varpool_node *) p2;
  return DECL_UID (n1->decl) == DECL_UID (n2->decl);
}

/* Return the varpool node assigned to DECL without creating a new one.  */
struct varpool_node *
varpool_get_node (tree decl)
{
  struct varpool_node key, **slot;

  gcc_assert (TREE_CODE (decl) == VAR_DECL
              && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)));

  if (!varpool_hash)
    return NULL;
  key.decl = decl;
  slot = (struct varpool_node **)
    htab_find_slot (varpool_hash, &key, NO_INSERT);
  if (!slot)
    return NULL;
  return *slot;
}

/* Return the varpool node assigned to DECL.  Create a new one when needed.  */
struct varpool_node *
varpool_node (tree decl)
{
  struct varpool_node key, *node, **slot;

  gcc_assert (TREE_CODE (decl) == VAR_DECL
              && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)));

  if (!varpool_hash)
    varpool_hash = htab_create_ggc (10, hash_varpool_node,
                                    eq_varpool_node, NULL);
  key.decl = decl;
  slot = (struct varpool_node **)
    htab_find_slot (varpool_hash, &key, INSERT);
  if (*slot)
    return *slot;
  node = ggc_alloc_cleared_varpool_node ();
  node->decl = decl;
  node->order = cgraph_order++;
  node->next = varpool_nodes;
  ipa_empty_ref_list (&node->ref_list);
  if (varpool_nodes)
    varpool_nodes->prev = node;
  varpool_nodes = node;
  *slot = node;
  return node;
}

/* Remove node from the varpool.  */
void
varpool_remove_node (struct varpool_node *node)
{
  void **slot;
  slot = htab_find_slot (varpool_hash, node, NO_INSERT);
  gcc_assert (*slot == node);
  htab_clear_slot (varpool_hash, slot);
  gcc_assert (!varpool_assembled_nodes_queue);
  if (!node->alias)
    while (node->extra_name)
      varpool_remove_node (node->extra_name);
  if (node->next)
    node->next->prev = node->prev;
  if (node->prev)
    node->prev->next = node->next;
  else
    {
      if (node->alias && node->extra_name)
        {
          gcc_assert (node->extra_name->extra_name == node);
          node->extra_name->extra_name = node->next;
        }
      else
        {
          gcc_assert (varpool_nodes == node);
          varpool_nodes = node->next;
        }
    }
  if (varpool_first_unanalyzed_node == node)
    varpool_first_unanalyzed_node = node->next_needed;
  if (node->next_needed)
    node->next_needed->prev_needed = node->prev_needed;
  else if (node->prev_needed)
    {
      gcc_assert (varpool_last_needed_node);
      varpool_last_needed_node = node->prev_needed;
    }
  if (node->prev_needed)
    node->prev_needed->next_needed = node->next_needed;
  else if (node->next_needed)
    {
      gcc_assert (varpool_nodes_queue == node);
      varpool_nodes_queue = node->next_needed;
    }
  if (node->same_comdat_group)
    {
      struct varpool_node *prev;
      for (prev = node->same_comdat_group;
           prev->same_comdat_group != node;
           prev = prev->same_comdat_group)
        ;
      if (node->same_comdat_group == prev)
        prev->same_comdat_group = NULL;
      else
        prev->same_comdat_group = node->same_comdat_group;
      node->same_comdat_group = NULL;
    }
  ipa_remove_all_references (&node->ref_list);
  ipa_remove_all_refering (&node->ref_list);
  ggc_free (node);
}

/* Dump the given varpool node.  */
void
dump_varpool_node (FILE *f, struct varpool_node *node)
{
  fprintf (f, "%s:", varpool_node_name (node));
  fprintf (f, " availability:%s",
           cgraph_function_flags_ready
           ? cgraph_availability_names[cgraph_variable_initializer_availability (node)]
           : "not-ready");
  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
    fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
  if (DECL_INITIAL (node->decl))
    fprintf (f, " initialized");
  if (TREE_ASM_WRITTEN (node->decl))
    fprintf (f, " (asm written)");
  if (node->needed)
    fprintf (f, " needed");
  if (node->analyzed)
    fprintf (f, " analyzed");
  if (node->finalized)
    fprintf (f, " finalized");
  if (node->output)
    fprintf (f, " output");
  if (node->externally_visible)
    fprintf (f, " externally_visible");
  if (node->in_other_partition)
    fprintf (f, " in_other_partition");
  else if (node->used_from_other_partition)
    fprintf (f, " used_from_other_partition");
  fprintf (f, "\n");
  fprintf (f, " References: ");
  ipa_dump_references (f, &node->ref_list);
  fprintf (f, " Refering this var: ");
  ipa_dump_refering (f, &node->ref_list);
}

/* Dump the variable pool.  */
void
dump_varpool (FILE *f)
{
  struct varpool_node *node;

  fprintf (f, "variable pool:\n\n");
  for (node = varpool_nodes; node; node = node->next)
    dump_varpool_node (f, node);
}

/* Dump the variable pool to stderr.  */

DEBUG_FUNCTION void
debug_varpool (void)
{
  dump_varpool (stderr);
}

/* Given an assembler name, look up the corresponding varpool node.  */
struct varpool_node *
varpool_node_for_asm (tree asmname)
{
  struct varpool_node *node;

  for (node = varpool_nodes; node; node = node->next)
    if (decl_assembler_name_equal (node->decl, asmname))
      return node;

  return NULL;
}

/* Helper function for the finalization code: add NODE into the lists so
   that it will be analyzed and compiled.  */
static void
varpool_enqueue_needed_node (struct varpool_node *node)
{
  if (varpool_last_needed_node)
    {
      varpool_last_needed_node->next_needed = node;
      node->prev_needed = varpool_last_needed_node;
    }
  varpool_last_needed_node = node;
  node->next_needed = NULL;
  if (!varpool_nodes_queue)
    varpool_nodes_queue = node;
  if (!varpool_first_unanalyzed_node)
    varpool_first_unanalyzed_node = node;
  notice_global_symbol (node->decl);
}

/* Notify finalize_compilation_unit that the given node is reachable
   or needed.  */
void
varpool_mark_needed_node (struct varpool_node *node)
{
  if (node->alias && node->extra_name)
    node = node->extra_name;
  if (!node->needed && node->finalized
      && !TREE_ASM_WRITTEN (node->decl))
    varpool_enqueue_needed_node (node);
  node->needed = 1;
}

/* Reset the queue of needed nodes.  */
void
varpool_reset_queue (void)
{
  varpool_last_needed_node = NULL;
  varpool_nodes_queue = NULL;
  varpool_first_unanalyzed_node = NULL;
}

/* Determine whether variable DECL is needed.  That is, whether it is
   visible to something outside this translation unit or to something
   magic in the system configury.  */
bool
decide_is_variable_needed (struct varpool_node *node, tree decl)
{
  if (node->used_from_other_partition)
    return true;
  /* If the user told us it is used, then it must be so.  */
  if ((node->externally_visible && !DECL_COMDAT (decl))
      || node->force_output)
    return true;

  /* Externally visible variables must be output.  The exception is
     COMDAT variables that must be output only when they are needed.  */
  if (TREE_PUBLIC (decl)
      && !flag_whole_program
      && !flag_lto
      && !flag_whopr
      && !DECL_COMDAT (decl)
      && !DECL_EXTERNAL (decl))
    return true;

  /* When not reordering top level variables, we have to assume that
     we are going to keep everything.  */
  if (flag_toplevel_reorder)
    return false;

  /* We want to emit COMDAT variables only when absolutely necessary.  */
  if (DECL_COMDAT (decl))
    return false;
  return true;
}
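
/* A couple of illustrative outcomes of the predicate above (assuming plain
   C input in a normal, non-whole-program, non-LTO compilation; this is a
   sketch, not an exhaustive list):

     int counter;           TREE_PUBLIC and not COMDAT/external, so it is
                            considered needed even when nothing refers to it
     static int helper;     not externally visible, so it is only kept if
                            something that gets compiled references it

   COMDAT variables (e.g. those backing C++ template instantiations) are
   emitted only when actually needed.  */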

/* Return true if DECL is constant and its initial value is known (so that
   we can do constant folding using DECL_INITIAL (decl)).  */

bool
const_value_known_p (tree decl)
{
  struct varpool_node *vnode;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != CONST_DECL)
    return false;

  if (TREE_CODE (decl) == CONST_DECL
      || DECL_IN_CONSTANT_POOL (decl))
    return true;

  gcc_assert (TREE_CODE (decl) == VAR_DECL);

  if (!TREE_READONLY (decl))
    return false;

  /* The gimplifier takes away constructors of local vars.  */
  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return DECL_INITIAL (decl) != NULL;

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* In WHOPR mode we can put a variable into one partition
     and make it external in the other partition.  In this
     case we still know the value, but it can't be determined
     from the DECL flags.  For this reason we keep the const_value_known
     flag in varpool nodes.  */
  if ((vnode = varpool_get_node (decl))
      && vnode->const_value_known)
    return true;

  /* Variables declared 'const' without an initializer
     have zero as the initializer if they may not be
     overridden at link or run time.  */
  if (!DECL_INITIAL (decl)
      && (DECL_EXTERNAL (decl)
          || DECL_REPLACEABLE_P (decl)))
    return false;

  /* Variables declared `const' with an initializer are considered
     not to be overwritable with a different initializer by default.

     ??? Previously we behaved so for scalar variables but not for array
     accesses.  */
  return true;
}
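
/* Illustrative cases for the predicate above (assuming C input; this is a
   sketch, not an exhaustive list):

     static const int a = 42;     value known, uses of A may fold to 42
     const int b = 7;             readonly with an initializer that may not
                                  be overridden, so the value is known
     extern const int c;          no initializer and DECL_EXTERNAL, so the
                                  value is not known in this unit.  */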

/* Mark DECL as finalized.  By finalizing the declaration, the front end
   instructs the middle end to output the variable to the asm file, if it
   is needed or externally visible.  */
void
varpool_finalize_decl (tree decl)
{
  struct varpool_node *node = varpool_node (decl);

  gcc_assert (TREE_STATIC (decl));

  /* The first declaration of a variable that comes through this function
     decides whether it is global (in C, has external linkage)
     or local (in C, has internal linkage).  So do nothing more
     if this function has already run.  */
  if (node->finalized)
    {
      if (cgraph_global_info_ready)
        varpool_assemble_pending_decls ();
      return;
    }
  if (node->needed)
    varpool_enqueue_needed_node (node);
  node->finalized = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl))
    node->force_output = true;

  if (decide_is_variable_needed (node, decl))
    varpool_mark_needed_node (node);
  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  else if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    varpool_mark_needed_node (node);
  node->const_value_known |= const_value_known_p (node->decl);
  if (cgraph_global_info_ready)
    varpool_assemble_pending_decls ();
}

/* Return variable availability.  See cgraph.h for a description of the
   individual return values.  */
enum availability
cgraph_variable_initializer_availability (struct varpool_node *node)
{
  gcc_assert (cgraph_function_flags_ready);
  if (!node->finalized)
    return AVAIL_NOT_AVAILABLE;
  if (!TREE_PUBLIC (node->decl))
    return AVAIL_AVAILABLE;
  /* If the variable can be overwritten, return OVERWRITABLE.  This takes
     care of notable extensions such as the COMDAT variables used to share
     template instantiations in C++.  */
  if (!(*targetm.binds_local_p) (node->decl) && !DECL_COMDAT (node->decl))
    return AVAIL_OVERWRITABLE;
  return AVAIL_AVAILABLE;
}

/* Walk the decls we marked as necessary and see if they reference new
   variables or functions and add them into the worklists.  */
bool
varpool_analyze_pending_decls (void)
{
  bool changed = false;

  timevar_push (TV_VARPOOL);
  while (varpool_first_unanalyzed_node)
    {
      struct varpool_node *node = varpool_first_unanalyzed_node, *next;
      tree decl = node->decl;
      bool analyzed = node->analyzed;

      varpool_first_unanalyzed_node->analyzed = true;

      varpool_first_unanalyzed_node = varpool_first_unanalyzed_node->next_needed;

      /* When reading back the varpool at LTO time, we re-construct the queue
         in order to get the "needed" list right by inserting all needed
         nodes into the varpool.  However, we don't want to re-analyze nodes
         that have already been analyzed.  */
      if (!analyzed)
        {
          gcc_assert (!in_lto_p || cgraph_function_flags_ready);
          /* Compute the alignment early so function body expanders are
             already informed about increased alignment.  */
          align_variable (decl, 0);
        }
      if (DECL_INITIAL (decl))
        record_references_in_initializer (decl, analyzed);
      if (node->same_comdat_group)
        {
          for (next = node->same_comdat_group;
               next != node;
               next = next->same_comdat_group)
            varpool_mark_needed_node (next);
        }
      changed = true;
    }
  timevar_pop (TV_VARPOOL);
  return changed;
}

/* Output one variable, if necessary.  Return whether we output it.  */
bool
varpool_assemble_decl (struct varpool_node *node)
{
  tree decl = node->decl;

  if (!TREE_ASM_WRITTEN (decl)
      && !node->alias
      && !node->in_other_partition
      && !DECL_EXTERNAL (decl)
      && (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
    {
      assemble_variable (decl, 0, 1, 0);
      if (TREE_ASM_WRITTEN (decl))
        {
          struct varpool_node *alias;

          node->next_needed = varpool_assembled_nodes_queue;
          node->prev_needed = NULL;
          if (varpool_assembled_nodes_queue)
            varpool_assembled_nodes_queue->prev_needed = node;
          varpool_assembled_nodes_queue = node;
          node->finalized = 1;

          /* Also emit any extra name aliases.  */
          for (alias = node->extra_name; alias; alias = alias->next)
            {
              /* Update linkage fields in case they've changed.  */
              DECL_WEAK (alias->decl) = DECL_WEAK (decl);
              TREE_PUBLIC (alias->decl) = TREE_PUBLIC (decl);
              DECL_VISIBILITY (alias->decl) = DECL_VISIBILITY (decl);
              assemble_alias (alias->decl, DECL_ASSEMBLER_NAME (decl));
            }

          return true;
        }
    }

  return false;
}

/* Optimization of function bodies might have rendered some variables
   unnecessary, so we want to avoid compiling them.

   This is done by pruning the queue and keeping only the variables that
   really appear needed (i.e. they are either externally visible or
   referenced by a compiled function).  Re-doing the reachability analysis
   on variables brings back the variables referenced by these.  */
void
varpool_remove_unreferenced_decls (void)
{
  struct varpool_node *next, *node = varpool_nodes_queue;

  varpool_reset_queue ();

  if (seen_error ())
    return;

  while (node)
    {
      tree decl = node->decl;
      next = node->next_needed;
      node->needed = 0;

      if (node->finalized
          && (decide_is_variable_needed (node, decl)
              /* ??? Cgraph does not yet rule the world with an iron hand,
                 and does not control the emission of debug information.
                 After a variable has its DECL_RTL set, we must assume that
                 it may be referenced by the debug information, and we can
                 no longer elide it.  */
              || DECL_RTL_SET_P (decl)))
        varpool_mark_needed_node (node);

      node = next;
    }
  /* Make sure we mark alias targets as used targets.  */
  finish_aliases_1 ();
  varpool_analyze_pending_decls ();
}

/* Output all variables enqueued to be assembled.  */
bool
varpool_assemble_pending_decls (void)
{
  bool changed = false;

  if (seen_error ())
    return false;

  timevar_push (TV_VAROUT);
  /* EH might mark decls as needed during expansion.  This should be safe
     since we don't create references to new functions, but it should not
     be used elsewhere.  */
  varpool_analyze_pending_decls ();

  while (varpool_nodes_queue)
    {
      struct varpool_node *node = varpool_nodes_queue;

      varpool_nodes_queue = varpool_nodes_queue->next_needed;
      if (varpool_assemble_decl (node))
        changed = true;
      else
        {
          node->prev_needed = NULL;
          node->next_needed = NULL;
        }
    }
  /* varpool_nodes_queue is now empty, clear the pointer to the last element
     in the queue.  */
  varpool_last_needed_node = NULL;
  timevar_pop (TV_VAROUT);
  return changed;
}

/* Remove all elements from the queue so we can re-use it for debug output.  */
void
varpool_empty_needed_queue (void)
{
  /* EH might mark decls as needed during expansion.  This should be safe
     since we don't create references to new functions, but it should not
     be used elsewhere.  */
  varpool_analyze_pending_decls ();

  while (varpool_nodes_queue)
    {
      struct varpool_node *node = varpool_nodes_queue;
      varpool_nodes_queue = varpool_nodes_queue->next_needed;
      node->next_needed = NULL;
      node->prev_needed = NULL;
    }
  /* varpool_nodes_queue is now empty, clear the pointer to the last element
     in the queue.  */
  varpool_last_needed_node = NULL;
}

/* Create a new global variable of type TYPE.  */
tree
add_new_static_var (tree type)
{
  tree new_decl;
  struct varpool_node *new_node;

  new_decl = create_tmp_var (type, NULL);
  DECL_NAME (new_decl) = create_tmp_var_name (NULL);
  TREE_READONLY (new_decl) = 0;
  TREE_STATIC (new_decl) = 1;
  TREE_USED (new_decl) = 1;
  DECL_CONTEXT (new_decl) = NULL_TREE;
  DECL_ABSTRACT (new_decl) = 0;
  lang_hooks.dup_lang_specific_decl (new_decl);
  create_var_ann (new_decl);
  new_node = varpool_node (new_decl);
  varpool_mark_needed_node (new_node);
  add_referenced_var (new_decl);
  varpool_finalize_decl (new_decl);

  return new_node->decl;
}
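
/* A hypothetical use of the helper above from a pass that needs a fresh
   file-scope variable (illustrative only):

     tree var = add_new_static_var (integer_type_node);

   The new variable is registered in the varpool, marked as needed, and
   finalized, so it will be emitted with the rest of the unit.  */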

/* Attempt to mark ALIAS as an alias to DECL.  Return TRUE if successful.
   Extra name aliases are output whenever DECL is output.  */

bool
varpool_extra_name_alias (tree alias, tree decl)
{
  struct varpool_node key, *alias_node, *decl_node, **slot;

#ifndef ASM_OUTPUT_DEF
  /* If aliases aren't supported by the assembler, fail.  */
  return false;
#endif

  gcc_assert (TREE_CODE (decl) == VAR_DECL);
  gcc_assert (TREE_CODE (alias) == VAR_DECL);
  /* Make sure the hash table has been created.  */
  decl_node = varpool_node (decl);

  key.decl = alias;

  slot = (struct varpool_node **) htab_find_slot (varpool_hash, &key, INSERT);

  /* If the varpool_node has already been created, fail.  */
  if (*slot)
    return false;

  alias_node = ggc_alloc_cleared_varpool_node ();
  alias_node->decl = alias;
  alias_node->alias = 1;
  alias_node->extra_name = decl_node;
  alias_node->next = decl_node->extra_name;
  ipa_empty_ref_list (&alias_node->ref_list);
  if (decl_node->extra_name)
    decl_node->extra_name->prev = alias_node;
  decl_node->extra_name = alias_node;
  *slot = alias_node;
  return true;
}
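
/* A source-level situation that can end up here (illustrative only; the
   exact path into this function is front-end specific):

     int real_var = 1;
     extern int alias_var __attribute__ ((alias ("real_var")));

   ALIAS_VAR then becomes an extra name alias of REAL_VAR and is emitted by
   varpool_assemble_decl whenever REAL_VAR itself is emitted.  */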

#include "gt-varpool.h"