/* gcc/varpool.c (from gcc.git).  */
1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "cgraph.h"
28 #include "langhooks.h"
29 #include "diagnostic-core.h"
30 #include "hashtab.h"
31 #include "ggc.h"
32 #include "timevar.h"
33 #include "debug.h"
34 #include "target.h"
35 #include "output.h"
36 #include "gimple.h"
37 #include "tree-flow.h"
38 #include "flags.h"
39
/* This file contains basic routines manipulating variable pool.

   Varpool acts as an interface between the front-end and middle-end
   and drives the decision process on what variables are going to be
   compiled, and when.

   The varpool nodes are allocated lazily for declarations
   either by the frontend or at callgraph construction time.
   All variables supposed to be output into the final file need to be
   explicitly marked by the frontend via the VARPOOL_FINALIZE_DECL
   function.  */

/* Hash table used to convert declarations into nodes.  Keyed and
   compared by the DECL_UID of the node's decl (see hash_varpool_node
   and eq_varpool_node below).  */
static GTY((param_is (struct varpool_node))) htab_t varpool_hash;

/* The linked list of cgraph varpool nodes.
   Linked via node->next pointer.  */
struct varpool_node *varpool_nodes;

/* Queue of cgraph nodes scheduled to be lowered and output.
   The queue is maintained via mark_needed_node, linked via node->next_needed
   pointer.

   LAST_NEEDED_NODE points to the end of the queue, so it can be
   maintained in forward order.  GTY is needed to make it friendly to
   PCH.

   During compilation we construct the queue of needed variables
   twice: first during cgraph construction, and a second time at the
   end of compilation in VARPOOL_REMOVE_UNREFERENCED_DECLS so we can avoid
   outputting variables that were optimized out.

   Each variable is thus first analyzed and then later possibly output.
   FIRST_UNANALYZED_NODE points to the first node in the queue that was not
   analyzed yet and is advanced by VARPOOL_ANALYZE_PENDING_DECLS.  */

struct varpool_node *varpool_nodes_queue;
static GTY(()) struct varpool_node *varpool_last_needed_node;
static GTY(()) struct varpool_node *varpool_first_unanalyzed_node;

/* Lists all assembled variables to be sent to debugger output later on.  */
static GTY(()) struct varpool_node *varpool_assembled_nodes_queue;
81
82 /* Return name of the node used in debug output. */
83 const char *
84 varpool_node_name (struct varpool_node *node)
85 {
86 return lang_hooks.decl_printable_name (node->decl, 2);
87 }
88
89 /* Returns a hash code for P. */
90 static hashval_t
91 hash_varpool_node (const void *p)
92 {
93 const struct varpool_node *n = (const struct varpool_node *) p;
94 return (hashval_t) DECL_UID (n->decl);
95 }
96
97 /* Returns nonzero if P1 and P2 are equal. */
98 static int
99 eq_varpool_node (const void *p1, const void *p2)
100 {
101 const struct varpool_node *n1 =
102 (const struct varpool_node *) p1;
103 const struct varpool_node *n2 =
104 (const struct varpool_node *) p2;
105 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
106 }
107
108 /* Return varpool node assigned to DECL without creating new one. */
109 struct varpool_node *
110 varpool_get_node (tree decl)
111 {
112 struct varpool_node key, **slot;
113
114 gcc_assert (TREE_CODE (decl) == VAR_DECL
115 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)));
116
117 if (!varpool_hash)
118 return NULL;
119 key.decl = decl;
120 slot = (struct varpool_node **)
121 htab_find_slot (varpool_hash, &key, NO_INSERT);
122 if (!slot)
123 return NULL;
124 return *slot;
125 }
126
127 /* Return varpool node assigned to DECL. Create new one when needed. */
128 struct varpool_node *
129 varpool_node (tree decl)
130 {
131 struct varpool_node key, *node, **slot;
132
133 gcc_assert (TREE_CODE (decl) == VAR_DECL
134 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)));
135
136 if (!varpool_hash)
137 varpool_hash = htab_create_ggc (10, hash_varpool_node,
138 eq_varpool_node, NULL);
139 key.decl = decl;
140 slot = (struct varpool_node **)
141 htab_find_slot (varpool_hash, &key, INSERT);
142 if (*slot)
143 return *slot;
144 node = ggc_alloc_cleared_varpool_node ();
145 node->decl = decl;
146 node->order = cgraph_order++;
147 node->next = varpool_nodes;
148 ipa_empty_ref_list (&node->ref_list);
149 if (varpool_nodes)
150 varpool_nodes->prev = node;
151 varpool_nodes = node;
152 *slot = node;
153 return node;
154 }
155
/* Remove NODE from the varpool and release its memory.  Unlinks the node
   from the hash table, the main varpool_nodes list, the needed queue, and
   its comdat group ring, and drops its IPA reference lists.  Must not be
   called while the assembled-nodes queue is non-empty.  */
void
varpool_remove_node (struct varpool_node *node)
{
  void **slot;
  slot = htab_find_slot (varpool_hash, node, NO_INSERT);
  gcc_assert (*slot == node);
  htab_clear_slot (varpool_hash, slot);
  gcc_assert (!varpool_assembled_nodes_queue);
  /* Removing a variable also removes all its extra-name aliases, which
     are chained off NODE->extra_name (recursive calls pop them one by
     one).  */
  if (!node->alias)
    while (node->extra_name)
      varpool_remove_node (node->extra_name);
  /* Unlink from the doubly linked node list.  For an alias node the list
     head lives in the target's extra_name field rather than in the global
     varpool_nodes pointer.  */
  if (node->next)
    node->next->prev = node->prev;
  if (node->prev)
    node->prev->next = node->next;
  else
    {
      if (node->alias && node->extra_name)
	{
	  gcc_assert (node->extra_name->extra_name == node);
	  node->extra_name->extra_name = node->next;
	}
      else
	{
	  gcc_assert (varpool_nodes == node);
	  varpool_nodes = node->next;
	}
    }
  /* Unlink from the needed queue, updating the head, tail, and
     first-unanalyzed pointers if any of them referenced NODE.  */
  if (varpool_first_unanalyzed_node == node)
    varpool_first_unanalyzed_node = node->next_needed;
  if (node->next_needed)
    node->next_needed->prev_needed = node->prev_needed;
  else if (node->prev_needed)
    {
      gcc_assert (varpool_last_needed_node);
      varpool_last_needed_node = node->prev_needed;
    }
  if (node->prev_needed)
    node->prev_needed->next_needed = node->next_needed;
  else if (node->next_needed)
    {
      gcc_assert (varpool_nodes_queue == node);
      varpool_nodes_queue = node->next_needed;
    }
  /* Remove NODE from its circular same_comdat_group ring, if any.  PREV
     ends up as the member immediately before NODE in the ring.  */
  if (node->same_comdat_group)
    {
      struct varpool_node *prev;
      for (prev = node->same_comdat_group;
	   prev->same_comdat_group != node;
	   prev = prev->same_comdat_group)
	;
      /* A two-element ring collapses to no ring at all.  */
      if (node->same_comdat_group == prev)
	prev->same_comdat_group = NULL;
      else
	prev->same_comdat_group = node->same_comdat_group;
      node->same_comdat_group = NULL;
    }
  ipa_remove_all_references (&node->ref_list);
  ipa_remove_all_refering (&node->ref_list);
  ggc_free (node);
}
218
/* Dump human-readable information about varpool NODE to file F, on a
   single line followed by its IPA reference lists.  */
void
dump_varpool_node (FILE *f, struct varpool_node *node)
{
  fprintf (f, "%s:", varpool_node_name (node));
  /* Availability can be computed only once the cgraph is ready.  */
  fprintf (f, " availability:%s",
	   cgraph_function_flags_ready
	   ? cgraph_availability_names[cgraph_variable_initializer_availability (node)]
	   : "not-ready");
  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
    fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
  if (DECL_INITIAL (node->decl))
    fprintf (f, " initialized");
  if (TREE_ASM_WRITTEN (node->decl))
    fprintf (f, " (asm written)");
  /* Flags tracking the node's progress through the compilation
     pipeline.  */
  if (node->needed)
    fprintf (f, " needed");
  if (node->analyzed)
    fprintf (f, " analyzed");
  if (node->finalized)
    fprintf (f, " finalized");
  if (node->output)
    fprintf (f, " output");
  if (node->externally_visible)
    fprintf (f, " externally_visible");
  if (node->in_other_partition)
    fprintf (f, " in_other_partition");
  else if (node->used_from_other_partition)
    fprintf (f, " used_from_other_partition");
  fprintf (f, "\n");
  fprintf (f, " References: ");
  ipa_dump_references (f, &node->ref_list);
  fprintf (f, " Refering this var: ");
  ipa_dump_refering (f, &node->ref_list);
}
254
255 /* Dump the variable pool. */
256 void
257 dump_varpool (FILE *f)
258 {
259 struct varpool_node *node;
260
261 fprintf (f, "variable pool:\n\n");
262 for (node = varpool_nodes; node; node = node->next)
263 dump_varpool_node (f, node);
264 }
265
/* Dump the variable pool to stderr.  Convenience entry point intended to
   be called from a debugger.  */

DEBUG_FUNCTION void
debug_varpool (void)
{
  dump_varpool (stderr);
}
273
274 /* Given an assembler name, lookup node. */
275 struct varpool_node *
276 varpool_node_for_asm (tree asmname)
277 {
278 struct varpool_node *node;
279
280 for (node = varpool_nodes; node ; node = node->next)
281 if (decl_assembler_name_equal (node->decl, asmname))
282 return node;
283
284 return NULL;
285 }
286
/* Helper function for finalization code -- append NODE to the tail of the
   needed queue so it will be analyzed and compiled.  Assumes NODE is not
   already on the queue.  */
static void
varpool_enqueue_needed_node (struct varpool_node *node)
{
  /* Link NODE after the current tail, if any.  */
  if (varpool_last_needed_node)
    {
      varpool_last_needed_node->next_needed = node;
      node->prev_needed = varpool_last_needed_node;
    }
  varpool_last_needed_node = node;
  node->next_needed = NULL;
  /* The first node enqueued becomes both the queue head and the first
     unanalyzed node.  */
  if (!varpool_nodes_queue)
    varpool_nodes_queue = node;
  if (!varpool_first_unanalyzed_node)
    varpool_first_unanalyzed_node = node;
  notice_global_symbol (node->decl);
}
305
306 /* Notify finalize_compilation_unit that given node is reachable
307 or needed. */
308 void
309 varpool_mark_needed_node (struct varpool_node *node)
310 {
311 if (node->alias && node->extra_name)
312 node = node->extra_name;
313 if (!node->needed && node->finalized
314 && !TREE_ASM_WRITTEN (node->decl))
315 varpool_enqueue_needed_node (node);
316 node->needed = 1;
317 }
318
319 /* Reset the queue of needed nodes. */
320 void
321 varpool_reset_queue (void)
322 {
323 varpool_last_needed_node = NULL;
324 varpool_nodes_queue = NULL;
325 varpool_first_unanalyzed_node = NULL;
326 }
327
328 /* Determine if variable DECL is needed. That is, visible to something
329 either outside this translation unit, something magic in the system
330 configury */
331 bool
332 decide_is_variable_needed (struct varpool_node *node, tree decl)
333 {
334 if (node->used_from_other_partition)
335 return true;
336 /* If the user told us it is used, then it must be so. */
337 if ((node->externally_visible && !DECL_COMDAT (decl))
338 || node->force_output)
339 return true;
340
341 /* Externally visible variables must be output. The exception is
342 COMDAT variables that must be output only when they are needed. */
343 if (TREE_PUBLIC (decl)
344 && !flag_whole_program
345 && !flag_lto
346 && !flag_whopr
347 && !DECL_COMDAT (decl)
348 && !DECL_EXTERNAL (decl))
349 return true;
350
351 /* When not reordering top level variables, we have to assume that
352 we are going to keep everything. */
353 if (flag_toplevel_reorder)
354 return false;
355
356 /* We want to emit COMDAT variables only when absolutely necessary. */
357 if (DECL_COMDAT (decl))
358 return false;
359 return true;
360 }
361
362 /* Return if NODE is constant and its initial value is known (so we can do
363 constant folding). The decision depends on whole program decisions
364 and can not be recomputed at ltrans stage for variables from other
365 partitions. For this reason the new value should be always combined
366 with the previous knowledge. */
367
368 bool
369 varpool_decide_const_value_known (struct varpool_node *node)
370 {
371 tree decl = node->decl;
372
373 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
374 gcc_assert (TREE_CODE (decl) == VAR_DECL);
375 if (!TREE_READONLY (decl))
376 return false;
377 /* Variables declared 'const' without an initializer
378 have zero as the initializer if they may not be
379 overridden at link or run time. */
380 if (!DECL_INITIAL (decl)
381 && (DECL_EXTERNAL (decl)
382 || DECL_REPLACEABLE_P (decl)))
383 return false;
384
385 /* Variables declared `const' with an initializer are considered
386 to not be overwritable with different initializer by default.
387
388 ??? Previously we behaved so for scalar variables but not for array
389 accesses. */
390 return true;
391 }
392
/* Mark DECL as finalized.  By finalizing the declaration, the frontend
   instructs the middle end to output the variable to the asm file, if
   needed or externally visible.  */
void
varpool_finalize_decl (tree decl)
{
  struct varpool_node *node = varpool_node (decl);

  gcc_assert (TREE_STATIC (decl));

  /* The first declaration of a variable that comes through this function
     decides whether it is global (in C, has external linkage)
     or local (in C, has internal linkage).  So do nothing more
     if this function has already run.  */
  if (node->finalized)
    {
      if (cgraph_global_info_ready)
	varpool_assemble_pending_decls ();
      return;
    }
  /* The node may have been marked needed before it was finalized; only
     now can it actually be enqueued for analysis and output.  */
  if (node->needed)
    varpool_enqueue_needed_node (node);
  node->finalized = true;
  /* Volatile variables and those carrying the "used" attribute must be
     kept even when apparently unreferenced.  */
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl))
    node->force_output = true;

  if (decide_is_variable_needed (node, decl))
    varpool_mark_needed_node (node);
  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  else if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    varpool_mark_needed_node (node);
  /* OR, never AND: once known, const-ness must not be forgotten (it can
     not be recomputed at ltrans stage for other partitions).  */
  node->const_value_known |= varpool_decide_const_value_known (node);
  if (cgraph_global_info_ready)
    varpool_assemble_pending_decls ();
}
430
431 /* Return variable availability. See cgraph.h for description of individual
432 return values. */
433 enum availability
434 cgraph_variable_initializer_availability (struct varpool_node *node)
435 {
436 gcc_assert (cgraph_function_flags_ready);
437 if (!node->finalized)
438 return AVAIL_NOT_AVAILABLE;
439 if (!TREE_PUBLIC (node->decl))
440 return AVAIL_AVAILABLE;
441 /* If the variable can be overwritten, return OVERWRITABLE. Takes
442 care of at least two notable extensions - the COMDAT variables
443 used to share template instantiations in C++. */
444 if (!(*targetm.binds_local_p) (node->decl) && !DECL_COMDAT (node->decl))
445 return AVAIL_OVERWRITABLE;
446 return AVAIL_AVAILABLE;
447 }
448
/* Walk the decls we marked as necessary and see if they reference new
   variables or functions and add them into the worklists.  Return true
   if any node was processed.  */
bool
varpool_analyze_pending_decls (void)
{
  bool changed = false;

  timevar_push (TV_VARPOOL);
  while (varpool_first_unanalyzed_node)
    {
      struct varpool_node *node = varpool_first_unanalyzed_node, *next;
      tree decl = node->decl;
      /* Remember the pre-visit state; the flag is set before we recurse
	 via record_references_in_initializer.  */
      bool analyzed = node->analyzed;

      varpool_first_unanalyzed_node->analyzed = true;

      varpool_first_unanalyzed_node = varpool_first_unanalyzed_node->next_needed;

      /* When reading back varpool at LTO time, we re-construct the queue in order
	 to have "needed" list right by inserting all needed nodes into varpool.
	 We however don't want to re-analyze already analyzed nodes.  */
      if (!analyzed)
	{
	  gcc_assert (!in_lto_p || cgraph_function_flags_ready);
	  /* Compute the alignment early so function body expanders are
	     already informed about increased alignment.  */
	  align_variable (decl, 0);
	}
      /* Record IPA references from the initializer; ANALYZED tells the
	 recorder whether this node was visited before.  */
      if (DECL_INITIAL (decl))
	record_references_in_initializer (decl, analyzed);
      /* A needed comdat group member drags in the whole group.  */
      if (node->same_comdat_group)
	{
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    varpool_mark_needed_node (next);
	}
      changed = true;
    }
  timevar_pop (TV_VARPOOL);
  return changed;
}
491
/* Output one variable, if necessary.  Return whether we output it.  */
bool
varpool_assemble_decl (struct varpool_node *node)
{
  tree decl = node->decl;

  /* Skip anything that is not assembled directly: already-written decls,
     aliases (emitted with their target below), variables belonging to
     another LTO partition, externals, and variables with a value
     expression.  */
  if (!TREE_ASM_WRITTEN (decl)
      && !node->alias
      && !node->in_other_partition
      && !DECL_EXTERNAL (decl)
      && (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
    {
      assemble_variable (decl, 0, 1, 0);
      /* assemble_variable may decline to emit anything; only record the
	 node as assembled when asm was actually written.  */
      if (TREE_ASM_WRITTEN (decl))
	{
	  struct varpool_node *alias;

	  /* Push the node onto the assembled queue (reusing the needed
	     queue links) for later debug output.  */
	  node->next_needed = varpool_assembled_nodes_queue;
	  node->prev_needed = NULL;
	  if (varpool_assembled_nodes_queue)
	    varpool_assembled_nodes_queue->prev_needed = node;
	  varpool_assembled_nodes_queue = node;
	  node->finalized = 1;

	  /* Also emit any extra name aliases.  */
	  for (alias = node->extra_name; alias; alias = alias->next)
	    {
	      /* Update linkage fields in case they've changed.  */
	      DECL_WEAK (alias->decl) = DECL_WEAK (decl);
	      TREE_PUBLIC (alias->decl) = TREE_PUBLIC (decl);
	      DECL_VISIBILITY (alias->decl) = DECL_VISIBILITY (decl);
	      assemble_alias (alias->decl, DECL_ASSEMBLER_NAME (decl));
	    }

	  return true;
	}
    }

  return false;
}
532
/* Optimization of function bodies might've rendered some variables as
   unnecessary so we want to avoid these from being compiled.

   This is done by pruning the queue and keeping only the variables that
   really appear needed (ie they are either externally visible or referenced
   by compiled function).  Re-doing the reachability analysis on variables
   brings back the remaining variables referenced by these.  */
void
varpool_remove_unreferenced_decls (void)
{
  struct varpool_node *next, *node = varpool_nodes_queue;

  /* Detach the old queue; NODE still walks its links below while the
     surviving nodes are re-enqueued onto the fresh queue.  */
  varpool_reset_queue ();

  /* After errors nothing will be output, so pruning is pointless.  */
  if (seen_error ())
    return;

  while (node)
    {
      tree decl = node->decl;
      next = node->next_needed;
      /* Clear the flag first; varpool_mark_needed_node sets it again for
	 the nodes we keep.  */
      node->needed = 0;

      if (node->finalized
	  && (decide_is_variable_needed (node, decl)
	      /* ??? Cgraph does not yet rule the world with an iron hand,
		 and does not control the emission of debug information.
		 After a variable has its DECL_RTL set, we must assume that
		 it may be referenced by the debug information, and we can
		 no longer elide it.  */
	      || DECL_RTL_SET_P (decl)))
	varpool_mark_needed_node (node);

      node = next;
    }
  /* Make sure we mark alias targets as used targets.  */
  finish_aliases_1 ();
  varpool_analyze_pending_decls ();
}
572
/* Output all variables enqueued to be assembled.  Return true if any
   variable was actually written out.  */
bool
varpool_assemble_pending_decls (void)
{
  bool changed = false;

  /* After errors no output is produced.  */
  if (seen_error ())
    return false;

  timevar_push (TV_VAROUT);
  /* EH might mark decls as needed during expansion.  This should be safe since
     we don't create references to new function, but it should not be used
     elsewhere.  */
  varpool_analyze_pending_decls ();

  while (varpool_nodes_queue)
    {
      struct varpool_node *node = varpool_nodes_queue;

      varpool_nodes_queue = varpool_nodes_queue->next_needed;
      if (varpool_assemble_decl (node))
	changed = true;
      else
	{
	  /* Not assembled: clear the stale queue links so the node is
	     left in a clean state.  */
	  node->prev_needed = NULL;
	  node->next_needed = NULL;
	}
    }
  /* varpool_nodes_queue is now empty, clear the pointer to the last element
     in the queue.  */
  varpool_last_needed_node = NULL;
  timevar_pop (TV_VAROUT);
  return changed;
}
607
608 /* Remove all elements from the queue so we can re-use it for debug output. */
609 void
610 varpool_empty_needed_queue (void)
611 {
612 /* EH might mark decls as needed during expansion. This should be safe since
613 we don't create references to new function, but it should not be used
614 elsewhere. */
615 varpool_analyze_pending_decls ();
616
617 while (varpool_nodes_queue)
618 {
619 struct varpool_node *node = varpool_nodes_queue;
620 varpool_nodes_queue = varpool_nodes_queue->next_needed;
621 node->next_needed = NULL;
622 node->prev_needed = NULL;
623 }
624 /* varpool_nodes_queue is now empty, clear the pointer to the last element
625 in the queue. */
626 varpool_last_needed_node = NULL;
627 }
628
/* Create a new global variable of type TYPE.  The variable gets a fresh
   temporary name, is made static, marked used and needed, and is
   immediately finalized.  Return its declaration.  */
tree
add_new_static_var (tree type)
{
  tree new_decl;
  struct varpool_node *new_node;

  new_decl = create_tmp_var (type, NULL);
  DECL_NAME (new_decl) = create_tmp_var_name (NULL);
  /* Turn the function-local temporary into a writable file-scope
     static with no containing context.  */
  TREE_READONLY (new_decl) = 0;
  TREE_STATIC (new_decl) = 1;
  TREE_USED (new_decl) = 1;
  DECL_CONTEXT (new_decl) = NULL_TREE;
  DECL_ABSTRACT (new_decl) = 0;
  lang_hooks.dup_lang_specific_decl (new_decl);
  create_var_ann (new_decl);
  new_node = varpool_node (new_decl);
  varpool_mark_needed_node (new_node);
  add_referenced_var (new_decl);
  varpool_finalize_decl (new_decl);

  return new_node->decl;
}
652
/* Attempt to mark ALIAS as an alias to DECL.  Return TRUE if successful.
   Extra name aliases are output whenever DECL is output.  */

bool
varpool_extra_name_alias (tree alias, tree decl)
{
  struct varpool_node key, *alias_node, *decl_node, **slot;

#ifndef ASM_OUTPUT_DEF
  /* If aliases aren't supported by the assembler, fail.  (Note: the
     remainder of the function is unreachable in that configuration.)  */
  return false;
#endif

  gcc_assert (TREE_CODE (decl) == VAR_DECL);
  gcc_assert (TREE_CODE (alias) == VAR_DECL);
  /* Make sure the hash table has been created.  */
  decl_node = varpool_node (decl);

  key.decl = alias;

  slot = (struct varpool_node **) htab_find_slot (varpool_hash, &key, INSERT);

  /* If the varpool_node has been already created, fail.  */
  if (*slot)
    return false;

  /* Allocate the alias node and splice it into the target's extra_name
     list.  On an alias node, extra_name points back at the target
     (rather than at further aliases).  */
  alias_node = ggc_alloc_cleared_varpool_node ();
  alias_node->decl = alias;
  alias_node->alias = 1;
  alias_node->extra_name = decl_node;
  alias_node->next = decl_node->extra_name;
  ipa_empty_ref_list (&alias_node->ref_list);
  if (decl_node->extra_name)
    decl_node->extra_name->prev = alias_node;
  decl_node->extra_name = alias_node;
  *slot = alias_node;
  return true;
}
691
692 #include "gt-varpool.h"