/* Utilities for ipa analysis.
   Copyright (C) 2005, 2007, 2008 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "splay-tree.h"
#include "ggc.h"
#include "ipa-utils.h"
#include "ipa-reference.h"
#include "gimple.h"
#include "cgraph.h"
#include "output.h"
#include "flags.h"
#include "timevar.h"
#include "diagnostic.h"
#include "langhooks.h"

/* Debugging function for postorder and inorder code.  NOTE is a string
   that is printed before the nodes are printed.  ORDER is an array of
   cgraph_nodes that has COUNT useful nodes in it.  */

void
ipa_print_order (FILE* out,
                 const char * note,
                 struct cgraph_node** order,
                 int count)
{
  int i;
  fprintf (out, "\n\n ordered call graph: %s\n", note);

  for (i = count - 1; i >= 0; i--)
    dump_cgraph_node (out, order[i]);
  fprintf (out, "\n");
  fflush (out);
}

\f
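/* Bookkeeping shared by the recursive strongly-connected-component
   walk below: the explicit node stack, the output array being filled,
   the splay tree of nodes not yet visited, and the flags handed down
   from ipa_reduced_postorder.  */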
struct searchc_env {
  struct cgraph_node **stack;
  int stack_size;
  struct cgraph_node **result;
  int order_pos;
  splay_tree nodes_marked_new;
  bool reduce;
  bool allow_overwritable;
  int count;
};

/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho, Hopcroft and Ullman's The Design and
   Analysis of Computer Algorithms (1974), pages 192-193.  This version
   has been customized for cgraph_nodes.  ENV holds the traversal state
   and is passed explicitly because the function is recursive and C has
   no nested functions.  This function should only be called from
   itself or from ipa_reduced_postorder.  V is the node to start
   searching from.  */

static void
searchc (struct searchc_env* env, struct cgraph_node *v,
         bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->symbol.aux;

  /* Mark node as old.  */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->uid);

  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  env->count++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

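  /* Walk all callees, recursing into those not yet visited and
     propagating LOW_LINK values back as in Tarjan's algorithm.  */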
  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info * w_info;
      enum availability avail;
      struct cgraph_node *w = cgraph_function_or_thunk_node (edge->callee, &avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
        continue;

      if (w->symbol.aux
          && (avail > AVAIL_OVERWRITABLE
              || (env->allow_overwritable && avail == AVAIL_OVERWRITABLE)))
        {
          w_info = (struct ipa_dfs_info *) w->symbol.aux;
          if (w_info->new_node)
            {
              searchc (env, w, ignore_edge);
              v_info->low_link =
                (v_info->low_link < w_info->low_link) ?
                v_info->low_link : w_info->low_link;
            }
          else
            if ((w_info->dfn_number < v_info->dfn_number)
                && (w_info->on_stack))
              v_info->low_link =
                (w_info->dfn_number < v_info->low_link) ?
                w_info->dfn_number : v_info->low_link;
        }
    }


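  /* If V is the root of a strongly connected component, pop the whole
     component off the stack.  In reduce mode only V is emitted to the
     result and the other members are chained through next_cycle;
     otherwise every member is emitted.  */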
  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do {
        x = env->stack[--(env->stack_size)];
        x_info = (struct ipa_dfs_info *) x->symbol.aux;
        x_info->on_stack = false;
        x_info->scc_no = v_info->dfn_number;

        if (env->reduce)
          {
            x_info->next_cycle = last;
            last = x;
          }
        else
          env->result[env->order_pos++] = x;
      }
      while (v != x);
      if (env->reduce)
        env->result[env->order_pos++] = v;
    }
}

/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   Set ALLOW_OVERWRITABLE if nodes with such availability should be
   included.  IGNORE_EDGE, if non-NULL, is a hook that may mark some
   edges as insignificant for the topological sort.  */
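
/* A typical caller allocates ORDER itself and frees the per-node info
   afterwards, roughly as follows (an illustrative sketch only, not
   copied from any particular pass):

     struct cgraph_node **order
       = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
     int n = ipa_reduced_postorder (order, true, false, NULL);
     ...walk order[0 .. n-1]...
     ipa_free_postorder_info ();
     free (order);  */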

int
ipa_reduced_postorder (struct cgraph_node **order,
                       bool reduce, bool allow_overwritable,
                       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;
  env.allow_overwritable = allow_overwritable;

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = cgraph_function_body_availability (node);

      if (avail > AVAIL_OVERWRITABLE
          || (allow_overwritable
              && (avail == AVAIL_OVERWRITABLE)))
        {
          /* Reuse the info if it is already there.  */
          struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->symbol.aux;
          if (!info)
            info = XCNEW (struct ipa_dfs_info);
          info->new_node = true;
          info->on_stack = false;
          info->next_cycle = NULL;
          node->symbol.aux = info;

          splay_tree_insert (env.nodes_marked_new,
                             (splay_tree_key)node->uid,
                             (splay_tree_value)node);
        }
      else
        node->symbol.aux = NULL;
    }
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *)result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}

/* Deallocate all ipa_dfs_info structures pointed to by the aux pointer
   of call graph nodes.  */

void
ipa_free_postorder_info (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      /* Get rid of the aux information.  */
      if (node->symbol.aux)
        {
          free (node->symbol.aux);
          node->symbol.aux = NULL;
        }
    }
}

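/* One entry of the explicit DFS stack used by ipa_reverse_postorder:
   the node being visited, the next caller edge still to be scanned,
   and the index of the next alias reference still to be scanned.  */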
struct postorder_stack
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;
  int ref;
};

/* Fill array ORDER with all nodes with output flag set in the reverse
   topological order.  Return the number of elements in the array.
   FIXME: While walking, consider aliases, too.  */

int
ipa_reverse_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge;
  int pass;
  struct ipa_ref *ref;

  struct postorder_stack *stack =
    XCNEWVEC (struct postorder_stack, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
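  /* The aux field of each node is used as a visited flag: it is NULL
     before the node has been reached and non-NULL once it has been
     placed on the DFS stack.  */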
  FOR_EACH_FUNCTION (node)
    node->symbol.aux = NULL;
  for (pass = 0; pass < 2; pass++)
    FOR_EACH_FUNCTION (node)
      if (!node->symbol.aux
          && (pass
              || (!node->symbol.address_taken
                  && !node->global.inlined_to
                  && !node->alias && !node->thunk.thunk_p
                  && !cgraph_only_called_directly_p (node))))
        {
          stack_size = 0;
          stack[stack_size].node = node;
          stack[stack_size].edge = node->callers;
          stack[stack_size].ref = 0;
          node->symbol.aux = (void *)(size_t)1;
          while (stack_size >= 0)
            {
              while (true)
                {
                  node2 = NULL;
                  while (stack[stack_size].edge && !node2)
                    {
                      edge = stack[stack_size].edge;
                      node2 = edge->caller;
                      stack[stack_size].edge = edge->next_caller;
                      /* Break possible cycles involving always-inline
                         functions by ignoring edges from always-inline
                         functions to non-always-inline functions.  */
                      if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->symbol.decl)
                          && !DECL_DISREGARD_INLINE_LIMITS
                                (cgraph_function_node (edge->callee, NULL)->symbol.decl))
                        node2 = NULL;
                    }
                  for (; ipa_ref_list_referring_iterate (&stack[stack_size].node->symbol.ref_list,
                                                         stack[stack_size].ref,
                                                         ref) && !node2;
                       stack[stack_size].ref++)
                    {
                      if (ref->use == IPA_REF_ALIAS)
                        node2 = ipa_ref_referring_node (ref);
                    }
                  if (!node2)
                    break;
                  if (!node2->symbol.aux)
                    {
                      stack[++stack_size].node = node2;
                      stack[stack_size].edge = node2->callers;
                      stack[stack_size].ref = 0;
                      node2->symbol.aux = (void *)(size_t)1;
                    }
                }
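              /* All callers and alias references of the node on top of
                 the stack have been visited; emit it and pop.  */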
              order[order_pos++] = stack[stack_size--].node;
            }
        }
  free (stack);
  FOR_EACH_FUNCTION (node)
    node->symbol.aux = NULL;
  return order_pos;
}


/* Given a memory reference T, return the variable at the bottom
   of the access.  Unlike get_base_address, this will recurse through
   INDIRECT_REFs.  */
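/* For example, for a reference such as s.a.b this walks down the
   COMPONENT_REFs and returns the underlying variable S; for *p it
   returns the pointer P.  (Illustrative examples only.)  */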

tree
get_base_var (tree t)
{
  while (!SSA_VAR_P (t)
         && (!CONSTANT_CLASS_P (t))
         && TREE_CODE (t) != LABEL_DECL
         && TREE_CODE (t) != FUNCTION_DECL
         && TREE_CODE (t) != CONST_DECL
         && TREE_CODE (t) != CONSTRUCTOR)
    {
      t = TREE_OPERAND (t, 0);
    }
  return t;
}


/* Create a new cgraph node set.  */
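
/* A minimal usage sketch of the node set API (illustrative only):

     cgraph_node_set set = cgraph_node_set_new ();
     cgraph_node_set_add (set, node);
     if (!csi_end_p (cgraph_node_set_find (set, node)))
       ...;
     cgraph_node_set_remove (set, node);
     free_cgraph_node_set (set);  */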

cgraph_node_set
cgraph_node_set_new (void)
{
  cgraph_node_set new_node_set;

  new_node_set = XCNEW (struct cgraph_node_set_def);
  new_node_set->map = pointer_map_create ();
  new_node_set->nodes = NULL;
  return new_node_set;
}


/* Add cgraph_node NODE to cgraph_node_set SET.  */

void
cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot;

  slot = pointer_map_insert (set->map, node);

  if (*slot)
    {
      int index = (size_t) *slot - 1;
      gcc_checking_assert ((VEC_index (cgraph_node_ptr, set->nodes, index)
                            == node));
      return;
    }

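  /* The slot stores the node's index into the NODES vector biased by
     one, so that an empty pointer-map slot (zero) is distinguishable
     from index 0.  */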
  *slot = (void *)(size_t) (VEC_length (cgraph_node_ptr, set->nodes) + 1);

  /* Insert into node vector.  */
  VEC_safe_push (cgraph_node_ptr, heap, set->nodes, node);
}


/* Remove cgraph_node NODE from cgraph_node_set SET.  */

void
cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot, **last_slot;
  int index;
  struct cgraph_node *last_node;

  slot = pointer_map_contains (set->map, node);
  if (slot == NULL || !*slot)
    return;

  index = (size_t) *slot - 1;
  gcc_checking_assert (VEC_index (cgraph_node_ptr, set->nodes, index)
                       == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (cgraph_node_ptr, set->nodes);
  if (last_node != node)
    {
      last_slot = pointer_map_contains (set->map, last_node);
      gcc_checking_assert (last_slot && *last_slot);
      *last_slot = (void *)(size_t) (index + 1);

      /* Move the last element to the original spot of NODE.  */
      VEC_replace (cgraph_node_ptr, set->nodes, index, last_node);
    }

  /* Remove element from hash table.  */
  *slot = NULL;
}


/* Find NODE in SET and return an iterator to it if found.  A null iterator
   is returned if NODE is not in SET.  */

cgraph_node_set_iterator
cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot;
  cgraph_node_set_iterator csi;

  slot = pointer_map_contains (set->map, node);
  if (slot == NULL || !*slot)
    csi.index = (unsigned) ~0;
  else
    csi.index = (size_t)*slot - 1;
  csi.set = set;

  return csi;
}


/* Dump content of SET to file F.  */

void
dump_cgraph_node_set (FILE *f, cgraph_node_set set)
{
  cgraph_node_set_iterator iter;

  for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
    {
      struct cgraph_node *node = csi_node (iter);
      fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
    }
  fprintf (f, "\n");
}


/* Dump content of SET to stderr.  */

DEBUG_FUNCTION void
debug_cgraph_node_set (cgraph_node_set set)
{
  dump_cgraph_node_set (stderr, set);
}


/* Free cgraph node set.  */

void
free_cgraph_node_set (cgraph_node_set set)
{
  VEC_free (cgraph_node_ptr, heap, set->nodes);
  pointer_map_destroy (set->map);
  free (set);
}


/* Create a new varpool node set.  */

varpool_node_set
varpool_node_set_new (void)
{
  varpool_node_set new_node_set;

  new_node_set = XCNEW (struct varpool_node_set_def);
  new_node_set->map = pointer_map_create ();
  new_node_set->nodes = NULL;
  return new_node_set;
}


/* Add varpool_node NODE to varpool_node_set SET.  */

void
varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
{
  void **slot;

  slot = pointer_map_insert (set->map, node);

  if (*slot)
    {
      int index = (size_t) *slot - 1;
      gcc_checking_assert ((VEC_index (varpool_node_ptr, set->nodes, index)
                            == node));
      return;
    }

  *slot = (void *)(size_t) (VEC_length (varpool_node_ptr, set->nodes) + 1);

  /* Insert into node vector.  */
  VEC_safe_push (varpool_node_ptr, heap, set->nodes, node);
}


/* Remove varpool_node NODE from varpool_node_set SET.  */

void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  int index;
  struct varpool_node *last_node;

  slot = pointer_map_contains (set->map, node);
  if (slot == NULL || !*slot)
    return;

  index = (size_t) *slot - 1;
  gcc_checking_assert (VEC_index (varpool_node_ptr, set->nodes, index)
                       == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (varpool_node_ptr, set->nodes);
  if (last_node != node)
    {
      last_slot = pointer_map_contains (set->map, last_node);
      gcc_checking_assert (last_slot && *last_slot);
      *last_slot = (void *)(size_t) (index + 1);

      /* Move the last element to the original spot of NODE.  */
      VEC_replace (varpool_node_ptr, set->nodes, index, last_node);
    }

  /* Remove element from hash table.  */
  *slot = NULL;
}


/* Find NODE in SET and return an iterator to it if found.  A null iterator
   is returned if NODE is not in SET.  */

varpool_node_set_iterator
varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
{
  void **slot;
  varpool_node_set_iterator vsi;

  slot = pointer_map_contains (set->map, node);
  if (slot == NULL || !*slot)
    vsi.index = (unsigned) ~0;
  else
    vsi.index = (size_t)*slot - 1;
  vsi.set = set;

  return vsi;
}


/* Dump content of SET to file F.  */

void
dump_varpool_node_set (FILE *f, varpool_node_set set)
{
  varpool_node_set_iterator iter;

  for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
    {
      struct varpool_node *node = vsi_node (iter);
      fprintf (f, " %s", varpool_node_name (node));
    }
  fprintf (f, "\n");
}


/* Free varpool node set.  */

void
free_varpool_node_set (varpool_node_set set)
{
  VEC_free (varpool_node_ptr, heap, set->nodes);
  pointer_map_destroy (set->map);
  free (set);
}


/* Dump content of SET to stderr.  */

DEBUG_FUNCTION void
debug_varpool_node_set (varpool_node_set set)
{
  dump_varpool_node_set (stderr, set);
}