gcc/tree-phinodes.c
/* Generic routines for manipulating PHIs
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa.h"
#include "diagnostic-core.h"

/* Rewriting a function into SSA form can create a huge number of PHIs,
   many of which may be thrown away shortly after their creation if jumps
   were threaded through PHI nodes.

   While our garbage collection mechanisms will handle this situation, it
   is extremely wasteful to create nodes and throw them away, especially
   when the nodes can be reused.

   For PR 8361, we can significantly reduce the number of nodes allocated,
   and thus the total amount of memory allocated, by managing PHIs a
   little.  This additionally helps reduce the amount of work done by the
   garbage collector.  Similar results have been seen on a wider variety
   of tests (such as the compiler itself).

   PHI nodes have different sizes, so we can't have a single list of all
   the PHI nodes as it would be too expensive to walk down that list to
   find a PHI of a suitable size.

   Instead we have an array of lists of free PHI nodes.  The array is
   indexed by the number of PHI alternatives that a PHI node can hold,
   except for the last array member, which holds all remaining PHI
   nodes.

   So to find a free PHI node, we compute its index into the free PHI
   node array and see if there are any elements with an exact match.
   If so, then we are done.  Otherwise, we test the next larger size
   up and continue until we are in the last array element.

   We do not actually walk members of the last array element.  While it
   might allow us to pick up a few reusable PHI nodes, it could potentially
   be very expensive if the program has released a bunch of large PHI nodes,
   but keeps asking for even larger PHI nodes.  Experiments have shown that
   walking the elements of the last array entry would result in finding less
   than 0.1% additional reusable PHI nodes.

   Note that we can never have fewer than two PHI argument slots.  Thus,
   the -2 on all the calculations below.  */

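/* To illustrate the indexing described above: free_phinodes[] has
   NUM_BUCKETS - 2 == 8 lists.  release_phi_node puts a PHI node of
   capacity N on free_phinodes[MIN (N, NUM_BUCKETS - 1) - 2], so
   capacities 2 through 8 each get their own list and all larger nodes
   share the last one.  allocate_phi_node starts its search at bucket
   LEN - 2 and, as noted above, never walks the members of the shared
   last list; it only inspects a single node there.  */
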
#define NUM_BUCKETS 10
static GTY ((deletable (""))) vec<gimple, va_gc> *free_phinodes[NUM_BUCKETS - 2];
static unsigned long free_phinode_count;

static int ideal_phi_node_len (int);

unsigned int phi_nodes_reused;
unsigned int phi_nodes_created;

/* Dump some simple statistics regarding the re-use of PHI nodes.  */

void
phinodes_print_statistics (void)
{
  fprintf (stderr, "PHI nodes allocated: %u\n", phi_nodes_created);
  fprintf (stderr, "PHI nodes reused: %u\n", phi_nodes_reused);
}

/* Allocate a PHI node with at least LEN arguments.  If the free list
   happens to contain a PHI node with LEN arguments or more, return
   that one.  */

static inline gphi *
allocate_phi_node (size_t len)
{
  gphi *phi;
  size_t bucket = NUM_BUCKETS - 2;
  size_t size = sizeof (struct gphi)
                + (len - 1) * sizeof (struct phi_arg_d);

  if (free_phinode_count)
    for (bucket = len - 2; bucket < NUM_BUCKETS - 2; bucket++)
      if (free_phinodes[bucket])
        break;

  /* If our free list has an element, then use it.  */
  if (bucket < NUM_BUCKETS - 2
      && gimple_phi_capacity ((*free_phinodes[bucket])[0]) >= len)
    {
      free_phinode_count--;
      phi = as_a <gphi *> (free_phinodes[bucket]->pop ());
      if (free_phinodes[bucket]->is_empty ())
        vec_free (free_phinodes[bucket]);
      if (GATHER_STATISTICS)
        phi_nodes_reused++;
    }
  else
    {
      phi = static_cast <gphi *> (ggc_internal_alloc (size));
      if (GATHER_STATISTICS)
        {
          enum gimple_alloc_kind kind = gimple_alloc_kind (GIMPLE_PHI);
          phi_nodes_created++;
          gimple_alloc_counts[(int) kind]++;
          gimple_alloc_sizes[(int) kind] += size;
        }
    }

  return phi;
}

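/* For example, a request for LEN == 5 scans buckets 3 through 7 and
   reuses a node from the first non-empty one, provided the node
   inspected there really has capacity >= 5 (that proviso only matters
   for the shared last bucket, where capacities vary); otherwise a
   fresh node of SIZE bytes is obtained from the GC allocator.  */
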
/* Given LEN, the original number of requested PHI arguments, return
   a new, "ideal" length for the PHI node.  The "ideal" length rounds
   the total size of the PHI node up to the next power of two bytes.

   Rounding up will not result in wasting any memory since the size request
   will be rounded up by the GC system anyway.  [ Note this is not entirely
   true since the original length might have fit on one of the special
   GC pages.  ]  By rounding up, we may avoid the need to reallocate the
   PHI node later if we increase the number of arguments for the PHI.  */

static int
ideal_phi_node_len (int len)
{
  size_t size, new_size;
  int log2, new_len;

  /* We do not support allocations of less than two PHI argument slots.  */
  if (len < 2)
    len = 2;

  /* Compute the number of bytes of the original request.  */
  size = sizeof (struct gphi)
         + (len - 1) * sizeof (struct phi_arg_d);

  /* Round it up to the next power of two.  */
  log2 = ceil_log2 (size);
  new_size = 1 << log2;

  /* Now compute and return the number of PHI argument slots given an
     ideal size allocation.  */
  new_len = len + (new_size - size) / sizeof (struct phi_arg_d);
  return new_len;
}

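/* A worked example of the rounding above, using hypothetical sizes
   (the real ones are host- and configuration-dependent): if
   sizeof (struct gphi) were 40 and sizeof (struct phi_arg_d) were 24,
   then LEN == 3 gives SIZE == 40 + 2 * 24 == 88, which rounds up to
   NEW_SIZE == 128; the 40 bytes of slack buy one extra argument slot,
   so NEW_LEN == 4.  */
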
/* Return a PHI node with LEN argument slots for variable VAR.  */

static gphi *
make_phi_node (tree var, int len)
{
  gphi *phi;
  int capacity, i;

  capacity = ideal_phi_node_len (len);

  phi = allocate_phi_node (capacity);

  /* We need to clear the entire PHI node, including the argument
     portion, because we represent a "missing PHI argument" by placing
     NULL_TREE in PHI_ARG_DEF.  */
  memset (phi, 0, (sizeof (struct gphi)
                   - sizeof (struct phi_arg_d)
                   + sizeof (struct phi_arg_d) * len));
  phi->code = GIMPLE_PHI;
  gimple_init_singleton (phi);
  phi->nargs = len;
  phi->capacity = capacity;
  if (!var)
    ;
  else if (TREE_CODE (var) == SSA_NAME)
    gimple_phi_set_result (phi, var);
  else
    gimple_phi_set_result (phi, make_ssa_name (var, phi));

  for (i = 0; i < capacity; i++)
    {
      use_operand_p imm;

      gimple_phi_arg_set_location (phi, i, UNKNOWN_LOCATION);
      imm = gimple_phi_arg_imm_use_ptr (phi, i);
      imm->use = gimple_phi_arg_def_ptr (phi, i);
      imm->prev = NULL;
      imm->next = NULL;
      imm->loc.stmt = phi;
    }

  return phi;
}

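/* Note that make_phi_node may hand back more room than was asked for:
   a request for LEN argument slots yields a node whose capacity is
   ideal_phi_node_len (LEN), with nargs set to exactly LEN.  The spare
   slots are already initialized (location and immediate-use links
   above), so growing the PHI later via reserve_phi_args_for_new_edge
   is cheap as long as the capacity is not exceeded.  */
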
/* We no longer need PHI, release it so that it may be reused.  */

void
release_phi_node (gimple phi)
{
  size_t bucket;
  size_t len = gimple_phi_capacity (phi);
  size_t x;

  for (x = 0; x < gimple_phi_num_args (phi); x++)
    {
      use_operand_p imm;
      imm = gimple_phi_arg_imm_use_ptr (phi, x);
      delink_imm_use (imm);
    }

  bucket = len > NUM_BUCKETS - 1 ? NUM_BUCKETS - 1 : len;
  bucket -= 2;
  vec_safe_push (free_phinodes[bucket], phi);
  free_phinode_count++;
}


/* Resize an existing PHI node.  The only way is up.  Return the
   possibly relocated phi.  */

static gphi *
resize_phi_node (gphi *phi, size_t len)
{
  size_t old_size, i;
  gphi *new_phi;

  gcc_assert (len > gimple_phi_capacity (phi));

  /* The garbage collector will not look at the PHI node beyond the
     first PHI_NUM_ARGS elements.  Therefore, all we have to copy is a
     portion of the PHI node currently in use.  */
  old_size = sizeof (struct gphi)
             + (gimple_phi_num_args (phi) - 1) * sizeof (struct phi_arg_d);

  new_phi = allocate_phi_node (len);

  memcpy (new_phi, phi, old_size);

  for (i = 0; i < gimple_phi_num_args (new_phi); i++)
    {
      use_operand_p imm, old_imm;
      imm = gimple_phi_arg_imm_use_ptr (new_phi, i);
      old_imm = gimple_phi_arg_imm_use_ptr (phi, i);
      imm->use = gimple_phi_arg_def_ptr (new_phi, i);
      relink_imm_use_stmt (imm, old_imm, new_phi);
    }

  new_phi->capacity = len;

  for (i = gimple_phi_num_args (new_phi); i < len; i++)
    {
      use_operand_p imm;

      gimple_phi_arg_set_location (new_phi, i, UNKNOWN_LOCATION);
      imm = gimple_phi_arg_imm_use_ptr (new_phi, i);
      imm->use = gimple_phi_arg_def_ptr (new_phi, i);
      imm->prev = NULL;
      imm->next = NULL;
      imm->loc.stmt = new_phi;
    }

  return new_phi;
}

/* Reserve PHI arguments for a new edge to basic block BB.  */

void
reserve_phi_args_for_new_edge (basic_block bb)
{
  size_t len = EDGE_COUNT (bb->preds);
  size_t cap = ideal_phi_node_len (len + 4);
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *stmt = gsi.phi ();

      if (len > gimple_phi_capacity (stmt))
        {
          gphi *new_phi = resize_phi_node (stmt, cap);

          /* The result of the PHI is defined by this PHI node.  */
          SSA_NAME_DEF_STMT (gimple_phi_result (new_phi)) = new_phi;
          gsi_set_stmt (&gsi, new_phi);

          release_phi_node (stmt);
          stmt = new_phi;
        }

      /* We represent a "missing PHI argument" by placing NULL_TREE in
         the corresponding slot.  If PHI arguments were added
         immediately after an edge is created, this zeroing would not
         be necessary, but unfortunately this is not the case.  For
         example, the loop optimizer duplicates several basic blocks,
         redirects edges, and then fixes up PHI arguments later in
         batch.  */
      SET_PHI_ARG_DEF (stmt, len - 1, NULL_TREE);
      gimple_phi_arg_set_location (stmt, len - 1, UNKNOWN_LOCATION);

      stmt->nargs++;
    }
}

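/* For instance, when a fifth predecessor edge is added to a block
   whose PHIs currently have capacity 4, each such PHI is replaced by
   a copy resized to ideal_phi_node_len (5 + 4) slots, i.e. at least
   nine after the usual power-of-two rounding; the slack of four extra
   arguments means several more edges can be added before another
   resize (and another round of immediate-use relinking) is needed.  */
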
/* Adds PHI to BB.  */

void
add_phi_node_to_bb (gphi *phi, basic_block bb)
{
  gimple_seq seq = phi_nodes (bb);
  /* Add the new PHI node to the list of PHI nodes for block BB.  */
  if (seq == NULL)
    set_phi_nodes (bb, gimple_seq_alloc_with_stmt (phi));
  else
    {
      gimple_seq_add_stmt (&seq, phi);
      gcc_assert (seq == phi_nodes (bb));
    }

  /* Associate BB to the PHI node.  */
  gimple_set_bb (phi, bb);
}

/* Create a new PHI node for variable VAR at basic block BB.  */

gphi *
create_phi_node (tree var, basic_block bb)
{
  gphi *phi = make_phi_node (var, EDGE_COUNT (bb->preds));

  add_phi_node_to_bb (phi, bb);
  return phi;
}


/* Add a new argument to PHI node PHI.  DEF is the incoming reaching
   definition and E is the edge through which DEF reaches PHI.  The
   argument is stored in the slot reserved for E (E->dest_idx); room
   for it was reserved when the edge was created, so PHI is never
   resized here.  */

void
add_phi_arg (gphi *phi, tree def, edge e, source_location locus)
{
  basic_block bb = e->dest;

  gcc_assert (bb == gimple_bb (phi));

  /* We resize PHI nodes upon edge creation.  We should always have
     enough room at this point, both for the argument count and for
     the destination index of E.  */
  gcc_assert (gimple_phi_num_args (phi) <= gimple_phi_capacity (phi));
  gcc_assert (e->dest_idx < gimple_phi_num_args (phi));

  /* Copy propagation needs to know which objects occur in abnormal
     PHI nodes.  This is a convenient place to record such information.  */
  if (e->flags & EDGE_ABNORMAL)
    {
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def) = 1;
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
    }

  SET_PHI_ARG_DEF (phi, e->dest_idx, def);
  gimple_phi_arg_set_location (phi, e->dest_idx, locus);
}

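/* A typical calling sequence (editor's sketch, not code from this
   file): once an edge E into BB exists -- edge creation reserves the
   argument slot via reserve_phi_args_for_new_edge -- a pass fills in
   the value flowing along E for every PHI in BB:

     for (gphi_iterator gsi = gsi_start_phis (bb);
          !gsi_end_p (gsi); gsi_next (&gsi))
       add_phi_arg (gsi.phi (), value_for_e, e, UNKNOWN_LOCATION);

   where VALUE_FOR_E is a placeholder for whatever definition the pass
   wants to flow along E.  */
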

/* Remove the Ith argument from PHI's argument list.  This routine
   implements removal by swapping the last alternative with the
   alternative we want to delete and then shrinking the vector, which
   is consistent with how we remove an edge from the edge vector.  */

static void
remove_phi_arg_num (gphi *phi, int i)
{
  int num_elem = gimple_phi_num_args (phi);

  gcc_assert (i < num_elem);

  /* Delink the item which is being removed.  */
  delink_imm_use (gimple_phi_arg_imm_use_ptr (phi, i));

  /* If it is not the last element, move the last element
     to the element we want to delete, resetting all the links.  */
  if (i != num_elem - 1)
    {
      use_operand_p old_p, new_p;
      old_p = gimple_phi_arg_imm_use_ptr (phi, num_elem - 1);
      new_p = gimple_phi_arg_imm_use_ptr (phi, i);
      /* Set use on new node, and link into last element's place.  */
      *(new_p->use) = *(old_p->use);
      relink_imm_use (new_p, old_p);
      /* Move the location as well.  */
      gimple_phi_arg_set_location (phi, i,
                                   gimple_phi_arg_location (phi, num_elem - 1));
    }

  /* Shrink the vector and return.  Note that we do not have to clear
     PHI_ARG_DEF because the garbage collector will not look at those
     elements beyond the first PHI_NUM_ARGS elements of the array.  */
  phi->nargs--;
}

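/* For example, removing argument 1 from a four-argument PHI copies
   argument 3 (its value, its location and its immediate-use link)
   into slot 1 and decrements nargs to 3.  Argument order is therefore
   not preserved, which is fine because the same swap-with-last scheme
   is used when the corresponding edge is removed from the edge
   vector.  */
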

/* Remove all PHI arguments associated with edge E.  */

void
remove_phi_args (edge e)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    remove_phi_arg_num (gsi.phi (),
                        e->dest_idx);
}


/* Remove the PHI node pointed-to by iterator GSI from basic block BB.  After
   removal, iterator GSI is updated to point to the next PHI node in the
   sequence.  If RELEASE_LHS_P is true, the LHS of this PHI node is released
   into the free pool of SSA names.  */

void
remove_phi_node (gimple_stmt_iterator *gsi, bool release_lhs_p)
{
  gimple phi = gsi_stmt (*gsi);

  if (release_lhs_p)
    insert_debug_temps_for_defs (gsi);

  gsi_remove (gsi, false);

  /* If we are deleting the PHI node, then we should release the
     SSA_NAME node so that it can be reused.  */
  release_phi_node (phi);
  if (release_lhs_p)
    release_ssa_name (gimple_phi_result (phi));
}

/* Remove all the phi nodes from BB.  */

void
remove_phi_nodes (basic_block bb)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
    remove_phi_node (&gsi, true);

  set_phi_nodes (bb, NULL);
}

/* Given PHI, return its RHS if the PHI is degenerate, otherwise return
   NULL.  */

tree
degenerate_phi_result (gphi *phi)
{
  tree lhs = gimple_phi_result (phi);
  tree val = NULL;
  size_t i;

  /* Ignoring arguments which are the same as LHS, if all the remaining
     arguments are the same, then the PHI is degenerate and has the
     value of that common argument.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (arg == lhs)
        continue;
      else if (!arg)
        break;
      else if (!val)
        val = arg;
      else if (arg == val)
        continue;
      /* We bring in some of operand_equal_p not only to speed things
         up, but also to avoid crashing when dereferencing the type of
         a released SSA name.  */
      else if (TREE_CODE (val) != TREE_CODE (arg)
               || TREE_CODE (val) == SSA_NAME
               || !operand_equal_p (arg, val, 0))
        break;
    }
  return (i == gimple_phi_num_args (phi) ? val : NULL);
}

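/* For example, x_4 = PHI <x_2, x_2, x_4> is degenerate and yields
   x_2: the argument equal to the LHS is ignored and the remaining
   arguments agree.  By contrast, x_4 = PHI <x_2, x_3> yields NULL, as
   would a PHI with a still-missing (NULL_TREE) argument.  */
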
/* Set PHI nodes of a basic block BB to SEQ.  */

void
set_phi_nodes (basic_block bb, gimple_seq seq)
{
  gimple_stmt_iterator i;

  gcc_checking_assert (!(bb->flags & BB_RTL));
  bb->il.gimple.phi_nodes = seq;
  if (seq)
    for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
      gimple_set_bb (gsi_stmt (i), bb);
}

#include "gt-tree-phinodes.h"