1 /* Lower GIMPLE_SWITCH expressions to something more efficient than
2 a jump table.
3 Copyright (C) 2006-2018 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This file handles the lowering of GIMPLE_SWITCH to an indexed
23 load, or a series of bit-test-and-branch expressions. */
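/* A rough, hand-written illustration (not actual pass output): a dense
   switch whose cases only select constant values, e.g.

       switch (x)
	 {
	 case 1: return 10;
	 case 2:
	 case 3: return 20;
	 case 4: return 30;
	 default: return 0;
	 }

   can be lowered to a bounds check plus a load from a generated static
   array, roughly

       static const int CSWTCH[4] = { 10, 20, 20, 30 };
       if ((unsigned) (x - 1) <= 3)
	 return CSWTCH[x - 1];
       return 0;

   whereas a switch with only a few distinct targets may instead be turned
   into a handful of bit-test-and-branch expressions of the form
   "if ((1 << (x - minval)) & mask) goto label;".  */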
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "insn-codes.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "gimple.h"
33 #include "cfghooks.h"
34 #include "tree-pass.h"
35 #include "ssa.h"
36 #include "optabs-tree.h"
37 #include "cgraph.h"
38 #include "gimple-pretty-print.h"
39 #include "params.h"
40 #include "fold-const.h"
41 #include "varasm.h"
42 #include "stor-layout.h"
43 #include "cfganal.h"
44 #include "gimplify.h"
45 #include "gimple-iterator.h"
46 #include "gimplify-me.h"
47 #include "tree-cfg.h"
48 #include "cfgloop.h"
49 #include "alloc-pool.h"
50 #include "target.h"
51 #include "tree-into-ssa.h"
52 #include "omp-general.h"
53
54 /* ??? For lang_hooks.types.type_for_mode, but is there a word_mode
55 type in the GIMPLE type system that is language-independent? */
56 #include "langhooks.h"
57
58 #include "tree-switch-conversion.h"
59 \f
60 using namespace tree_switch_conversion;
61
62 /* Constructor. */
63
64 switch_conversion::switch_conversion (): m_final_bb (NULL), m_other_count (),
65 m_constructors (NULL), m_default_values (NULL),
66 m_arr_ref_first (NULL), m_arr_ref_last (NULL),
67 m_reason (NULL), m_default_case_nonstandard (false), m_cfg_altered (false)
68 {
69 }
70
71 /* Collect information about the SWTCH statement. */
72
73 void
74 switch_conversion::collect (gswitch *swtch)
75 {
76 unsigned int branch_num = gimple_switch_num_labels (swtch);
77 tree min_case, max_case;
78 unsigned int i;
79 edge e, e_default, e_first;
80 edge_iterator ei;
81
82 m_switch = swtch;
83
84 /* The gimplifier has already sorted the cases by CASE_LOW and ensured there
85 is a default label which is the first in the vector.
86 Collect the bits we can deduce from the CFG. */
87 m_index_expr = gimple_switch_index (swtch);
88 m_switch_bb = gimple_bb (swtch);
89 e_default = gimple_switch_default_edge (cfun, swtch);
90 m_default_bb = e_default->dest;
91 m_default_prob = e_default->probability;
92 m_default_count = e_default->count ();
93 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
94 if (e != e_default)
95 m_other_count += e->count ();
96
97 /* Get upper and lower bounds of case values, and the covered range. */
98 min_case = gimple_switch_label (swtch, 1);
99 max_case = gimple_switch_label (swtch, branch_num - 1);
100
101 m_range_min = CASE_LOW (min_case);
102 if (CASE_HIGH (max_case) != NULL_TREE)
103 m_range_max = CASE_HIGH (max_case);
104 else
105 m_range_max = CASE_LOW (max_case);
106
107 m_contiguous_range = true;
108 tree last = CASE_HIGH (min_case) ? CASE_HIGH (min_case) : m_range_min;
109 for (i = 2; i < branch_num; i++)
110 {
111 tree elt = gimple_switch_label (swtch, i);
112 if (wi::to_wide (last) + 1 != wi::to_wide (CASE_LOW (elt)))
113 {
114 m_contiguous_range = false;
115 break;
116 }
117 last = CASE_HIGH (elt) ? CASE_HIGH (elt) : CASE_LOW (elt);
118 }
119
120 if (m_contiguous_range)
121 e_first = gimple_switch_edge (cfun, swtch, 1);
122 else
123 e_first = e_default;
124
125 /* See if there is one common successor block for all branch
126 targets. If it exists, record it in FINAL_BB.
127 As an initial guess, take the destination of the first non-default
128 case if the range is contiguous and of the default case otherwise,
129 or that block's successor if it is merely a forwarder block. */
130 if (! single_pred_p (e_first->dest))
131 m_final_bb = e_first->dest;
132 else if (single_succ_p (e_first->dest)
133 && ! single_pred_p (single_succ (e_first->dest)))
134 m_final_bb = single_succ (e_first->dest);
135 /* Require that all switch destinations are either that common
136 FINAL_BB or a forwarder to it, except for the default
137 case if contiguous range. */
138 if (m_final_bb)
139 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
140 {
141 if (e->dest == m_final_bb)
142 continue;
143
144 if (single_pred_p (e->dest)
145 && single_succ_p (e->dest)
146 && single_succ (e->dest) == m_final_bb)
147 continue;
148
149 if (e == e_default && m_contiguous_range)
150 {
151 m_default_case_nonstandard = true;
152 continue;
153 }
154
155 m_final_bb = NULL;
156 break;
157 }
158
159 m_range_size
160 = int_const_binop (MINUS_EXPR, m_range_max, m_range_min);
161
162 /* Get a count of the number of case labels. Single-valued case labels
163 simply count as one, but a case range counts double, since it may
164 require two compares if it gets lowered as a branching tree. */
165 m_count = 0;
166 for (i = 1; i < branch_num; i++)
167 {
168 tree elt = gimple_switch_label (swtch, i);
169 m_count++;
170 if (CASE_HIGH (elt)
171 && ! tree_int_cst_equal (CASE_LOW (elt), CASE_HIGH (elt)))
172 m_count++;
173 }
174
175 /* Get the number of unique non-default targets out of the GIMPLE_SWITCH
176 block. Assume a CFG cleanup would have already removed degenerate
177 switch statements; this allows us to just use EDGE_COUNT. */
178 m_uniq = EDGE_COUNT (gimple_bb (swtch)->succs) - 1;
179 }
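/* For instance (hand-worked example, for illustration only): a switch with
   the labels "case 1:", "case 2 ... 5:" and "case 7:" yields m_count of 4
   (the range counts twice), m_range_min 1 and m_range_max 7, with
   m_contiguous_range false because of the hole at 6; m_uniq is simply the
   number of distinct non-default successor blocks.  */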
180
181 /* Checks whether the range spanned by the case values of the switch
182 statement isn't too big and whether the number of branches actually
183 justifies the size of the new array. */
184
185 bool
186 switch_conversion::check_range ()
187 {
188 gcc_assert (m_range_size);
189 if (!tree_fits_uhwi_p (m_range_size))
190 {
191 m_reason = "index range way too large or otherwise unusable";
192 return false;
193 }
194
195 if (tree_to_uhwi (m_range_size)
196 > ((unsigned) m_count * SWITCH_CONVERSION_BRANCH_RATIO))
197 {
198 m_reason = "the maximum range-branch ratio exceeded";
199 return false;
200 }
201
202 return true;
203 }
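/* An illustrative calculation, assuming the default value 8 of the
   switch-conversion-max-branch-ratio parameter: case values spanning
   0 .. 100 give an m_range_size of 100, so at least 13 counted labels
   are required, since 100 > 12 * 8 but 100 <= 13 * 8.  */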
204
205 /* Checks whether all basic blocks other than the final BB are empty. */
206
207 bool
208 switch_conversion::check_all_empty_except_final ()
209 {
210 edge e, e_default = find_edge (m_switch_bb, m_default_bb);
211 edge_iterator ei;
212
213 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
214 {
215 if (e->dest == m_final_bb)
216 continue;
217
218 if (!empty_block_p (e->dest))
219 {
220 if (m_contiguous_range && e == e_default)
221 {
222 m_default_case_nonstandard = true;
223 continue;
224 }
225
226 m_reason = "bad case - a non-final BB not empty";
227 return false;
228 }
229 }
230
231 return true;
232 }
233
234 /* This function checks whether all required values in phi nodes in final_bb
235 are constants. Required values are those that correspond to a basic block
236 which is a part of the examined switch statement. It returns true if the
237 phi nodes are OK, otherwise false. */
238
239 bool
240 switch_conversion::check_final_bb ()
241 {
242 gphi_iterator gsi;
243
244 m_phi_count = 0;
245 for (gsi = gsi_start_phis (m_final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
246 {
247 gphi *phi = gsi.phi ();
248 unsigned int i;
249
250 if (virtual_operand_p (gimple_phi_result (phi)))
251 continue;
252
253 m_phi_count++;
254
255 for (i = 0; i < gimple_phi_num_args (phi); i++)
256 {
257 basic_block bb = gimple_phi_arg_edge (phi, i)->src;
258
259 if (bb == m_switch_bb
260 || (single_pred_p (bb)
261 && single_pred (bb) == m_switch_bb
262 && (!m_default_case_nonstandard
263 || empty_block_p (bb))))
264 {
265 tree reloc, val;
266 const char *reason = NULL;
267
268 val = gimple_phi_arg_def (phi, i);
269 if (!is_gimple_ip_invariant (val))
270 reason = "non-invariant value from a case";
271 else
272 {
273 reloc = initializer_constant_valid_p (val, TREE_TYPE (val));
274 if ((flag_pic && reloc != null_pointer_node)
275 || (!flag_pic && reloc == NULL_TREE))
276 {
277 if (reloc)
278 reason
279 = "value from a case would need runtime relocations";
280 else
281 reason
282 = "value from a case is not a valid initializer";
283 }
284 }
285 if (reason)
286 {
287 /* For a contiguous range, we can allow a value that is
288 non-constant or needs relocation, as long as it is
289 only reachable from the default case. */
290 if (bb == m_switch_bb)
291 bb = m_final_bb;
292 if (!m_contiguous_range || bb != m_default_bb)
293 {
294 m_reason = reason;
295 return false;
296 }
297
298 unsigned int branch_num = gimple_switch_num_labels (m_switch);
299 for (unsigned int i = 1; i < branch_num; i++)
300 {
301 if (gimple_switch_label_bb (cfun, m_switch, i) == bb)
302 {
303 m_reason = reason;
304 return false;
305 }
306 }
307 m_default_case_nonstandard = true;
308 }
309 }
310 }
311 }
312
313 return true;
314 }
315
316 /* The following function allocates default_values, target_{in,out}_names and
317 constructors arrays. The last one is also populated with pointers to
318 vectors that will become constructors of new arrays. */
319
320 void
321 switch_conversion::create_temp_arrays ()
322 {
323 int i;
324
325 m_default_values = XCNEWVEC (tree, m_phi_count * 3);
326 /* ??? Macros do not support multi argument templates in their
327 argument list. We create a typedef to work around that problem. */
328 typedef vec<constructor_elt, va_gc> *vec_constructor_elt_gc;
329 m_constructors = XCNEWVEC (vec_constructor_elt_gc, m_phi_count);
330 m_target_inbound_names = m_default_values + m_phi_count;
331 m_target_outbound_names = m_target_inbound_names + m_phi_count;
332 for (i = 0; i < m_phi_count; i++)
333 vec_alloc (m_constructors[i], tree_to_uhwi (m_range_size) + 1);
334 }
335
336 /* Populate the array of default values in the order of phi nodes.
337 DEFAULT_CASE is the CASE_LABEL_EXPR for the default switch branch
338 if the range is non-contiguous or the default case has standard
339 structure, otherwise it is the first non-default case instead. */
340
341 void
342 switch_conversion::gather_default_values (tree default_case)
343 {
344 gphi_iterator gsi;
345 basic_block bb = label_to_block (cfun, CASE_LABEL (default_case));
346 edge e;
347 int i = 0;
348
349 gcc_assert (CASE_LOW (default_case) == NULL_TREE
350 || m_default_case_nonstandard);
351
352 if (bb == m_final_bb)
353 e = find_edge (m_switch_bb, bb);
354 else
355 e = single_succ_edge (bb);
356
357 for (gsi = gsi_start_phis (m_final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
358 {
359 gphi *phi = gsi.phi ();
360 if (virtual_operand_p (gimple_phi_result (phi)))
361 continue;
362 tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
363 gcc_assert (val);
364 m_default_values[i++] = val;
365 }
366 }
367
368 /* The following function populates the vectors in the constructors array with
369 future contents of the static arrays. The vectors are populated in the
370 order of phi nodes. */
371
372 void
373 switch_conversion::build_constructors ()
374 {
375 unsigned i, branch_num = gimple_switch_num_labels (m_switch);
376 tree pos = m_range_min;
377 tree pos_one = build_int_cst (TREE_TYPE (pos), 1);
378
379 for (i = 1; i < branch_num; i++)
380 {
381 tree cs = gimple_switch_label (m_switch, i);
382 basic_block bb = label_to_block (cfun, CASE_LABEL (cs));
383 edge e;
384 tree high;
385 gphi_iterator gsi;
386 int j;
387
388 if (bb == m_final_bb)
389 e = find_edge (m_switch_bb, bb);
390 else
391 e = single_succ_edge (bb);
392 gcc_assert (e);
393
394 while (tree_int_cst_lt (pos, CASE_LOW (cs)))
395 {
396 int k;
397 for (k = 0; k < m_phi_count; k++)
398 {
399 constructor_elt elt;
400
401 elt.index = int_const_binop (MINUS_EXPR, pos, m_range_min);
402 elt.value
403 = unshare_expr_without_location (m_default_values[k]);
404 m_constructors[k]->quick_push (elt);
405 }
406
407 pos = int_const_binop (PLUS_EXPR, pos, pos_one);
408 }
409 gcc_assert (tree_int_cst_equal (pos, CASE_LOW (cs)));
410
411 j = 0;
412 if (CASE_HIGH (cs))
413 high = CASE_HIGH (cs);
414 else
415 high = CASE_LOW (cs);
416 for (gsi = gsi_start_phis (m_final_bb);
417 !gsi_end_p (gsi); gsi_next (&gsi))
418 {
419 gphi *phi = gsi.phi ();
420 if (virtual_operand_p (gimple_phi_result (phi)))
421 continue;
422 tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
423 tree low = CASE_LOW (cs);
424 pos = CASE_LOW (cs);
425
426 do
427 {
428 constructor_elt elt;
429
430 elt.index = int_const_binop (MINUS_EXPR, pos, m_range_min);
431 elt.value = unshare_expr_without_location (val);
432 m_constructors[j]->quick_push (elt);
433
434 pos = int_const_binop (PLUS_EXPR, pos, pos_one);
435 } while (!tree_int_cst_lt (high, pos)
436 && tree_int_cst_lt (low, pos));
437 j++;
438 }
439 }
440 }
441
442 /* If all values in the constructor vector are the same, return the value.
443 Otherwise return NULL_TREE. Not supposed to be called for empty
444 vectors. */
445
446 tree
447 switch_conversion::contains_same_values_p (vec<constructor_elt, va_gc> *vec)
448 {
449 unsigned int i;
450 tree prev = NULL_TREE;
451 constructor_elt *elt;
452
453 FOR_EACH_VEC_SAFE_ELT (vec, i, elt)
454 {
455 if (!prev)
456 prev = elt->value;
457 else if (!operand_equal_p (elt->value, prev, OEP_ONLY_CONST))
458 return NULL_TREE;
459 }
460 return prev;
461 }
462
463 /* Return type which should be used for array elements, either TYPE's
464 main variant or, for integral types, some smaller integral type
465 that can still hold all the constants. */
466
467 tree
468 switch_conversion::array_value_type (tree type, int num)
469 {
470 unsigned int i, len = vec_safe_length (m_constructors[num]);
471 constructor_elt *elt;
472 int sign = 0;
473 tree smaller_type;
474
475 /* Types with alignments greater than their size can reach here, e.g. out of
476 SRA. We couldn't use these as an array component type so get back to the
477 main variant first, which, for our purposes, is fine for other types as
478 well. */
479
480 type = TYPE_MAIN_VARIANT (type);
481
482 if (!INTEGRAL_TYPE_P (type))
483 return type;
484
485 scalar_int_mode type_mode = SCALAR_INT_TYPE_MODE (type);
486 scalar_int_mode mode = get_narrowest_mode (type_mode);
487 if (GET_MODE_SIZE (type_mode) <= GET_MODE_SIZE (mode))
488 return type;
489
490 if (len < (optimize_bb_for_size_p (gimple_bb (m_switch)) ? 2 : 32))
491 return type;
492
493 FOR_EACH_VEC_SAFE_ELT (m_constructors[num], i, elt)
494 {
495 wide_int cst;
496
497 if (TREE_CODE (elt->value) != INTEGER_CST)
498 return type;
499
500 cst = wi::to_wide (elt->value);
501 while (1)
502 {
503 unsigned int prec = GET_MODE_BITSIZE (mode);
504 if (prec > HOST_BITS_PER_WIDE_INT)
505 return type;
506
507 if (sign >= 0 && cst == wi::zext (cst, prec))
508 {
509 if (sign == 0 && cst == wi::sext (cst, prec))
510 break;
511 sign = 1;
512 break;
513 }
514 if (sign <= 0 && cst == wi::sext (cst, prec))
515 {
516 sign = -1;
517 break;
518 }
519
520 if (sign == 1)
521 sign = 0;
522
523 if (!GET_MODE_WIDER_MODE (mode).exists (&mode)
524 || GET_MODE_SIZE (mode) >= GET_MODE_SIZE (type_mode))
525 return type;
526 }
527 }
528
529 if (sign == 0)
530 sign = TYPE_UNSIGNED (type) ? 1 : -1;
531 smaller_type = lang_hooks.types.type_for_mode (mode, sign >= 0);
532 if (GET_MODE_SIZE (type_mode)
533 <= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (smaller_type)))
534 return type;
535
536 return smaller_type;
537 }
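/* As an illustrative example: if every value stored into a particular
   CSWTCH array is an INTEGER_CST in the range 0 .. 200, the array can be
   given an 8-bit unsigned element type (QImode on typical targets) even
   when the PHI result is a 32-bit int, shrinking the emitted table to a
   quarter of its size.  The narrowing is skipped for tables of fewer than
   32 elements (fewer than 2 when optimizing for size), where the extra
   conversion code is unlikely to pay off.  */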
538
539 /* Create an appropriate array type and declaration and assemble a static
540 array variable. Also create a load statement that initializes
541 the variable in question with a value from the static array. NUM is
542 the index to
543 arrays of constructors, default values and target SSA names
544 for this particular array. ARR_INDEX_TYPE is the type of the index
545 of the new array, PHI is the phi node of the final BB that corresponds
546 to the value that will be loaded from the created array. TIDX
547 is an ssa name of a temporary variable holding the index for loads from the
548 new array. */
549
550 void
551 switch_conversion::build_one_array (int num, tree arr_index_type,
552 gphi *phi, tree tidx)
553 {
554 tree name, cst;
555 gimple *load;
556 gimple_stmt_iterator gsi = gsi_for_stmt (m_switch);
557 location_t loc = gimple_location (m_switch);
558
559 gcc_assert (m_default_values[num]);
560
561 name = copy_ssa_name (PHI_RESULT (phi));
562 m_target_inbound_names[num] = name;
563
564 cst = contains_same_values_p (m_constructors[num]);
565 if (cst)
566 load = gimple_build_assign (name, cst);
567 else
568 {
569 tree array_type, ctor, decl, value_type, fetch, default_type;
570
571 default_type = TREE_TYPE (m_default_values[num]);
572 value_type = array_value_type (default_type, num);
573 array_type = build_array_type (value_type, arr_index_type);
574 if (default_type != value_type)
575 {
576 unsigned int i;
577 constructor_elt *elt;
578
579 FOR_EACH_VEC_SAFE_ELT (m_constructors[num], i, elt)
580 elt->value = fold_convert (value_type, elt->value);
581 }
582 ctor = build_constructor (array_type, m_constructors[num]);
583 TREE_CONSTANT (ctor) = true;
584 TREE_STATIC (ctor) = true;
585
586 decl = build_decl (loc, VAR_DECL, NULL_TREE, array_type);
587 TREE_STATIC (decl) = 1;
588 DECL_INITIAL (decl) = ctor;
589
590 DECL_NAME (decl) = create_tmp_var_name ("CSWTCH");
591 DECL_ARTIFICIAL (decl) = 1;
592 DECL_IGNORED_P (decl) = 1;
593 TREE_CONSTANT (decl) = 1;
594 TREE_READONLY (decl) = 1;
595 DECL_IGNORED_P (decl) = 1;
596 if (offloading_function_p (cfun->decl))
597 DECL_ATTRIBUTES (decl)
598 = tree_cons (get_identifier ("omp declare target"), NULL_TREE,
599 NULL_TREE);
600 varpool_node::finalize_decl (decl);
601
602 fetch = build4 (ARRAY_REF, value_type, decl, tidx, NULL_TREE,
603 NULL_TREE);
604 if (default_type != value_type)
605 {
606 fetch = fold_convert (default_type, fetch);
607 fetch = force_gimple_operand_gsi (&gsi, fetch, true, NULL_TREE,
608 true, GSI_SAME_STMT);
609 }
610 load = gimple_build_assign (name, fetch);
611 }
612
613 gsi_insert_before (&gsi, load, GSI_SAME_STMT);
614 update_stmt (load);
615 m_arr_ref_last = load;
616 }
617
618 /* Builds static arrays initialized with values gathered from
619 the switch statement. Also creates statements that load values from
620 them. */
621
622 void
623 switch_conversion::build_arrays ()
624 {
625 tree arr_index_type;
626 tree tidx, sub, utype;
627 gimple *stmt;
628 gimple_stmt_iterator gsi;
629 gphi_iterator gpi;
630 int i;
631 location_t loc = gimple_location (m_switch);
632
633 gsi = gsi_for_stmt (m_switch);
634
635 /* Make sure we do not generate arithmetic in a subrange type. */
636 utype = TREE_TYPE (m_index_expr);
637 if (TREE_TYPE (utype))
638 utype = lang_hooks.types.type_for_mode (TYPE_MODE (TREE_TYPE (utype)), 1);
639 else
640 utype = lang_hooks.types.type_for_mode (TYPE_MODE (utype), 1);
641
642 arr_index_type = build_index_type (m_range_size);
643 tidx = make_ssa_name (utype);
644 sub = fold_build2_loc (loc, MINUS_EXPR, utype,
645 fold_convert_loc (loc, utype, m_index_expr),
646 fold_convert_loc (loc, utype, m_range_min));
647 sub = force_gimple_operand_gsi (&gsi, sub,
648 false, NULL, true, GSI_SAME_STMT);
649 stmt = gimple_build_assign (tidx, sub);
650
651 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
652 update_stmt (stmt);
653 m_arr_ref_first = stmt;
654
655 for (gpi = gsi_start_phis (m_final_bb), i = 0;
656 !gsi_end_p (gpi); gsi_next (&gpi))
657 {
658 gphi *phi = gpi.phi ();
659 if (!virtual_operand_p (gimple_phi_result (phi)))
660 build_one_array (i++, arr_index_type, phi, tidx);
661 else
662 {
663 edge e;
664 edge_iterator ei;
665 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
666 {
667 if (e->dest == m_final_bb)
668 break;
669 if (!m_default_case_nonstandard
670 || e->dest != m_default_bb)
671 {
672 e = single_succ_edge (e->dest);
673 break;
674 }
675 }
676 gcc_assert (e && e->dest == m_final_bb);
677 m_target_vop = PHI_ARG_DEF_FROM_EDGE (phi, e);
678 }
679 }
680 }
681
682 /* Generates and appropriately inserts loads of default values at the position
683 given by GSI. Returns the last inserted statement. */
684
685 gassign *
686 switch_conversion::gen_def_assigns (gimple_stmt_iterator *gsi)
687 {
688 int i;
689 gassign *assign = NULL;
690
691 for (i = 0; i < m_phi_count; i++)
692 {
693 tree name = copy_ssa_name (m_target_inbound_names[i]);
694 m_target_outbound_names[i] = name;
695 assign = gimple_build_assign (name, m_default_values[i]);
696 gsi_insert_before (gsi, assign, GSI_SAME_STMT);
697 update_stmt (assign);
698 }
699 return assign;
700 }
701
702 /* Deletes the unused bbs and edges that now contain the switch statement and
703 its empty branch bbs. BBD is the now dead BB containing
704 the original switch statement, FINAL is the last BB of the converted
705 switch statement (in terms of succession). */
706
707 void
708 switch_conversion::prune_bbs (basic_block bbd, basic_block final,
709 basic_block default_bb)
710 {
711 edge_iterator ei;
712 edge e;
713
714 for (ei = ei_start (bbd->succs); (e = ei_safe_edge (ei)); )
715 {
716 basic_block bb;
717 bb = e->dest;
718 remove_edge (e);
719 if (bb != final && bb != default_bb)
720 delete_basic_block (bb);
721 }
722 delete_basic_block (bbd);
723 }
724
725 /* Add values to phi nodes in final_bb for the two new edges. E1F is the edge
726 from the basic block loading values from an array and E2F from the basic
727 block loading default values. BBF is the last switch basic block (see the
728 bbf description in the comment below). */
729
730 void
731 switch_conversion::fix_phi_nodes (edge e1f, edge e2f, basic_block bbf)
732 {
733 gphi_iterator gsi;
734 int i;
735
736 for (gsi = gsi_start_phis (bbf), i = 0;
737 !gsi_end_p (gsi); gsi_next (&gsi))
738 {
739 gphi *phi = gsi.phi ();
740 tree inbound, outbound;
741 if (virtual_operand_p (gimple_phi_result (phi)))
742 inbound = outbound = m_target_vop;
743 else
744 {
745 inbound = m_target_inbound_names[i];
746 outbound = m_target_outbound_names[i++];
747 }
748 add_phi_arg (phi, inbound, e1f, UNKNOWN_LOCATION);
749 if (!m_default_case_nonstandard)
750 add_phi_arg (phi, outbound, e2f, UNKNOWN_LOCATION);
751 }
752 }
753
754 /* Creates a check whether the switch expression value actually falls into the
755 range given by all the cases. If it does not, the temporaries are loaded
756 with default values instead. */
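/* A sketch of the block layout this produces; the names follow the local
   variables used below:

   bb0: originally the block containing the switch; it now ends with the
	range-check condition instead.
   bb1: taken when the range check succeeds; holds the loads from the
	CSWTCH arrays.
   bb2: taken when the index falls outside the covered range; populated
	with loads of the default values (or it is the original default
	block when the default case is nonstandard).
   bbF: the common fall-through of bb1 and bb2, i.e. what originally
	followed the switch statement.
   bbD: the now unreachable block still holding the original switch
	statement; it and the old empty case blocks are removed by
	prune_bbs.  */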
757
758 void
759 switch_conversion::gen_inbound_check ()
760 {
761 tree label_decl1 = create_artificial_label (UNKNOWN_LOCATION);
762 tree label_decl2 = create_artificial_label (UNKNOWN_LOCATION);
763 tree label_decl3 = create_artificial_label (UNKNOWN_LOCATION);
764 glabel *label1, *label2, *label3;
765 tree utype, tidx;
766 tree bound;
767
768 gcond *cond_stmt;
769
770 gassign *last_assign = NULL;
771 gimple_stmt_iterator gsi;
772 basic_block bb0, bb1, bb2, bbf, bbd;
773 edge e01 = NULL, e02, e21, e1d, e1f, e2f;
774 location_t loc = gimple_location (m_switch);
775
776 gcc_assert (m_default_values);
777
778 bb0 = gimple_bb (m_switch);
779
780 tidx = gimple_assign_lhs (m_arr_ref_first);
781 utype = TREE_TYPE (tidx);
782
783 /* (end of) block 0 */
784 gsi = gsi_for_stmt (m_arr_ref_first);
785 gsi_next (&gsi);
786
787 bound = fold_convert_loc (loc, utype, m_range_size);
788 cond_stmt = gimple_build_cond (LE_EXPR, tidx, bound, NULL_TREE, NULL_TREE);
789 gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
790 update_stmt (cond_stmt);
791
792 /* block 2 */
793 if (!m_default_case_nonstandard)
794 {
795 label2 = gimple_build_label (label_decl2);
796 gsi_insert_before (&gsi, label2, GSI_SAME_STMT);
797 last_assign = gen_def_assigns (&gsi);
798 }
799
800 /* block 1 */
801 label1 = gimple_build_label (label_decl1);
802 gsi_insert_before (&gsi, label1, GSI_SAME_STMT);
803
804 /* block F */
805 gsi = gsi_start_bb (m_final_bb);
806 label3 = gimple_build_label (label_decl3);
807 gsi_insert_before (&gsi, label3, GSI_SAME_STMT);
808
809 /* cfg fix */
810 e02 = split_block (bb0, cond_stmt);
811 bb2 = e02->dest;
812
813 if (m_default_case_nonstandard)
814 {
815 bb1 = bb2;
816 bb2 = m_default_bb;
817 e01 = e02;
818 e01->flags = EDGE_TRUE_VALUE;
819 e02 = make_edge (bb0, bb2, EDGE_FALSE_VALUE);
820 edge e_default = find_edge (bb1, bb2);
821 for (gphi_iterator gsi = gsi_start_phis (bb2);
822 !gsi_end_p (gsi); gsi_next (&gsi))
823 {
824 gphi *phi = gsi.phi ();
825 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, e_default);
826 add_phi_arg (phi, arg, e02,
827 gimple_phi_arg_location_from_edge (phi, e_default));
828 }
829 /* Partially fix the dominator tree, if it is available. */
830 if (dom_info_available_p (CDI_DOMINATORS))
831 redirect_immediate_dominators (CDI_DOMINATORS, bb1, bb0);
832 }
833 else
834 {
835 e21 = split_block (bb2, last_assign);
836 bb1 = e21->dest;
837 remove_edge (e21);
838 }
839
840 e1d = split_block (bb1, m_arr_ref_last);
841 bbd = e1d->dest;
842 remove_edge (e1d);
843
844 /* Flags and profiles of the edge for in-range values. */
845 if (!m_default_case_nonstandard)
846 e01 = make_edge (bb0, bb1, EDGE_TRUE_VALUE);
847 e01->probability = m_default_prob.invert ();
848
849 /* Flags and profiles of the edge taking care of out-of-range values. */
850 e02->flags &= ~EDGE_FALLTHRU;
851 e02->flags |= EDGE_FALSE_VALUE;
852 e02->probability = m_default_prob;
853
854 bbf = m_final_bb;
855
856 e1f = make_edge (bb1, bbf, EDGE_FALLTHRU);
857 e1f->probability = profile_probability::always ();
858
859 if (m_default_case_nonstandard)
860 e2f = NULL;
861 else
862 {
863 e2f = make_edge (bb2, bbf, EDGE_FALLTHRU);
864 e2f->probability = profile_probability::always ();
865 }
866
867 /* frequencies of the new BBs */
868 bb1->count = e01->count ();
869 bb2->count = e02->count ();
870 if (!m_default_case_nonstandard)
871 bbf->count = e1f->count () + e2f->count ();
872
873 /* Tidy blocks that have become unreachable. */
874 prune_bbs (bbd, m_final_bb,
875 m_default_case_nonstandard ? m_default_bb : NULL);
876
877 /* Fixup the PHI nodes in bbF. */
878 fix_phi_nodes (e1f, e2f, bbf);
879
880 /* Fix the dominator tree, if it is available. */
881 if (dom_info_available_p (CDI_DOMINATORS))
882 {
883 vec<basic_block> bbs_to_fix_dom;
884
885 set_immediate_dominator (CDI_DOMINATORS, bb1, bb0);
886 if (!m_default_case_nonstandard)
887 set_immediate_dominator (CDI_DOMINATORS, bb2, bb0);
888 if (! get_immediate_dominator (CDI_DOMINATORS, bbf))
889 /* If bbD was the immediate dominator ... */
890 set_immediate_dominator (CDI_DOMINATORS, bbf, bb0);
891
892 bbs_to_fix_dom.create (3 + (bb2 != bbf));
893 bbs_to_fix_dom.quick_push (bb0);
894 bbs_to_fix_dom.quick_push (bb1);
895 if (bb2 != bbf)
896 bbs_to_fix_dom.quick_push (bb2);
897 bbs_to_fix_dom.quick_push (bbf);
898
899 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
900 bbs_to_fix_dom.release ();
901 }
902 }
903
904 /* The following function is invoked on every switch statement (the current
905 one is given in SWTCH) and runs the individual phases of switch
906 conversion on it one after another until one fails or the conversion
907 is completed. On success, m_reason is NULL; otherwise it points
908 to a string with the reason why the conversion failed. */
909
910 void
911 switch_conversion::expand (gswitch *swtch)
912 {
913 /* Group case labels so that we get the right results from the heuristics
914 that decide on the code generation approach for this switch. */
915 m_cfg_altered |= group_case_labels_stmt (swtch);
916
917 /* If this switch is now a degenerate case with only a default label,
918 there is nothing left for us to do. */
919 if (gimple_switch_num_labels (swtch) < 2)
920 {
921 m_reason = "switch is a degenerate case";
922 return;
923 }
924
925 collect (swtch);
926
927 /* No error markers should reach here (they should be filtered out
928 during gimplification). */
929 gcc_checking_assert (TREE_TYPE (m_index_expr) != error_mark_node);
930
931 /* A switch on a constant should have been optimized in tree-cfg-cleanup. */
932 gcc_checking_assert (!TREE_CONSTANT (m_index_expr));
933
934 /* Prefer bit test if possible. */
935 if (tree_fits_uhwi_p (m_range_size)
936 && bit_test_cluster::can_be_handled (tree_to_uhwi (m_range_size), m_uniq)
937 && bit_test_cluster::is_beneficial (m_count, m_uniq))
938 {
939 m_reason = "expanding as bit test is preferable";
940 return;
941 }
942
943 if (m_uniq <= 2)
944 {
945 /* This will be expanded as a decision tree. */
946 m_reason = "expanding as jumps is preferable";
947 return;
948 }
949
950 /* If there is no common successor, we cannot do the transformation. */
951 if (!m_final_bb)
952 {
953 m_reason = "no common successor to all case label target blocks found";
954 return;
955 }
956
957 /* Check that the case label values are within a reasonable range. */
958 if (!check_range ())
959 {
960 gcc_assert (m_reason);
961 return;
962 }
963
964 /* For all the cases, see whether their blocks are empty, whether the
965 values they assign are constant, and so on... */
966 if (!check_all_empty_except_final ())
967 {
968 gcc_assert (m_reason);
969 return;
970 }
971 if (!check_final_bb ())
972 {
973 gcc_assert (m_reason);
974 return;
975 }
976
977 /* At this point all checks have passed and we can proceed with the
978 transformation. */
979
980 create_temp_arrays ();
981 gather_default_values (m_default_case_nonstandard
982 ? gimple_switch_label (swtch, 1)
983 : gimple_switch_default_label (swtch));
984 build_constructors ();
985
986 build_arrays (); /* Build the static arrays and assignments. */
987 gen_inbound_check (); /* Build the bounds check. */
988
989 m_cfg_altered = true;
990 }
991
992 /* Destructor. */
993
994 switch_conversion::~switch_conversion ()
995 {
996 XDELETEVEC (m_constructors);
997 XDELETEVEC (m_default_values);
998 }
999
1000 /* Constructor. */
1001
1002 group_cluster::group_cluster (vec<cluster *> &clusters,
1003 unsigned start, unsigned end)
1004 {
1005 gcc_checking_assert (end - start + 1 >= 1);
1006 m_prob = profile_probability::never ();
1007 m_cases.create (end - start + 1);
1008 for (unsigned i = start; i <= end; i++)
1009 {
1010 m_cases.quick_push (static_cast<simple_cluster *> (clusters[i]));
1011 m_prob += clusters[i]->m_prob;
1012 }
1013 m_subtree_prob = m_prob;
1014 }
1015
1016 /* Destructor. */
1017
1018 group_cluster::~group_cluster ()
1019 {
1020 for (unsigned i = 0; i < m_cases.length (); i++)
1021 delete m_cases[i];
1022
1023 m_cases.release ();
1024 }
1025
1026 /* Dump content of a cluster. */
1027
1028 void
1029 group_cluster::dump (FILE *f, bool details)
1030 {
1031 unsigned total_values = 0;
1032 for (unsigned i = 0; i < m_cases.length (); i++)
1033 total_values += m_cases[i]->get_range (m_cases[i]->get_low (),
1034 m_cases[i]->get_high ());
1035
1036 unsigned comparison_count = 0;
1037 for (unsigned i = 0; i < m_cases.length (); i++)
1038 {
1039 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1040 comparison_count += sc->m_range_p ? 2 : 1;
1041 }
1042
1043 unsigned HOST_WIDE_INT range = get_range (get_low (), get_high ());
1044 fprintf (f, "%s", get_type () == JUMP_TABLE ? "JT" : "BT");
1045
1046 if (details)
1047 fprintf (f, "(values:%d comparisons:%d range:" HOST_WIDE_INT_PRINT_DEC
1048 " density: %.2f%%)", total_values, comparison_count, range,
1049 100.0f * comparison_count / range);
1050
1051 fprintf (f, ":");
1052 PRINT_CASE (f, get_low ());
1053 fprintf (f, "-");
1054 PRINT_CASE (f, get_high ());
1055 fprintf (f, " ");
1056 }
1057
1058 /* Emit GIMPLE code to handle the cluster. */
1059
1060 void
1061 jump_table_cluster::emit (tree index_expr, tree,
1062 tree default_label_expr, basic_block default_bb)
1063 {
1064 unsigned HOST_WIDE_INT range = get_range (get_low (), get_high ());
1065 unsigned HOST_WIDE_INT nondefault_range = 0;
1066
1067 /* For a jump table we just emit a new gswitch statement that will
1068 later be lowered to a jump table. */
1069 auto_vec <tree> labels;
1070 labels.create (m_cases.length ());
1071
1072 make_edge (m_case_bb, default_bb, 0);
1073 for (unsigned i = 0; i < m_cases.length (); i++)
1074 {
1075 labels.quick_push (unshare_expr (m_cases[i]->m_case_label_expr));
1076 make_edge (m_case_bb, m_cases[i]->m_case_bb, 0);
1077 }
1078
1079 gswitch *s = gimple_build_switch (index_expr,
1080 unshare_expr (default_label_expr), labels);
1081 gimple_stmt_iterator gsi = gsi_start_bb (m_case_bb);
1082 gsi_insert_after (&gsi, s, GSI_NEW_STMT);
1083
1084 /* Compute the number of jump-table values covered by each case edge. */
1085 for (unsigned i = 0; i < m_cases.length (); i++)
1086 {
1087 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1088 edge case_edge = find_edge (m_case_bb, sc->m_case_bb);
1089 unsigned HOST_WIDE_INT case_range
1090 = sc->get_range (sc->get_low (), sc->get_high ());
1091 nondefault_range += case_range;
1092
1093 /* case_edge->aux is number of values in a jump-table that are covered
1094 by the case_edge. */
1095 case_edge->aux = (void *) ((intptr_t) (case_edge->aux) + case_range);
1096 }
1097
1098 edge default_edge = gimple_switch_default_edge (cfun, s);
1099 default_edge->probability = profile_probability::never ();
1100
1101 for (unsigned i = 0; i < m_cases.length (); i++)
1102 {
1103 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1104 edge case_edge = find_edge (m_case_bb, sc->m_case_bb);
1105 case_edge->probability
1106 = profile_probability::always ().apply_scale ((intptr_t)case_edge->aux,
1107 range);
1108 }
1109
1110 /* The default edge gets the probability of the values not covered by any non-default case. */
1111 default_edge->probability
1112 += profile_probability::always ().apply_scale (nondefault_range,
1113 range).invert ();
1114
1115 switch_decision_tree::reset_out_edges_aux (s);
1116 }
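/* A worked example of the scaling above (illustrative numbers only): if
   the table covers 10 values of which the non-default cases cover 7, the
   default edge receives a probability of 3/10, and a case covering 2 of
   those values receives 2/10.  */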
1117
1118 /* Find jump tables of given CLUSTERS, where all members of the vector
1119 are of type simple_cluster. New clusters are returned. */
1120
1121 vec<cluster *>
1122 jump_table_cluster::find_jump_tables (vec<cluster *> &clusters)
1123 {
1124 if (!is_enabled ())
1125 return clusters.copy ();
1126
1127 unsigned l = clusters.length ();
1128 auto_vec<min_cluster_item> min;
1129 min.reserve (l + 1);
1130
1131 min.quick_push (min_cluster_item (0, 0, 0));
1132
1133 for (unsigned i = 1; i <= l; i++)
1134 {
1135 /* Set minimal # of clusters with i-th item to infinity. */
1136 min.quick_push (min_cluster_item (INT_MAX, INT_MAX, INT_MAX));
1137
1138 for (unsigned j = 0; j < i; j++)
1139 {
1140 unsigned HOST_WIDE_INT s = min[j].m_non_jt_cases;
1141 if (i - j < case_values_threshold ())
1142 s += i - j;
1143
1144 /* Prefer fewer clusters; break ties by fewer cases outside jump tables. */
1145 if ((min[j].m_count + 1 < min[i].m_count
1146 || (min[j].m_count + 1 == min[i].m_count
1147 && s < min[i].m_non_jt_cases))
1148 && can_be_handled (clusters, j, i - 1))
1149 min[i] = min_cluster_item (min[j].m_count + 1, j, s);
1150 }
1151
1152 gcc_checking_assert (min[i].m_count != INT_MAX);
1153 }
1154
1155 /* No result. */
1156 if (min[l].m_count == INT_MAX)
1157 return clusters.copy ();
1158
1159 vec<cluster *> output;
1160 output.create (4);
1161
1162 /* Find and build the clusters. */
1163 for (int end = l;;)
1164 {
1165 int start = min[end].m_start;
1166
1167 /* Do not allow clusters with a small number of cases. */
1168 if (is_beneficial (clusters, start, end - 1))
1169 output.safe_push (new jump_table_cluster (clusters, start, end - 1));
1170 else
1171 for (int i = end - 1; i >= start; i--)
1172 output.safe_push (clusters[i]);
1173
1174 end = start;
1175
1176 if (start <= 0)
1177 break;
1178 }
1179
1180 output.reverse ();
1181 return output;
1182 }
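/* The search above is a simple dynamic program over cluster prefixes:
   min[i] is the smallest number of resulting clusters for the first i
   simple clusters, computed roughly as

     min[0] = 0
     min[i] = minimum over all j < i of (min[j] + 1)
	      such that clusters[j .. i-1] may form one jump table,

   with ties broken in favour of fewer cases ending up outside jump
   tables.  The m_start back-links are then walked from min[l] to
   materialize the chosen partitioning.  */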
1183
1184 /* Return true when cluster starting at START and ending at END (inclusive)
1185 can build a jump-table. */
1186
1187 bool
1188 jump_table_cluster::can_be_handled (const vec<cluster *> &clusters,
1189 unsigned start, unsigned end)
1190 {
1191 /* If the switch is relatively small such that the cost of one
1192 indirect jump on the target are higher than the cost of a
1193 decision tree, go with the decision tree.
1194
1195 If range of values is much bigger than number of values,
1196 or if it is too large to represent in a HOST_WIDE_INT,
1197 make a sequence of conditional branches instead of a dispatch.
1198
1199 The definition of "much bigger" depends on whether we are
1200 optimizing for size or for speed. */
1201 if (!flag_jump_tables)
1202 return false;
1203
1204 /* For algorithm correctness, jump table for a single case must return
1205 true. We bail out in is_beneficial if it's called just for
1206 a single case. */
1207 if (start == end)
1208 return true;
1209
1210 unsigned HOST_WIDE_INT max_ratio
1211 = optimize_insn_for_size_p () ? max_ratio_for_size : max_ratio_for_speed;
1212 unsigned HOST_WIDE_INT range = get_range (clusters[start]->get_low (),
1213 clusters[end]->get_high ());
1214 /* Check overflow. */
1215 if (range == 0)
1216 return false;
1217
1218 unsigned HOST_WIDE_INT comparison_count = 0;
1219 for (unsigned i = start; i <= end; i++)
1220 {
1221 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
1222 comparison_count += sc->m_range_p ? 2 : 1;
1223 }
1224
1225 return range <= max_ratio * comparison_count;
1226 }
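/* A worked example, taking the ratios to be 8 for speed and 3 for size:
   ten single-valued cases spread over the values 1 .. 40 give a range of
   40 and a comparison_count of 10, so a jump table is allowed when
   optimizing for speed (40 <= 8 * 10) but rejected when optimizing for
   size (40 > 3 * 10).  */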
1227
1228 /* Return true if cluster starting at START and ending at END (inclusive)
1229 is a profitable transformation. */
1230
1231 bool
1232 jump_table_cluster::is_beneficial (const vec<cluster *> &,
1233 unsigned start, unsigned end)
1234 {
1235 /* Single case bail out. */
1236 if (start == end)
1237 return false;
1238
1239 return end - start + 1 >= case_values_threshold ();
1240 }
1241
1242 /* Definition of jump_table_cluster constants. */
1243
1244 const unsigned HOST_WIDE_INT jump_table_cluster::max_ratio_for_size;
1245 const unsigned HOST_WIDE_INT jump_table_cluster::max_ratio_for_speed;
1246
1247 /* Find bit tests of given CLUSTERS, where all members of the vector
1248 are of type simple_cluster. New clusters are returned. */
1249
1250 vec<cluster *>
1251 bit_test_cluster::find_bit_tests (vec<cluster *> &clusters)
1252 {
1253 vec<cluster *> output;
1254 output.create (4);
1255
1256 unsigned l = clusters.length ();
1257 auto_vec<min_cluster_item> min;
1258 min.reserve (l + 1);
1259
1260 min.quick_push (min_cluster_item (0, 0, 0));
1261
1262 for (unsigned i = 1; i <= l; i++)
1263 {
1264 /* Set minimal # of clusters with i-th item to infinity. */
1265 min.quick_push (min_cluster_item (INT_MAX, INT_MAX, INT_MAX));
1266
1267 for (unsigned j = 0; j < i; j++)
1268 {
1269 if (min[j].m_count + 1 < min[i].m_count
1270 && can_be_handled (clusters, j, i - 1))
1271 min[i] = min_cluster_item (min[j].m_count + 1, j, INT_MAX);
1272 }
1273
1274 gcc_checking_assert (min[i].m_count != INT_MAX);
1275 }
1276
1277 /* No result. */
1278 if (min[l].m_count == INT_MAX)
1279 return clusters.copy ();
1280
1281 /* Find and build the clusters. */
1282 for (unsigned end = l;;)
1283 {
1284 int start = min[end].m_start;
1285
1286 if (is_beneficial (clusters, start, end - 1))
1287 {
1288 bool entire = start == 0 && end == clusters.length ();
1289 output.safe_push (new bit_test_cluster (clusters, start, end - 1,
1290 entire));
1291 }
1292 else
1293 for (int i = end - 1; i >= start; i--)
1294 output.safe_push (clusters[i]);
1295
1296 end = start;
1297
1298 if (start <= 0)
1299 break;
1300 }
1301
1302 output.reverse ();
1303 return output;
1304 }
1305
1306 /* Return true when RANGE of case values with UNIQ labels
1307 can build a bit test. */
1308
1309 bool
1310 bit_test_cluster::can_be_handled (unsigned HOST_WIDE_INT range,
1311 unsigned int uniq)
1312 {
1313 /* Check overflow. */
1314 if (range == 0)
1315 return 0;
1316
1317 if (range >= GET_MODE_BITSIZE (word_mode))
1318 return false;
1319
1320 return uniq <= 3;
1321 }
1322
1323 /* Return true when cluster starting at START and ending at END (inclusive)
1324 can build a bit test. */
1325
1326 bool
1327 bit_test_cluster::can_be_handled (const vec<cluster *> &clusters,
1328 unsigned start, unsigned end)
1329 {
1330 /* For algorithm correctness, bit test for a single case must return
1331 true. We bail out in is_beneficial if it's called just for
1332 a single case. */
1333 if (start == end)
1334 return true;
1335
1336 unsigned HOST_WIDE_INT range = get_range (clusters[start]->get_low (),
1337 clusters[end]->get_high ());
1338 auto_bitmap dest_bbs;
1339
1340 for (unsigned i = start; i <= end; i++)
1341 {
1342 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
1343 bitmap_set_bit (dest_bbs, sc->m_case_bb->index);
1344 }
1345
1346 return can_be_handled (range, bitmap_count_bits (dest_bbs));
1347 }
1348
1349 /* Return true when COUNT of cases of UNIQ labels is beneficial for bit test
1350 transformation. */
1351
1352 bool
1353 bit_test_cluster::is_beneficial (unsigned count, unsigned uniq)
1354 {
1355 return (((uniq == 1 && count >= 3)
1356 || (uniq == 2 && count >= 5)
1357 || (uniq == 3 && count >= 6)));
1358 }
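/* E.g. five cases that all branch to a single label (uniq 1, count 5)
   are worth a bit test, as are six cases spread over three labels,
   whereas four cases going to two different labels (uniq 2, count 4)
   are not.  */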
1359
1360 /* Return true if cluster starting at START and ending at END (inclusive)
1361 is a profitable transformation. */
1362
1363 bool
1364 bit_test_cluster::is_beneficial (const vec<cluster *> &clusters,
1365 unsigned start, unsigned end)
1366 {
1367 /* Single case bail out. */
1368 if (start == end)
1369 return false;
1370
1371 auto_bitmap dest_bbs;
1372
1373 for (unsigned i = start; i <= end; i++)
1374 {
1375 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
1376 bitmap_set_bit (dest_bbs, sc->m_case_bb->index);
1377 }
1378
1379 unsigned uniq = bitmap_count_bits (dest_bbs);
1380 unsigned count = end - start + 1;
1381 return is_beneficial (count, uniq);
1382 }
1383
1384 /* Comparison function for qsort to order bit tests by decreasing
1385 probability of execution. */
1386
1387 int
1388 case_bit_test::cmp (const void *p1, const void *p2)
1389 {
1390 const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
1391 const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
1392
1393 if (d2->bits != d1->bits)
1394 return d2->bits - d1->bits;
1395
1396 /* Stabilize the sort. */
1397 return (LABEL_DECL_UID (CASE_LABEL (d2->label))
1398 - LABEL_DECL_UID (CASE_LABEL (d1->label)));
1399 }
1400
1401 /* Expand a switch statement by a short sequence of bit-wise
1402 comparisons. "switch(x)" is effectively converted into
1403 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
1404 integer constants.
1405
1406 INDEX_EXPR is the value being switched on.
1407
1408 MINVAL is the lowest case value in the case nodes,
1409 and RANGE is highest value minus MINVAL. MINVAL and RANGE
1410 are not guaranteed to be of the same type as INDEX_EXPR
1411 (the gimplifier doesn't change the type of case label values,
1412 and MINVAL and RANGE are derived from those values).
1413 MAXVAL is MINVAL + RANGE.
1414
1415 There *MUST* be max_case_bit_tests or fewer unique case
1416 node targets. */
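/* A small hand-worked sketch (not actual pass output): for the cases
   { 1, 4, 6 } -> L1 and { 2, 3 } -> L2 with MINVAL 1, the emitted code
   is roughly

     csui = (word type) 1 << (x - 1);
     if (csui & 0x29) goto L1;   (bits 0, 3 and 5: values 1, 4 and 6)
     if (csui & 0x06) goto L2;   (bits 1 and 2: values 2 and 3)
     goto default_label;

   preceded, when the cluster handles the entire switch, by an
   "if (x - 1 > RANGE) goto default" bounds check.  */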
1417
1418 void
1419 bit_test_cluster::emit (tree index_expr, tree index_type,
1420 tree, basic_block default_bb)
1421 {
1422 struct case_bit_test test[m_max_case_bit_tests] = { {} };
1423 unsigned int i, j, k;
1424 unsigned int count;
1425
1426 tree unsigned_index_type = unsigned_type_for (index_type);
1427
1428 gimple_stmt_iterator gsi;
1429 gassign *shift_stmt;
1430
1431 tree idx, tmp, csui;
1432 tree word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
1433 tree word_mode_zero = fold_convert (word_type_node, integer_zero_node);
1434 tree word_mode_one = fold_convert (word_type_node, integer_one_node);
1435 int prec = TYPE_PRECISION (word_type_node);
1436 wide_int wone = wi::one (prec);
1437
1438 tree minval = get_low ();
1439 tree maxval = get_high ();
1440 tree range = int_const_binop (MINUS_EXPR, maxval, minval);
1441 unsigned HOST_WIDE_INT bt_range = get_range (minval, maxval);
1442
1443 /* Go through all the cases and collect the case labels, profile
1444 counts, and other information we need to build the branch tests. */
1445 count = 0;
1446 for (i = 0; i < m_cases.length (); i++)
1447 {
1448 unsigned int lo, hi;
1449 simple_cluster *n = static_cast<simple_cluster *> (m_cases[i]);
1450 for (k = 0; k < count; k++)
1451 if (n->m_case_bb == test[k].target_bb)
1452 break;
1453
1454 if (k == count)
1455 {
1456 gcc_checking_assert (count < m_max_case_bit_tests);
1457 test[k].mask = wi::zero (prec);
1458 test[k].target_bb = n->m_case_bb;
1459 test[k].label = n->m_case_label_expr;
1460 test[k].bits = 0;
1461 count++;
1462 }
1463
1464 test[k].bits += n->get_range (n->get_low (), n->get_high ());
1465
1466 lo = tree_to_uhwi (int_const_binop (MINUS_EXPR, n->get_low (), minval));
1467 if (n->get_high () == NULL_TREE)
1468 hi = lo;
1469 else
1470 hi = tree_to_uhwi (int_const_binop (MINUS_EXPR, n->get_high (),
1471 minval));
1472
1473 for (j = lo; j <= hi; j++)
1474 test[k].mask |= wi::lshift (wone, j);
1475 }
1476
1477 qsort (test, count, sizeof (*test), case_bit_test::cmp);
1478
1479 /* If all values are in the 0 .. BITS_PER_WORD-1 range, we can get rid of
1480 the minval subtractions, but it might make the mask constants more
1481 expensive. So, compare the costs. */
1482 if (compare_tree_int (minval, 0) > 0
1483 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
1484 {
1485 int cost_diff;
1486 HOST_WIDE_INT m = tree_to_uhwi (minval);
1487 rtx reg = gen_raw_REG (word_mode, 10000);
1488 bool speed_p = optimize_insn_for_speed_p ();
1489 cost_diff = set_rtx_cost (gen_rtx_PLUS (word_mode, reg,
1490 GEN_INT (-m)), speed_p);
1491 for (i = 0; i < count; i++)
1492 {
1493 rtx r = immed_wide_int_const (test[i].mask, word_mode);
1494 cost_diff += set_src_cost (gen_rtx_AND (word_mode, reg, r),
1495 word_mode, speed_p);
1496 r = immed_wide_int_const (wi::lshift (test[i].mask, m), word_mode);
1497 cost_diff -= set_src_cost (gen_rtx_AND (word_mode, reg, r),
1498 word_mode, speed_p);
1499 }
1500 if (cost_diff > 0)
1501 {
1502 for (i = 0; i < count; i++)
1503 test[i].mask = wi::lshift (test[i].mask, m);
1504 minval = build_zero_cst (TREE_TYPE (minval));
1505 range = maxval;
1506 }
1507 }
1508
1509 /* Now build the test-and-branch code. */
1510
1511 gsi = gsi_last_bb (m_case_bb);
1512
1513 /* idx = (unsigned)x - minval. */
1514 idx = fold_convert (unsigned_index_type, index_expr);
1515 idx = fold_build2 (MINUS_EXPR, unsigned_index_type, idx,
1516 fold_convert (unsigned_index_type, minval));
1517 idx = force_gimple_operand_gsi (&gsi, idx,
1518 /*simple=*/true, NULL_TREE,
1519 /*before=*/true, GSI_SAME_STMT);
1520
1521 if (m_handles_entire_switch)
1522 {
1523 /* if (idx > range) goto default */
1524 range
1525 = force_gimple_operand_gsi (&gsi,
1526 fold_convert (unsigned_index_type, range),
1527 /*simple=*/true, NULL_TREE,
1528 /*before=*/true, GSI_SAME_STMT);
1529 tmp = fold_build2 (GT_EXPR, boolean_type_node, idx, range);
1530 basic_block new_bb
1531 = hoist_edge_and_branch_if_true (&gsi, tmp, default_bb,
1532 profile_probability::unlikely ());
1533 gsi = gsi_last_bb (new_bb);
1534 }
1535
1536 /* csui = (1 << (word_mode) idx) */
1537 csui = make_ssa_name (word_type_node);
1538 tmp = fold_build2 (LSHIFT_EXPR, word_type_node, word_mode_one,
1539 fold_convert (word_type_node, idx));
1540 tmp = force_gimple_operand_gsi (&gsi, tmp,
1541 /*simple=*/false, NULL_TREE,
1542 /*before=*/true, GSI_SAME_STMT);
1543 shift_stmt = gimple_build_assign (csui, tmp);
1544 gsi_insert_before (&gsi, shift_stmt, GSI_SAME_STMT);
1545 update_stmt (shift_stmt);
1546
1547 profile_probability prob = profile_probability::always ();
1548
1549 /* for each unique set of cases:
1550 if (const & csui) goto target */
1551 for (k = 0; k < count; k++)
1552 {
1553 prob = profile_probability::always ().apply_scale (test[k].bits,
1554 bt_range);
1555 bt_range -= test[k].bits;
1556 tmp = wide_int_to_tree (word_type_node, test[k].mask);
1557 tmp = fold_build2 (BIT_AND_EXPR, word_type_node, csui, tmp);
1558 tmp = force_gimple_operand_gsi (&gsi, tmp,
1559 /*simple=*/true, NULL_TREE,
1560 /*before=*/true, GSI_SAME_STMT);
1561 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp, word_mode_zero);
1562 basic_block new_bb
1563 = hoist_edge_and_branch_if_true (&gsi, tmp, test[k].target_bb, prob);
1564 gsi = gsi_last_bb (new_bb);
1565 }
1566
1567 /* We should have removed all edges now. */
1568 gcc_assert (EDGE_COUNT (gsi_bb (gsi)->succs) == 0);
1569
1570 /* If nothing matched, go to the default label. */
1571 edge e = make_edge (gsi_bb (gsi), default_bb, EDGE_FALLTHRU);
1572 e->probability = profile_probability::always ();
1573 }
1574
1575 /* Split the basic block at the statement pointed to by GSIP, and insert
1576 a branch to the basic block CASE_BB conditional on tree expression
1577 COND, with probability PROB on the new true edge.
1578
1579 A new true edge from the to-be-split basic block to CASE_BB is
1580 created, and the fall-through edge produced by the block split
1581 becomes the false edge of the new conditional jump; its probability
1582 is the inverse of PROB.
1583
1584 The CFG is updated. The dominator tree will not be valid after
1585 this transformation.
1586
1587 Returns the newly created basic block, i.e. the false destination
1588 of the new condition. */
1589
1590 basic_block
1591 bit_test_cluster::hoist_edge_and_branch_if_true (gimple_stmt_iterator *gsip,
1592 tree cond, basic_block case_bb,
1593 profile_probability prob)
1594 {
1595 tree tmp;
1596 gcond *cond_stmt;
1597 edge e_false;
1598 basic_block new_bb, split_bb = gsi_bb (*gsip);
1599
1600 edge e_true = make_edge (split_bb, case_bb, EDGE_TRUE_VALUE);
1601 e_true->probability = prob;
1602 gcc_assert (e_true->src == split_bb);
1603
1604 tmp = force_gimple_operand_gsi (gsip, cond, /*simple=*/true, NULL,
1605 /*before=*/true, GSI_SAME_STMT);
1606 cond_stmt = gimple_build_cond_from_tree (tmp, NULL_TREE, NULL_TREE);
1607 gsi_insert_before (gsip, cond_stmt, GSI_SAME_STMT);
1608
1609 e_false = split_block (split_bb, cond_stmt);
1610 new_bb = e_false->dest;
1611 redirect_edge_pred (e_true, split_bb);
1612
1613 e_false->flags &= ~EDGE_FALLTHRU;
1614 e_false->flags |= EDGE_FALSE_VALUE;
1615 e_false->probability = e_true->probability.invert ();
1616 new_bb->count = e_false->count ();
1617
1618 return new_bb;
1619 }
1620
1621 /* Compute the number of case labels that correspond to each outgoing edge of
1622 the switch statement. Record this information in the aux field of the edge. */
1623
1624 void
1625 switch_decision_tree::compute_cases_per_edge ()
1626 {
1627 reset_out_edges_aux (m_switch);
1628 int ncases = gimple_switch_num_labels (m_switch);
1629 for (int i = ncases - 1; i >= 1; --i)
1630 {
1631 edge case_edge = gimple_switch_edge (cfun, m_switch, i);
1632 case_edge->aux = (void *) ((intptr_t) (case_edge->aux) + 1);
1633 }
1634 }
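/* The per-edge counts recorded here are used below to split an edge's
   probability evenly among its case labels: e.g. if three case labels
   all jump to the same block, each resulting simple_cluster is given a
   third of that edge's probability.  */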
1635
1636 /* Analyze the switch statement and return true when the statement is
1637 expanded as a decision tree. */
1638
1639 bool
1640 switch_decision_tree::analyze_switch_statement ()
1641 {
1642 unsigned l = gimple_switch_num_labels (m_switch);
1643 basic_block bb = gimple_bb (m_switch);
1644 auto_vec<cluster *> clusters;
1645 clusters.create (l - 1);
1646
1647 basic_block default_bb = gimple_switch_default_bb (cfun, m_switch);
1648 m_case_bbs.reserve (l);
1649 m_case_bbs.quick_push (default_bb);
1650
1651 compute_cases_per_edge ();
1652
1653 for (unsigned i = 1; i < l; i++)
1654 {
1655 tree elt = gimple_switch_label (m_switch, i);
1656 tree lab = CASE_LABEL (elt);
1657 basic_block case_bb = label_to_block (cfun, lab);
1658 edge case_edge = find_edge (bb, case_bb);
1659 tree low = CASE_LOW (elt);
1660 tree high = CASE_HIGH (elt);
1661
1662 profile_probability p
1663 = case_edge->probability.apply_scale (1, (intptr_t) (case_edge->aux));
1664 clusters.quick_push (new simple_cluster (low, high, elt, case_edge->dest,
1665 p));
1666 m_case_bbs.quick_push (case_edge->dest);
1667 }
1668
1669 reset_out_edges_aux (m_switch);
1670
1671 /* Find jump table clusters. */
1672 vec<cluster *> output = jump_table_cluster::find_jump_tables (clusters);
1673
1674 /* Find bit test clusters. */
1675 vec<cluster *> output2;
1676 auto_vec<cluster *> tmp;
1677 output2.create (1);
1678 tmp.create (1);
1679
1680 for (unsigned i = 0; i < output.length (); i++)
1681 {
1682 cluster *c = output[i];
1683 if (c->get_type () != SIMPLE_CASE)
1684 {
1685 if (!tmp.is_empty ())
1686 {
1687 vec<cluster *> n = bit_test_cluster::find_bit_tests (tmp);
1688 output2.safe_splice (n);
1689 n.release ();
1690 tmp.truncate (0);
1691 }
1692 output2.safe_push (c);
1693 }
1694 else
1695 tmp.safe_push (c);
1696 }
1697
1698 /* The temporary vector may still hold a trailing run of simple clusters. */
1699 if (!tmp.is_empty ())
1700 {
1701 vec<cluster *> n = bit_test_cluster::find_bit_tests (tmp);
1702 output2.safe_splice (n);
1703 n.release ();
1704 }
1705
1706 if (dump_file)
1707 {
1708 fprintf (dump_file, ";; GIMPLE switch case clusters: ");
1709 for (unsigned i = 0; i < output2.length (); i++)
1710 output2[i]->dump (dump_file, dump_flags & TDF_DETAILS);
1711 fprintf (dump_file, "\n");
1712 }
1713
1714 output.release ();
1715
1716 bool expanded = try_switch_expansion (output2);
1717
1718 for (unsigned i = 0; i < output2.length (); i++)
1719 delete output2[i];
1720
1721 output2.release ();
1722
1723 return expanded;
1724 }
1725
1726 /* Attempt to expand CLUSTERS as a decision tree. Return true when
1727 expanded. */
1728
1729 bool
1730 switch_decision_tree::try_switch_expansion (vec<cluster *> &clusters)
1731 {
1732 tree index_expr = gimple_switch_index (m_switch);
1733 tree index_type = TREE_TYPE (index_expr);
1734 basic_block bb = gimple_bb (m_switch);
1735
1736 if (gimple_switch_num_labels (m_switch) == 1)
1737 return false;
1738
1739 /* Find the default case target label. */
1740 edge default_edge = gimple_switch_default_edge (cfun, m_switch);
1741 m_default_bb = default_edge->dest;
1742
1743 /* Do the insertion of a case label into m_case_list. The labels are
1744 fed to us in descending order from the sorted vector of case labels used
1745 in the tree part of the middle end. So the list we construct is
1746 sorted in ascending order. */
1747
1748 for (int i = clusters.length () - 1; i >= 0; i--)
1749 {
1750 case_tree_node *r = m_case_list;
1751 m_case_list = m_case_node_pool.allocate ();
1752 m_case_list->m_right = r;
1753 m_case_list->m_c = clusters[i];
1754 }
1755
1756 record_phi_operand_mapping ();
1757
1758 /* Split basic block that contains the gswitch statement. */
1759 gimple_stmt_iterator gsi = gsi_last_bb (bb);
1760 edge e;
1761 if (gsi_end_p (gsi))
1762 e = split_block_after_labels (bb);
1763 else
1764 {
1765 gsi_prev (&gsi);
1766 e = split_block (bb, gsi_stmt (gsi));
1767 }
1768 bb = split_edge (e);
1769
1770 /* Create new basic blocks for non-case clusters where specific expansion
1771 needs to happen. */
1772 for (unsigned i = 0; i < clusters.length (); i++)
1773 if (clusters[i]->get_type () != SIMPLE_CASE)
1774 {
1775 clusters[i]->m_case_bb = create_empty_bb (bb);
1776 clusters[i]->m_case_bb->loop_father = bb->loop_father;
1777 }
1778
1779 /* Do not do extra work for a single cluster. */
1780 if (clusters.length () == 1
1781 && clusters[0]->get_type () != SIMPLE_CASE)
1782 {
1783 cluster *c = clusters[0];
1784 c->emit (index_expr, index_type,
1785 gimple_switch_default_label (m_switch), m_default_bb);
1786 redirect_edge_succ (single_succ_edge (bb), c->m_case_bb);
1787 }
1788 else
1789 {
1790 emit (bb, index_expr, default_edge->probability, index_type);
1791
1792 /* Emit cluster-specific switch handling. */
1793 for (unsigned i = 0; i < clusters.length (); i++)
1794 if (clusters[i]->get_type () != SIMPLE_CASE)
1795 clusters[i]->emit (index_expr, index_type,
1796 gimple_switch_default_label (m_switch),
1797 m_default_bb);
1798 }
1799
1800 fix_phi_operands_for_edges ();
1801
1802 return true;
1803 }
1804
1805 /* Before switch transformation, record all SSA_NAMEs defined in switch BB
1806 and used in a label basic block. */
1807
1808 void
1809 switch_decision_tree::record_phi_operand_mapping ()
1810 {
1811 basic_block switch_bb = gimple_bb (m_switch);
1812 /* Record all PHI nodes that have to be fixed after conversion. */
1813 for (unsigned i = 0; i < m_case_bbs.length (); i++)
1814 {
1815 gphi_iterator gsi;
1816 basic_block bb = m_case_bbs[i];
1817 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1818 {
1819 gphi *phi = gsi.phi ();
1820
1821 for (unsigned i = 0; i < gimple_phi_num_args (phi); i++)
1822 {
1823 basic_block phi_src_bb = gimple_phi_arg_edge (phi, i)->src;
1824 if (phi_src_bb == switch_bb)
1825 {
1826 tree def = gimple_phi_arg_def (phi, i);
1827 tree result = gimple_phi_result (phi);
1828 m_phi_mapping.put (result, def);
1829 break;
1830 }
1831 }
1832 }
1833 }
1834 }
1835
1836 /* Append new operands to PHI statements that were introduced due to
1837 addition of new edges to case labels. */
1838
1839 void
1840 switch_decision_tree::fix_phi_operands_for_edges ()
1841 {
1842 gphi_iterator gsi;
1843
1844 for (unsigned i = 0; i < m_case_bbs.length (); i++)
1845 {
1846 basic_block bb = m_case_bbs[i];
1847 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1848 {
1849 gphi *phi = gsi.phi ();
1850 for (unsigned j = 0; j < gimple_phi_num_args (phi); j++)
1851 {
1852 tree def = gimple_phi_arg_def (phi, j);
1853 if (def == NULL_TREE)
1854 {
1855 edge e = gimple_phi_arg_edge (phi, j);
1856 tree *definition
1857 = m_phi_mapping.get (gimple_phi_result (phi));
1858 gcc_assert (definition);
1859 add_phi_arg (phi, *definition, e, UNKNOWN_LOCATION);
1860 }
1861 }
1862 }
1863 }
1864 }
1865
1866 /* Generate a decision tree in BB, switching on INDEX_EXPR and jumping to
1867 one of the case labels collected in m_case_list or to m_default_bb.
1868
1869 We generate a binary decision tree to select the appropriate target
1870 code. */
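/* Schematically, and only as a sketch: for a hypothetical case list
   1, 2..5, 7 balanced around the pivot 2..5, the emitted comparisons look
   roughly like

     if (index > 5)  goto test_right;   .. right subtree handles 7
     if (index >= 2) goto case_2_5;     .. the pivot cluster
     .. left subtree handles 1, then falls through to m_default_bb ..

   Each comparison is a GIMPLE_COND created by emit_cmp_and_jump_insns.  */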
1871
1872 void
1873 switch_decision_tree::emit (basic_block bb, tree index_expr,
1874 profile_probability default_prob, tree index_type)
1875 {
1876 balance_case_nodes (&m_case_list, NULL);
1877
1878 if (dump_file)
1879 dump_function_to_file (current_function_decl, dump_file, dump_flags);
1880 if (dump_file && (dump_flags & TDF_DETAILS))
1881 {
1882 int indent_step = ceil_log2 (TYPE_PRECISION (index_type)) + 2;
1883 fprintf (dump_file, ";; Expanding GIMPLE switch as decision tree:\n");
1884 gcc_assert (m_case_list != NULL);
1885 dump_case_nodes (dump_file, m_case_list, indent_step, 0);
1886 }
1887
1888 bb = emit_case_nodes (bb, index_expr, m_case_list, default_prob, index_type);
1889
1890 if (bb)
1891 emit_jump (bb, m_default_bb);
1892
1893 /* The decision tree replaces the gswitch entirely; remove the statement
and delete its basic block together with all of its outgoing case edges. */
1894 bb = gimple_bb (m_switch);
1895 gimple_stmt_iterator gsi = gsi_last_bb (bb);
1896 gsi_remove (&gsi, true);
1897
1898 delete_basic_block (bb);
1899 }
1900
1901 /* Take an ordered list of case nodes
1902 and transform them into a near optimal binary tree,
1903 on the assumption that any target code selection value is as
1904 likely as any other.
1905
1906 The transformation is performed by splitting the ordered
1907 list into two equal sections plus a pivot. The parts are
1908 then attached to the pivot as left and right branches. Each
1909 branch is then transformed recursively. */
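/* For illustration (no particular testcase): an ordered list of five
   single-value nodes 1 -> 2 -> 3 -> 4 -> 5 is split at its cost midpoint,
   yielding

               3
              / \
             1   4
              \   \
               2   5

   Range clusters are weighted twice as heavily as single values when the
   midpoint is computed.  */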
1910
1911 void
1912 switch_decision_tree::balance_case_nodes (case_tree_node **head,
1913 case_tree_node *parent)
1914 {
1915 case_tree_node *np;
1916
1917 np = *head;
1918 if (np)
1919 {
1920 int i = 0;
1921 int ranges = 0;
1922 case_tree_node **npp;
1923 case_tree_node *left;
1924
1925 /* Count the number of entries on branch. Also count the ranges. */
1926
1927 while (np)
1928 {
1929 if (!tree_int_cst_equal (np->m_c->get_low (), np->m_c->get_high ()))
1930 ranges++;
1931
1932 i++;
1933 np = np->m_right;
1934 }
1935
1936 if (i > 2)
1937 {
1938 /* Split this list if it is long enough for that to help. */
1939 npp = head;
1940 left = *npp;
1941
1942 /* If there are just three nodes, split at the middle one. */
1943 if (i == 3)
1944 npp = &(*npp)->m_right;
1945 else
1946 {
1947 /* Find the place in the list that bisects the list's total cost,
1948 where ranges count as 2.
1949 Here i is set to half the total cost. */
1950 i = (i + ranges + 1) / 2;
1951 while (1)
1952 {
1953 /* Skip nodes while their cost does not reach that amount. */
1954 if (!tree_int_cst_equal ((*npp)->m_c->get_low (),
1955 (*npp)->m_c->get_high ()))
1956 i--;
1957 i--;
1958 if (i <= 0)
1959 break;
1960 npp = &(*npp)->m_right;
1961 }
1962 }
1963 *head = np = *npp;
1964 *npp = 0;
1965 np->m_parent = parent;
1966 np->m_left = left;
1967
1968 /* Optimize each of the two split parts. */
1969 balance_case_nodes (&np->m_left, np);
1970 balance_case_nodes (&np->m_right, np);
1971 np->m_c->m_subtree_prob = np->m_c->m_prob;
1972 np->m_c->m_subtree_prob += np->m_left->m_c->m_subtree_prob;
1973 np->m_c->m_subtree_prob += np->m_right->m_c->m_subtree_prob;
1974 }
1975 else
1976 {
1977 /* Else leave this branch as one level,
1978 but fill in `parent' fields. */
1979 np = *head;
1980 np->m_parent = parent;
1981 np->m_c->m_subtree_prob = np->m_c->m_prob;
1982 for (; np->m_right; np = np->m_right)
1983 {
1984 np->m_right->m_parent = np;
1985 (*head)->m_c->m_subtree_prob += np->m_right->m_c->m_subtree_prob;
1986 }
1987 }
1988 }
1989 }
1990
1991 /* Dump ROOT, a list or tree of case nodes, to file. */
1992
1993 void
1994 switch_decision_tree::dump_case_nodes (FILE *f, case_tree_node *root,
1995 int indent_step, int indent_level)
1996 {
1997 if (root == 0)
1998 return;
1999 indent_level++;
2000
2001 dump_case_nodes (f, root->m_left, indent_step, indent_level);
2002
2003 fputs (";; ", f);
2004 fprintf (f, "%*s", indent_step * indent_level, "");
2005 root->m_c->dump (f);
2006 root->m_c->m_prob.dump (f);
2007 fputs ("\n", f);
2008
2009 dump_case_nodes (f, root->m_right, indent_step, indent_level);
2010 }
2011
2012
2013 /* Redirect the single outgoing edge of basic block BB so that it jumps unconditionally to CASE_BB. */
2014
2015 void
2016 switch_decision_tree::emit_jump (basic_block bb, basic_block case_bb)
2017 {
2018 edge e = single_succ_edge (bb);
2019 redirect_edge_succ (e, case_bb);
2020 }
2021
2022 /* Emit a GIMPLE_COND in basic block BB comparing OP0 with OP1 and branch
2023 to LABEL_BB if the condition is true.  COMPARISON is the GIMPLE
2024 comparison code (EQ, NE, GT, etc.).  PROB is the probability of jumping
2025 to LABEL_BB.  Return the destination block of the false edge. */
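/* A rough sketch of the resulting CFG, assuming BB originally had a single
   fall-through successor (block names are purely illustrative):

     BB:      if (OP0 <COMPARISON> OP1)   <- newly built GIMPLE_COND
                goto LABEL_BB;            <- true edge, probability PROB
              else
                goto NEW_BB;              <- false edge, probability 1 - PROB
     NEW_BB:  ...                         <- created by split_block and
                                             returned to the caller

   NEW_BB inherits BB's original outgoing edge.  */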
2026
2027 basic_block
2028 switch_decision_tree::emit_cmp_and_jump_insns (basic_block bb, tree op0,
2029 tree op1, tree_code comparison,
2030 basic_block label_bb,
2031 profile_probability prob)
2032 {
2033 /* TODO: this is called once with an operand that is not the switch index, hence the conversion of OP1 to OP0's type. */
2034 op1 = fold_convert (TREE_TYPE (op0), op1);
2035
2036 gcond *cond = gimple_build_cond (comparison, op0, op1, NULL_TREE, NULL_TREE);
2037 gimple_stmt_iterator gsi = gsi_last_bb (bb);
2038 gsi_insert_after (&gsi, cond, GSI_NEW_STMT);
2039
2040 gcc_assert (single_succ_p (bb));
2041
2042 /* Make a new basic block where false branch will take place. */
2043 edge false_edge = split_block (bb, cond);
2044 false_edge->flags = EDGE_FALSE_VALUE;
2045 false_edge->probability = prob.invert ();
2046
2047 edge true_edge = make_edge (bb, label_bb, EDGE_TRUE_VALUE);
2048 true_edge->probability = prob;
2049
2050 return false_edge->dest;
2051 }
2052
2053 /* Emit step-by-step code to select a case for the value of INDEX.
2054 The generated decision tree follows the form of the
2055 case-node binary tree NODE, whose nodes represent test conditions.
2056 DEFAULT_PROB is the probability of the cases leading to the default BB.
2057 INDEX_TYPE is the type of the switch index. */
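/* For a single NODE covering the values LOW..HIGH the emitted sequence is
   roughly (illustrative pseudo-GIMPLE):

     if (index > HIGH) goto test_bb;           .. right subtree and default
     if (index >= LOW) goto NODE's m_case_bb;  .. values LOW..HIGH
     ... left subtree ...
     goto m_default_bb;
   test_bb:
     ... right subtree ...

   so the right subtree is reached only for values above HIGH and the left
   subtree only for values below LOW.  */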
2058
2059 basic_block
2060 switch_decision_tree::emit_case_nodes (basic_block bb, tree index,
2061 case_tree_node *node,
2062 profile_probability default_prob,
2063 tree index_type)
2064 {
2065 /* If node is null, we are done. */
2066 if (node == NULL)
2067 return bb;
2068
2069 /* Branch to a label where we will handle it later. */
2070 basic_block test_bb = split_edge (single_succ_edge (bb));
2071 redirect_edge_succ (single_pred_edge (test_bb),
2072 single_succ_edge (bb)->dest);
2073
2074 profile_probability probability
2075 = (node->m_right
2076 ? node->m_right->m_c->m_subtree_prob : profile_probability::never ());
2077 probability = ((probability + default_prob.apply_scale (1, 2))
2078 / (node->m_c->m_subtree_prob + default_prob));
2079 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (), GT_EXPR,
2080 test_bb, probability);
2081 default_prob = default_prob.apply_scale (1, 2);
2082
2083 /* Value belongs to this node or to the left-hand subtree. */
2084 probability = node->m_c->m_prob /
2085 (node->m_c->m_subtree_prob + default_prob);
2086 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (), GE_EXPR,
2087 node->m_c->m_case_bb, probability);
2088
2089 /* Handle the left-hand subtree. */
2090 bb = emit_case_nodes (bb, index, node->m_left,
2091 default_prob, index_type);
2092
2093 /* If the left-hand subtree fell through,
2094 don't let it fall into the right-hand subtree. */
2095 if (m_default_bb)
2096 emit_jump (bb, m_default_bb);
2097
2098 bb = emit_case_nodes (test_bb, index, node->m_right,
2099 default_prob, index_type);
2100
2101 return bb;
2102 }
2103
2104 /* The main function of the pass scans statements for switches and attempts
2105 to convert them via switch_conversion::expand. */
2106
2107 namespace {
2108
2109 const pass_data pass_data_convert_switch =
2110 {
2111 GIMPLE_PASS, /* type */
2112 "switchconv", /* name */
2113 OPTGROUP_NONE, /* optinfo_flags */
2114 TV_TREE_SWITCH_CONVERSION, /* tv_id */
2115 ( PROP_cfg | PROP_ssa ), /* properties_required */
2116 0, /* properties_provided */
2117 0, /* properties_destroyed */
2118 0, /* todo_flags_start */
2119 TODO_update_ssa, /* todo_flags_finish */
2120 };
2121
2122 class pass_convert_switch : public gimple_opt_pass
2123 {
2124 public:
2125 pass_convert_switch (gcc::context *ctxt)
2126 : gimple_opt_pass (pass_data_convert_switch, ctxt)
2127 {}
2128
2129 /* opt_pass methods: */
2130 virtual bool gate (function *) { return flag_tree_switch_conversion != 0; }
2131 virtual unsigned int execute (function *);
2132
2133 }; // class pass_convert_switch
2134
2135 unsigned int
2136 pass_convert_switch::execute (function *fun)
2137 {
2138 basic_block bb;
2139 bool cfg_altered = false;
2140
2141 FOR_EACH_BB_FN (bb, fun)
2142 {
2143 gimple *stmt = last_stmt (bb);
2144 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
2145 {
2146 if (dump_file)
2147 {
2148 expanded_location loc = expand_location (gimple_location (stmt));
2149
2150 fprintf (dump_file, "beginning to process the following "
2151 "SWITCH statement (%s:%d) : ------- \n",
2152 loc.file, loc.line);
2153 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2154 putc ('\n', dump_file);
2155 }
2156
2157 switch_conversion sconv;
2158 sconv.expand (as_a <gswitch *> (stmt));
2159 cfg_altered |= sconv.m_cfg_altered;
2160 if (!sconv.m_reason)
2161 {
2162 if (dump_file)
2163 {
2164 fputs ("Switch converted\n", dump_file);
2165 fputs ("--------------------------------\n", dump_file);
2166 }
2167
2168 /* Make no effort to update the post-dominator tree.
2169 It is actually not that hard for the transformations
2170 we have performed, but it is not supported
2171 by iterate_fix_dominators. */
2172 free_dominance_info (CDI_POST_DOMINATORS);
2173 }
2174 else
2175 {
2176 if (dump_file)
2177 {
2178 fputs ("Bailing out - ", dump_file);
2179 fputs (sconv.m_reason, dump_file);
2180 fputs ("\n--------------------------------\n", dump_file);
2181 }
2182 }
2183 }
2184 }
2185
2186 return cfg_altered ? TODO_cleanup_cfg : 0;
2187 }
2188
2189 } // anon namespace
2190
2191 gimple_opt_pass *
2192 make_pass_convert_switch (gcc::context *ctxt)
2193 {
2194 return new pass_convert_switch (ctxt);
2195 }
2196
2197 /* The main function of the lowering pass scans statements for switches and
2198 lowers them via switch_decision_tree::analyze_switch_statement. */
2199
2200 namespace {
2201
2202 template <bool O0> class pass_lower_switch: public gimple_opt_pass
2203 {
2204 public:
2205 pass_lower_switch (gcc::context *ctxt) : gimple_opt_pass (data, ctxt) {}
2206
2207 static const pass_data data;
2208 opt_pass *
2209 clone ()
2210 {
2211 return new pass_lower_switch<O0> (m_ctxt);
2212 }
2213
2214 virtual bool
2215 gate (function *)
2216 {
2217 return !O0 || !optimize;
2218 }
2219
2220 virtual unsigned int execute (function *fun);
2221 }; // class pass_lower_switch
2222
2223 template <bool O0>
2224 const pass_data pass_lower_switch<O0>::data = {
2225 GIMPLE_PASS, /* type */
2226 O0 ? "switchlower_O0" : "switchlower", /* name */
2227 OPTGROUP_NONE, /* optinfo_flags */
2228 TV_TREE_SWITCH_LOWERING, /* tv_id */
2229 ( PROP_cfg | PROP_ssa ), /* properties_required */
2230 0, /* properties_provided */
2231 0, /* properties_destroyed */
2232 0, /* todo_flags_start */
2233 TODO_update_ssa | TODO_cleanup_cfg, /* todo_flags_finish */
2234 };
2235
2236 template <bool O0>
2237 unsigned int
2238 pass_lower_switch<O0>::execute (function *fun)
2239 {
2240 basic_block bb;
2241 bool expanded = false;
2242
2243 auto_vec<gimple *> switch_statements;
2244 switch_statements.create (1);
2245
2246 FOR_EACH_BB_FN (bb, fun)
2247 {
2248 gimple *stmt = last_stmt (bb);
2249 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
2250 switch_statements.safe_push (stmt);
2251 }
2252
2253 for (unsigned i = 0; i < switch_statements.length (); i++)
2254 {
2255 gimple *stmt = switch_statements[i];
2256 if (dump_file)
2257 {
2258 expanded_location loc = expand_location (gimple_location (stmt));
2259
2260 fprintf (dump_file, "beginning to process the following "
2261 "SWITCH statement (%s:%d) : ------- \n",
2262 loc.file, loc.line);
2263 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2264 putc ('\n', dump_file);
2265 }
2266
2267 gswitch *swtch = dyn_cast<gswitch *> (stmt);
2268 if (swtch)
2269 {
2270 switch_decision_tree dt (swtch);
2271 expanded |= dt.analyze_switch_statement ();
2272 }
2273 }
2274
2275 if (expanded)
2276 {
2277 free_dominance_info (CDI_DOMINATORS);
2278 free_dominance_info (CDI_POST_DOMINATORS);
2279 mark_virtual_operands_for_renaming (cfun);
2280 }
2281
2282 return 0;
2283 }
2284
2285 } // anon namespace
2286
2287 gimple_opt_pass *
2288 make_pass_lower_switch_O0 (gcc::context *ctxt)
2289 {
2290 return new pass_lower_switch<true> (ctxt);
2291 }

2292 gimple_opt_pass *
2293 make_pass_lower_switch (gcc::context *ctxt)
2294 {
2295 return new pass_lower_switch<false> (ctxt);
2296 }
2297
2298