inclhack.def (hpux_imaginary_i): Remove spaces.
[gcc.git] / gcc / tree-ssa-math-opts.c
1 /* Global, SSA-based optimizations using mathematical identities.
2 Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Currently, the only mini-pass in this file tries to CSE reciprocal
21 operations. These are common in sequences such as this one:
22
23 modulus = sqrt(x*x + y*y + z*z);
24 x = x / modulus;
25 y = y / modulus;
26 z = z / modulus;
27
28 that can be optimized to
29
30 modulus = sqrt(x*x + y*y + z*z);
31 rmodulus = 1.0 / modulus;
32 x = x * rmodulus;
33 y = y * rmodulus;
34 z = z * rmodulus;
35
36 We do this for loop invariant divisors, and with this pass whenever
37 we notice that a division has the same divisor multiple times.
38
39 Of course, like in PRE, we don't insert a division if a dominator
40 already has one. However, this cannot be done as an extension of
41 PRE for several reasons.
42
43 First of all, with some experiments it was found out that the
44 transformation is not always useful if there are only two divisions
   by the same divisor.  This is probably because modern processors
46 can pipeline the divisions; on older, in-order processors it should
47 still be effective to optimize two divisions by the same number.
48 We make this a param, and it shall be called N in the remainder of
49 this comment.
50
51 Second, if trapping math is active, we have less freedom on where
52 to insert divisions: we can only do so in basic blocks that already
53 contain one. (If divisions don't trap, instead, we can insert
54 divisions elsewhere, which will be in blocks that are common dominators
55 of those that have the division).
56
57 We really don't want to compute the reciprocal unless a division will
58 be found. To do this, we won't insert the division in a basic block
59 that has less than N divisions *post-dominating* it.
60
61 The algorithm constructs a subset of the dominator tree, holding the
62 blocks containing the divisions and the common dominators to them,
63 and walk it twice. The first walk is in post-order, and it annotates
64 each block with the number of divisions that post-dominate it: this
65 gives information on where divisions can be inserted profitably.
66 The second walk is in pre-order, and it inserts divisions as explained
67 above, and replaces divisions by multiplications.
68
69 In the best case, the cost of the pass is O(n_statements). In the
70 worst-case, the cost is due to creating the dominator tree subset,
71 with a cost of O(n_basic_blocks ^ 2); however this can only happen
72 for n_statements / n_basic_blocks statements. So, the amortized cost
73 of creating the dominator tree subset is O(n_basic_blocks) and the
74 worst-case cost of the pass is O(n_statements * n_basic_blocks).
75
76 More practically, the cost will be small because there are few
77 divisions, and they tend to be in the same basic block, so insert_bb
78 is called very few times.
79
80 If we did this using domwalk.c, an efficient implementation would have
81 to work on all the variables in a single pass, because we could not
82 work on just a subset of the dominator tree, as we do now, and the
83 cost would also be something like O(n_statements * n_basic_blocks).
84 The data structures would be more complex in order to work on all the
85 variables in a single pass. */
86
87 #include "config.h"
88 #include "system.h"
89 #include "coretypes.h"
90 #include "tm.h"
91 #include "flags.h"
92 #include "tree.h"
93 #include "tree-flow.h"
94 #include "real.h"
95 #include "timevar.h"
96 #include "tree-pass.h"
97 #include "alloc-pool.h"
98 #include "basic-block.h"
99 #include "target.h"
100 #include "diagnostic.h"
101 #include "rtl.h"
102 #include "expr.h"
103 #include "optabs.h"
104
/* This structure represents one basic block that either computes a
   division, or is a common dominator for basic blocks that compute a
   division.  One instance is hung off BB->aux while the pass runs.  */
struct occurrence {
  /* The basic block represented by this structure.  */
  basic_block bb;

  /* If non-NULL, the SSA_NAME holding the definition for a reciprocal
     inserted in BB.  */
  tree recip_def;

  /* If non-NULL, the GIMPLE_ASSIGN for a reciprocal computation that
     was inserted in BB.  */
  gimple recip_def_stmt;

  /* Pointer to a list of "struct occurrence"s for blocks dominated
     by BB.  */
  struct occurrence *children;

  /* Pointer to the next "struct occurrence"s in the list of blocks
     sharing a common dominator.  */
  struct occurrence *next;

  /* The number of divisions that are in BB before compute_merit.  The
     number of divisions that are in BB or post-dominate it after
     compute_merit.  */
  int num_divisions;

  /* True if the basic block has a division, false if it is a common
     dominator for basic blocks that do.  If it is false and trapping
     math is active, BB is not a candidate for inserting a reciprocal.  */
  bool bb_has_division;
};
138
139
/* The instance of "struct occurrence" representing the highest
   interesting block in the dominator tree; head of the list of
   dominator-tree roots built by insert_bb.  */
static struct occurrence *occ_head;

/* Allocation pool for getting instances of "struct occurrence".  */
static alloc_pool occ_pool;
146
147
148
149 /* Allocate and return a new struct occurrence for basic block BB, and
150 whose children list is headed by CHILDREN. */
151 static struct occurrence *
152 occ_new (basic_block bb, struct occurrence *children)
153 {
154 struct occurrence *occ;
155
156 bb->aux = occ = (struct occurrence *) pool_alloc (occ_pool);
157 memset (occ, 0, sizeof (struct occurrence));
158
159 occ->bb = bb;
160 occ->children = children;
161 return occ;
162 }
163
164
/* Insert NEW_OCC into our subset of the dominator tree.  P_HEAD points to a
   list of "struct occurrence"s, one per basic block, having IDOM as
   their common dominator.

   We try to insert NEW_OCC as deep as possible in the tree, and we also
   insert any other block that is a common dominator for BB and one
   block already in the tree.  */

static void
insert_bb (struct occurrence *new_occ, basic_block idom,
	   struct occurrence **p_head)
{
  struct occurrence *occ, **p_occ;

  /* Walk the sibling list rooted at *P_HEAD, comparing NEW_OCC's block
     against each existing entry's block via their nearest common
     dominator.  */
  for (p_occ = p_head; (occ = *p_occ) != NULL; )
    {
      basic_block bb = new_occ->bb, occ_bb = occ->bb;
      basic_block dom = nearest_common_dominator (CDI_DOMINATORS, occ_bb, bb);
      if (dom == bb)
	{
	  /* BB dominates OCC_BB.  OCC becomes NEW_OCC's child: remove OCC
	     from its list.  */
	  *p_occ = occ->next;
	  occ->next = new_occ->children;
	  new_occ->children = occ;

	  /* Try the next block (it may as well be dominated by BB).  */
	}

      else if (dom == occ_bb)
	{
	  /* OCC_BB dominates BB.  Tail recurse to look deeper.  */
	  insert_bb (new_occ, dom, &occ->children);
	  return;
	}

      else if (dom != idom)
	{
	  /* DOM is a strictly deeper common dominator than IDOM, so it
	     cannot already be in the tree.  */
	  gcc_assert (!dom->aux);

	  /* There is a dominator between IDOM and BB, add it and make
	     two children out of NEW_OCC and OCC.  First, remove OCC from
	     its list.  */
	  *p_occ = occ->next;
	  new_occ->next = occ;
	  occ->next = NULL;

	  /* None of the previous blocks has DOM as a dominator: if we tail
	     recursed, we would reexamine them uselessly. Just switch BB with
	     DOM, and go on looking for blocks dominated by DOM.  */
	  new_occ = occ_new (dom, new_occ);
	}

      else
	{
	  /* Nothing special, go on with the next element.  */
	  p_occ = &occ->next;
	}
    }

  /* No place was found as a child of IDOM.  Make BB a sibling of IDOM.  */
  new_occ->next = *p_head;
  *p_head = new_occ;
}
229
230 /* Register that we found a division in BB. */
231
232 static inline void
233 register_division_in (basic_block bb)
234 {
235 struct occurrence *occ;
236
237 occ = (struct occurrence *) bb->aux;
238 if (!occ)
239 {
240 occ = occ_new (bb, NULL);
241 insert_bb (occ, ENTRY_BLOCK_PTR, &occ_head);
242 }
243
244 occ->bb_has_division = true;
245 occ->num_divisions++;
246 }
247
248
249 /* Compute the number of divisions that postdominate each block in OCC and
250 its children. */
251
252 static void
253 compute_merit (struct occurrence *occ)
254 {
255 struct occurrence *occ_child;
256 basic_block dom = occ->bb;
257
258 for (occ_child = occ->children; occ_child; occ_child = occ_child->next)
259 {
260 basic_block bb;
261 if (occ_child->children)
262 compute_merit (occ_child);
263
264 if (flag_exceptions)
265 bb = single_noncomplex_succ (dom);
266 else
267 bb = dom;
268
269 if (dominated_by_p (CDI_POST_DOMINATORS, bb, occ_child->bb))
270 occ->num_divisions += occ_child->num_divisions;
271 }
272 }
273
274
275 /* Return whether USE_STMT is a floating-point division by DEF. */
276 static inline bool
277 is_division_by (gimple use_stmt, tree def)
278 {
279 return is_gimple_assign (use_stmt)
280 && gimple_assign_rhs_code (use_stmt) == RDIV_EXPR
281 && gimple_assign_rhs2 (use_stmt) == def
282 /* Do not recognize x / x as valid division, as we are getting
283 confused later by replacing all immediate uses x in such
284 a stmt. */
285 && gimple_assign_rhs1 (use_stmt) != def;
286 }
287
/* Walk the subset of the dominator tree rooted at OCC, setting the
   RECIP_DEF field to a definition of 1.0 / DEF that can be used in
   the given basic block.  The field may be left NULL, of course,
   if it is not possible or profitable to do the optimization.

   DEF_BSI is an iterator pointing at the statement defining DEF.
   If RECIP_DEF is set, a dominator already has a computation that can
   be used.  */

static void
insert_reciprocals (gimple_stmt_iterator *def_gsi, struct occurrence *occ,
		    tree def, tree recip_def, int threshold)
{
  tree type;
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  struct occurrence *occ_child;

  /* Insert a reciprocal here only if no dominator already provides one,
     insertion is legal (the block has a division, or trapping math is
     off), and enough divisions post-dominate this block to pay off.  */
  if (!recip_def
      && (occ->bb_has_division || !flag_trapping_math)
      && occ->num_divisions >= threshold)
    {
      /* Make a variable with the replacement and substitute it.  */
      type = TREE_TYPE (def);
      recip_def = make_rename_temp (type, "reciptmp");
      new_stmt = gimple_build_assign_with_ops (RDIV_EXPR, recip_def,
					       build_one_cst (type), def);

      if (occ->bb_has_division)
        {
          /* Case 1: insert before an existing division.  */
          gsi = gsi_after_labels (occ->bb);
          while (!gsi_end_p (gsi) && !is_division_by (gsi_stmt (gsi), def))
	    gsi_next (&gsi);

          gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
        }
      else if (def_gsi && occ->bb == def_gsi->bb)
        {
          /* Case 2: insert right after the definition.  Note that this will
	     never happen if the definition statement can throw, because in
	     that case the sole successor of the statement's basic block will
	     dominate all the uses as well.  */
          gsi_insert_after (def_gsi, new_stmt, GSI_NEW_STMT);
        }
      else
        {
          /* Case 3: insert in a basic block not containing defs/uses.  */
          gsi = gsi_after_labels (occ->bb);
          gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
        }

      occ->recip_def_stmt = new_stmt;
    }

  /* Propagate the reciprocal (ours, or the one from a dominator) down
     into the children.  */
  occ->recip_def = recip_def;
  for (occ_child = occ->children; occ_child; occ_child = occ_child->next)
    insert_reciprocals (def_gsi, occ_child, def, recip_def, threshold);
}
347
348
349 /* Replace the division at USE_P with a multiplication by the reciprocal, if
350 possible. */
351
352 static inline void
353 replace_reciprocal (use_operand_p use_p)
354 {
355 gimple use_stmt = USE_STMT (use_p);
356 basic_block bb = gimple_bb (use_stmt);
357 struct occurrence *occ = (struct occurrence *) bb->aux;
358
359 if (optimize_bb_for_speed_p (bb)
360 && occ->recip_def && use_stmt != occ->recip_def_stmt)
361 {
362 gimple_assign_set_rhs_code (use_stmt, MULT_EXPR);
363 SET_USE (use_p, occ->recip_def);
364 fold_stmt_inplace (use_stmt);
365 update_stmt (use_stmt);
366 }
367 }
368
369
370 /* Free OCC and return one more "struct occurrence" to be freed. */
371
372 static struct occurrence *
373 free_bb (struct occurrence *occ)
374 {
375 struct occurrence *child, *next;
376
377 /* First get the two pointers hanging off OCC. */
378 next = occ->next;
379 child = occ->children;
380 occ->bb->aux = NULL;
381 pool_free (occ_pool, occ);
382
383 /* Now ensure that we don't recurse unless it is necessary. */
384 if (!child)
385 return next;
386 else
387 {
388 while (next)
389 next = free_bb (next);
390
391 return child;
392 }
393 }
394
395
/* Look for floating-point divisions among DEF's uses, and try to
   replace them by multiplications with the reciprocal.  Add
   as many statements computing the reciprocal as needed.

   DEF must be a GIMPLE register of a floating-point type.  */

static void
execute_cse_reciprocals_1 (gimple_stmt_iterator *def_gsi, tree def)
{
  use_operand_p use_p;
  imm_use_iterator use_iter;
  struct occurrence *occ;
  int count = 0, threshold;

  gcc_assert (FLOAT_TYPE_P (TREE_TYPE (def)) && is_gimple_reg (def));

  /* Record every basic block containing a division by DEF, counting
     the divisions as we go.  */
  FOR_EACH_IMM_USE_FAST (use_p, use_iter, def)
    {
      gimple use_stmt = USE_STMT (use_p);
      if (is_division_by (use_stmt, def))
	{
	  register_division_in (gimple_bb (use_stmt));
	  count++;
	}
    }

  /* Do the expensive part only if we can hope to optimize something.  */
  threshold = targetm.min_divisions_for_recip_mul (TYPE_MODE (TREE_TYPE (def)));
  if (count >= threshold)
    {
      gimple use_stmt;
      /* Annotate the tree (compute_merit) and insert the reciprocal
	 statements; see the file comment for the two-walk algorithm.  */
      for (occ = occ_head; occ; occ = occ->next)
	{
	  compute_merit (occ);
	  insert_reciprocals (def_gsi, occ, def, NULL, threshold);
	}

      /* Rewrite each division by DEF into a multiplication.  */
      FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, def)
	{
	  if (is_division_by (use_stmt, def))
	    {
	      FOR_EACH_IMM_USE_ON_STMT (use_p, use_iter)
		replace_reciprocal (use_p);
	    }
	}
    }

  /* Tear down the occurrence tree and clear BB->aux for the next call.  */
  for (occ = occ_head; occ; )
    occ = free_bb (occ);

  occ_head = NULL;
}
448
449 static bool
450 gate_cse_reciprocals (void)
451 {
452 return optimize && flag_reciprocal_math;
453 }
454
/* Go through all the floating-point SSA_NAMEs, and call
   execute_cse_reciprocals_1 on each of them.  */
static unsigned int
execute_cse_reciprocals (void)
{
  basic_block bb;
  tree arg;

  /* Size the pool with a rough guess at the number of interesting
     blocks.  */
  occ_pool = create_alloc_pool ("dominators for recip",
				sizeof (struct occurrence),
				n_basic_blocks / 3 + 1);

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

#ifdef ENABLE_CHECKING
  /* BB->aux must be free for the occurrence nodes.  */
  FOR_EACH_BB (bb)
    gcc_assert (!bb->aux);
#endif

  /* First handle the default definitions of floating-point
     parameters.  */
  for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = TREE_CHAIN (arg))
    if (gimple_default_def (cfun, arg)
	&& FLOAT_TYPE_P (TREE_TYPE (arg))
	&& is_gimple_reg (arg))
      execute_cse_reciprocals_1 (NULL, gimple_default_def (cfun, arg));

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      gimple phi;
      tree def;

      /* Floating-point PHI results.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  def = PHI_RESULT (phi);
	  if (FLOAT_TYPE_P (TREE_TYPE (def))
	      && is_gimple_reg (def))
	    execute_cse_reciprocals_1 (NULL, def);
	}

      /* Floating-point SSA names defined by ordinary statements.  */
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (gimple_has_lhs (stmt)
	      && (def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF)) != NULL
	      && FLOAT_TYPE_P (TREE_TYPE (def))
	      && TREE_CODE (def) == SSA_NAME)
	    execute_cse_reciprocals_1 (&gsi, def);
	}

      if (optimize_bb_for_size_p (bb))
        continue;

      /* Scan for a/func(b) and convert it to reciprocal a*rfunc(b).  */
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree fndecl;

	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_code (stmt) == RDIV_EXPR)
	    {
	      tree arg1 = gimple_assign_rhs2 (stmt);
	      gimple stmt1;

	      if (TREE_CODE (arg1) != SSA_NAME)
		continue;

	      stmt1 = SSA_NAME_DEF_STMT (arg1);

	      /* The divisor must be the result of a call to a normal or
		 machine-dependent builtin.  */
	      if (is_gimple_call (stmt1)
		  && gimple_call_lhs (stmt1)
		  && (fndecl = gimple_call_fndecl (stmt1))
		  && (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
		      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD))
		{
		  enum built_in_function code;
		  bool md_code;

		  code = DECL_FUNCTION_CODE (fndecl);
		  md_code = DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD;

		  /* Ask the target for a reciprocal variant of the
		     builtin; skip the statement if there is none.  */
		  fndecl = targetm.builtin_reciprocal (code, md_code, false);
		  if (!fndecl)
		    continue;

		  gimple_call_set_fndecl (stmt1, fndecl);
		  update_stmt (stmt1);

		  /* Turn the division into a multiplication.  */
		  gimple_assign_set_rhs_code (stmt, MULT_EXPR);
		  fold_stmt_inplace (stmt);
		  update_stmt (stmt);
		}
	    }
	}
    }

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  free_alloc_pool (occ_pool);
  return 0;
}
559
/* Pass descriptor for the reciprocal CSE pass ("recip").  */
struct gimple_opt_pass pass_cse_reciprocals =
{
 {
  GIMPLE_PASS,
  "recip",				/* name */
  gate_cse_reciprocals,			/* gate */
  execute_cse_reciprocals,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_update_ssa | TODO_verify_ssa
    | TODO_verify_stmts			/* todo_flags_finish */
 }
};
579
580 /* Records an occurrence at statement USE_STMT in the vector of trees
581 STMTS if it is dominated by *TOP_BB or dominates it or this basic block
582 is not yet initialized. Returns true if the occurrence was pushed on
583 the vector. Adjusts *TOP_BB to be the basic block dominating all
584 statements in the vector. */
585
586 static bool
587 maybe_record_sincos (VEC(gimple, heap) **stmts,
588 basic_block *top_bb, gimple use_stmt)
589 {
590 basic_block use_bb = gimple_bb (use_stmt);
591 if (*top_bb
592 && (*top_bb == use_bb
593 || dominated_by_p (CDI_DOMINATORS, use_bb, *top_bb)))
594 VEC_safe_push (gimple, heap, *stmts, use_stmt);
595 else if (!*top_bb
596 || dominated_by_p (CDI_DOMINATORS, *top_bb, use_bb))
597 {
598 VEC_safe_push (gimple, heap, *stmts, use_stmt);
599 *top_bb = use_bb;
600 }
601 else
602 return false;
603
604 return true;
605 }
606
607 /* Look for sin, cos and cexpi calls with the same argument NAME and
608 create a single call to cexpi CSEing the result in this case.
609 We first walk over all immediate uses of the argument collecting
610 statements that we can CSE in a vector and in a second pass replace
611 the statement rhs with a REALPART or IMAGPART expression on the
612 result of the cexpi call we insert before the use statement that
613 dominates all other candidates. */
614
615 static void
616 execute_cse_sincos_1 (tree name)
617 {
618 gimple_stmt_iterator gsi;
619 imm_use_iterator use_iter;
620 tree fndecl, res, type;
621 gimple def_stmt, use_stmt, stmt;
622 int seen_cos = 0, seen_sin = 0, seen_cexpi = 0;
623 VEC(gimple, heap) *stmts = NULL;
624 basic_block top_bb = NULL;
625 int i;
626
627 type = TREE_TYPE (name);
628 FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, name)
629 {
630 if (gimple_code (use_stmt) != GIMPLE_CALL
631 || !gimple_call_lhs (use_stmt)
632 || !(fndecl = gimple_call_fndecl (use_stmt))
633 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
634 continue;
635
636 switch (DECL_FUNCTION_CODE (fndecl))
637 {
638 CASE_FLT_FN (BUILT_IN_COS):
639 seen_cos |= maybe_record_sincos (&stmts, &top_bb, use_stmt) ? 1 : 0;
640 break;
641
642 CASE_FLT_FN (BUILT_IN_SIN):
643 seen_sin |= maybe_record_sincos (&stmts, &top_bb, use_stmt) ? 1 : 0;
644 break;
645
646 CASE_FLT_FN (BUILT_IN_CEXPI):
647 seen_cexpi |= maybe_record_sincos (&stmts, &top_bb, use_stmt) ? 1 : 0;
648 break;
649
650 default:;
651 }
652 }
653
654 if (seen_cos + seen_sin + seen_cexpi <= 1)
655 {
656 VEC_free(gimple, heap, stmts);
657 return;
658 }
659
660 /* Simply insert cexpi at the beginning of top_bb but not earlier than
661 the name def statement. */
662 fndecl = mathfn_built_in (type, BUILT_IN_CEXPI);
663 if (!fndecl)
664 return;
665 res = make_rename_temp (TREE_TYPE (TREE_TYPE (fndecl)), "sincostmp");
666 stmt = gimple_build_call (fndecl, 1, name);
667 gimple_call_set_lhs (stmt, res);
668
669 def_stmt = SSA_NAME_DEF_STMT (name);
670 if (!SSA_NAME_IS_DEFAULT_DEF (name)
671 && gimple_code (def_stmt) != GIMPLE_PHI
672 && gimple_bb (def_stmt) == top_bb)
673 {
674 gsi = gsi_for_stmt (def_stmt);
675 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
676 }
677 else
678 {
679 gsi = gsi_after_labels (top_bb);
680 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
681 }
682 update_stmt (stmt);
683
684 /* And adjust the recorded old call sites. */
685 for (i = 0; VEC_iterate(gimple, stmts, i, use_stmt); ++i)
686 {
687 tree rhs = NULL;
688 fndecl = gimple_call_fndecl (use_stmt);
689
690 switch (DECL_FUNCTION_CODE (fndecl))
691 {
692 CASE_FLT_FN (BUILT_IN_COS):
693 rhs = fold_build1 (REALPART_EXPR, type, res);
694 break;
695
696 CASE_FLT_FN (BUILT_IN_SIN):
697 rhs = fold_build1 (IMAGPART_EXPR, type, res);
698 break;
699
700 CASE_FLT_FN (BUILT_IN_CEXPI):
701 rhs = res;
702 break;
703
704 default:;
705 gcc_unreachable ();
706 }
707
708 /* Replace call with a copy. */
709 stmt = gimple_build_assign (gimple_call_lhs (use_stmt), rhs);
710
711 gsi = gsi_for_stmt (use_stmt);
712 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
713 gsi_remove (&gsi, true);
714 }
715
716 VEC_free(gimple, heap, stmts);
717 }
718
719 /* Go through all calls to sin, cos and cexpi and call execute_cse_sincos_1
720 on the SSA_NAME argument of each of them. */
721
722 static unsigned int
723 execute_cse_sincos (void)
724 {
725 basic_block bb;
726
727 calculate_dominance_info (CDI_DOMINATORS);
728
729 FOR_EACH_BB (bb)
730 {
731 gimple_stmt_iterator gsi;
732
733 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
734 {
735 gimple stmt = gsi_stmt (gsi);
736 tree fndecl;
737
738 if (is_gimple_call (stmt)
739 && gimple_call_lhs (stmt)
740 && (fndecl = gimple_call_fndecl (stmt))
741 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
742 {
743 tree arg;
744
745 switch (DECL_FUNCTION_CODE (fndecl))
746 {
747 CASE_FLT_FN (BUILT_IN_COS):
748 CASE_FLT_FN (BUILT_IN_SIN):
749 CASE_FLT_FN (BUILT_IN_CEXPI):
750 arg = gimple_call_arg (stmt, 0);
751 if (TREE_CODE (arg) == SSA_NAME)
752 execute_cse_sincos_1 (arg);
753 break;
754
755 default:;
756 }
757 }
758 }
759 }
760
761 free_dominance_info (CDI_DOMINATORS);
762 return 0;
763 }
764
765 static bool
766 gate_cse_sincos (void)
767 {
768 /* Make sure we have either sincos or cexp. */
769 return (TARGET_HAS_SINCOS
770 || TARGET_C99_FUNCTIONS)
771 && optimize;
772 }
773
/* Pass descriptor for the sin/cos/cexpi CSE pass ("sincos").  */
struct gimple_opt_pass pass_cse_sincos =
{
 {
  GIMPLE_PASS,
  "sincos",				/* name */
  gate_cse_sincos,			/* gate */
  execute_cse_sincos,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_update_ssa | TODO_verify_ssa
    | TODO_verify_stmts			/* todo_flags_finish */
 }
};
793
/* Find all expressions in the form of sqrt(a/b) and
   convert them to rsqrt(b/a).  */

static unsigned int
execute_convert_to_rsqrt (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree fndecl;

	  /* Look for a call to a normal or machine-dependent builtin
	     with an lhs.  */
	  if (is_gimple_call (stmt)
	      && gimple_call_lhs (stmt)
	      && (fndecl = gimple_call_fndecl (stmt))
	      && (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
		  || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD))
	    {
	      enum built_in_function code;
	      bool md_code;
	      tree arg1;
	      gimple stmt1;

	      code = DECL_FUNCTION_CODE (fndecl);
	      md_code = DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD;

	      /* Ask the target for a reciprocal square-root variant;
		 skip the statement if there is none.  */
	      fndecl = targetm.builtin_reciprocal (code, md_code, true);
	      if (!fndecl)
		continue;

	      arg1 = gimple_call_arg (stmt, 0);

	      if (TREE_CODE (arg1) != SSA_NAME)
		continue;

	      stmt1 = SSA_NAME_DEF_STMT (arg1);

	      /* The argument must itself be a division a/b.  */
	      if (is_gimple_assign (stmt1)
		  && gimple_assign_rhs_code (stmt1) == RDIV_EXPR)
		{
		  tree arg10, arg11;

		  arg10 = gimple_assign_rhs1 (stmt1);
		  arg11 = gimple_assign_rhs2 (stmt1);

		  /* Swap operands of RDIV_EXPR.  */
		  gimple_assign_set_rhs1 (stmt1, arg11);
		  gimple_assign_set_rhs2 (stmt1, arg10);
		  fold_stmt_inplace (stmt1);
		  update_stmt (stmt1);

		  gimple_call_set_fndecl (stmt, fndecl);
		  update_stmt (stmt);
		}
	    }
	}
    }

  return 0;
}
859
860 static bool
861 gate_convert_to_rsqrt (void)
862 {
863 return flag_unsafe_math_optimizations && optimize;
864 }
865
/* Pass descriptor for the rsqrt conversion pass ("rsqrt").  */
struct gimple_opt_pass pass_convert_to_rsqrt =
{
 {
  GIMPLE_PASS,
  "rsqrt",				/* name */
  gate_convert_to_rsqrt,		/* gate */
  execute_convert_to_rsqrt,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_update_ssa | TODO_verify_ssa
    | TODO_verify_stmts			/* todo_flags_finish */
 }
};
885
/* A symbolic number is used to detect byte permutation and selection
   patterns.  Therefore the field N contains an artificial number
   consisting of byte size markers:

   0	  - byte has the value 0
   1..size - byte contains the content of the byte
   number indexed with that value minus one  */

struct symbolic_number {
  /* The per-byte markers, one marker per byte of the tracked value.  */
  unsigned HOST_WIDEST_INT n;
  /* Size of the tracked value in bytes.  */
  int size;
};
898
899 /* Perform a SHIFT or ROTATE operation by COUNT bits on symbolic
900 number N. Return false if the requested operation is not permitted
901 on a symbolic number. */
902
903 static inline bool
904 do_shift_rotate (enum tree_code code,
905 struct symbolic_number *n,
906 int count)
907 {
908 if (count % 8 != 0)
909 return false;
910
911 /* Zero out the extra bits of N in order to avoid them being shifted
912 into the significant bits. */
913 if (n->size < (int)sizeof (HOST_WIDEST_INT))
914 n->n &= ((unsigned HOST_WIDEST_INT)1 << (n->size * BITS_PER_UNIT)) - 1;
915
916 switch (code)
917 {
918 case LSHIFT_EXPR:
919 n->n <<= count;
920 break;
921 case RSHIFT_EXPR:
922 n->n >>= count;
923 break;
924 case LROTATE_EXPR:
925 n->n = (n->n << count) | (n->n >> ((n->size * BITS_PER_UNIT) - count));
926 break;
927 case RROTATE_EXPR:
928 n->n = (n->n >> count) | (n->n << ((n->size * BITS_PER_UNIT) - count));
929 break;
930 default:
931 return false;
932 }
933 return true;
934 }
935
936 /* Perform sanity checking for the symbolic number N and the gimple
937 statement STMT. */
938
939 static inline bool
940 verify_symbolic_number_p (struct symbolic_number *n, gimple stmt)
941 {
942 tree lhs_type;
943
944 lhs_type = gimple_expr_type (stmt);
945
946 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
947 return false;
948
949 if (TYPE_PRECISION (lhs_type) != n->size * BITS_PER_UNIT)
950 return false;
951
952 return true;
953 }
954
/* find_bswap_1 invokes itself recursively with N and tries to perform
   the operation given by the rhs of STMT on the result.  If the
   operation could successfully be executed the function returns the
   tree expression of the source operand and NULL otherwise.  */

static tree
find_bswap_1 (gimple stmt, struct symbolic_number *n, int limit)
{
  enum tree_code code;
  tree rhs1, rhs2 = NULL;
  gimple rhs1_stmt, rhs2_stmt;
  tree source_expr1;
  enum gimple_rhs_class rhs_class;

  /* LIMIT bounds the recursion depth; only assignments can be
     interpreted.  */
  if (!limit || !is_gimple_assign (stmt))
    return NULL_TREE;

  rhs1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (rhs1) != SSA_NAME)
    return NULL_TREE;

  code = gimple_assign_rhs_code (stmt);
  rhs_class = gimple_assign_rhs_class (stmt);
  rhs1_stmt = SSA_NAME_DEF_STMT (rhs1);

  if (rhs_class == GIMPLE_BINARY_RHS)
    rhs2 = gimple_assign_rhs2 (stmt);

  /* Handle unary rhs and binary rhs with integer constants as second
     operand.  */

  if (rhs_class == GIMPLE_UNARY_RHS
      || (rhs_class == GIMPLE_BINARY_RHS
	  && TREE_CODE (rhs2) == INTEGER_CST))
    {
      /* These are the only operations we can model on a symbolic
	 number.  */
      if (code != BIT_AND_EXPR
	  && code != LSHIFT_EXPR
	  && code != RSHIFT_EXPR
	  && code != LROTATE_EXPR
	  && code != RROTATE_EXPR
	  && code != NOP_EXPR
	  && code != CONVERT_EXPR)
	return NULL_TREE;

      source_expr1 = find_bswap_1 (rhs1_stmt, n, limit - 1);

      /* If find_bswap_1 returned NULL STMT is a leaf node and we have
	 to initialize the symbolic number.  */
      if (!source_expr1)
	{
	  /* Set up the symbolic number N by setting each byte to a
	     value between 1 and the byte size of rhs1.  The highest
	     order byte is set to 1 and the lowest order byte to
	     n.size.  */
	  n->size = TYPE_PRECISION (TREE_TYPE (rhs1));
	  if (n->size % BITS_PER_UNIT != 0)
	    return NULL_TREE;
	  n->size /= BITS_PER_UNIT;
	  n->n = (sizeof (HOST_WIDEST_INT) < 8 ? 0 :
		  (unsigned HOST_WIDEST_INT)0x01020304 << 32 | 0x05060708);
	  n->n >>= (sizeof (HOST_WIDEST_INT) - n->size) * BITS_PER_UNIT;

	  source_expr1 = rhs1;
	}

      /* Apply STMT's operation to the symbolic number.  */
      switch (code)
	{
	case BIT_AND_EXPR:
	  {
	    int i;
	    unsigned HOST_WIDEST_INT val = widest_int_cst_value (rhs2);
	    unsigned HOST_WIDEST_INT tmp = val;

	    /* Only constants masking full bytes are allowed.  */
	    for (i = 0; i < n->size; i++, tmp >>= BITS_PER_UNIT)
	      if ((tmp & 0xff) != 0 && (tmp & 0xff) != 0xff)
		return NULL_TREE;

	    n->n &= val;
	  }
	  break;
	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	case LROTATE_EXPR:
	case RROTATE_EXPR:
	  if (!do_shift_rotate (code, n, (int)TREE_INT_CST_LOW (rhs2)))
	    return NULL_TREE;
	  break;
	CASE_CONVERT:
	  {
	    int type_size;

	    type_size = TYPE_PRECISION (gimple_expr_type (stmt));
	    if (type_size % BITS_PER_UNIT != 0)
	      return NULL_TREE;

	    if (type_size / BITS_PER_UNIT < (int)(sizeof (HOST_WIDEST_INT)))
	      {
		/* If STMT casts to a smaller type mask out the bits not
		   belonging to the target type.  */
		n->size = type_size / BITS_PER_UNIT;
		n->n &= ((unsigned HOST_WIDEST_INT)1 << type_size) - 1;
	      }
	  }
	  break;
	default:
	  return NULL_TREE;
	};
      return verify_symbolic_number_p (n, stmt) ? source_expr1 : NULL;
    }

  /* Handle binary rhs.  */

  if (rhs_class == GIMPLE_BINARY_RHS)
    {
      struct symbolic_number n1, n2;
      tree source_expr2;

      if (code != BIT_IOR_EXPR)
	return NULL_TREE;

      if (TREE_CODE (rhs2) != SSA_NAME)
	return NULL_TREE;

      rhs2_stmt = SSA_NAME_DEF_STMT (rhs2);

      switch (code)
	{
	case BIT_IOR_EXPR:
	  source_expr1 = find_bswap_1 (rhs1_stmt, &n1, limit - 1);

	  if (!source_expr1)
	    return NULL_TREE;

	  source_expr2 = find_bswap_1 (rhs2_stmt, &n2, limit - 1);

	  /* Both operands must trace back to the same source with the
	     same width for the OR to be part of a single byte swap.  */
	  if (source_expr1 != source_expr2
	      || n1.size != n2.size)
	    return NULL_TREE;

	  n->size = n1.size;
	  n->n = n1.n | n2.n;

	  if (!verify_symbolic_number_p (n, stmt))
	    return NULL_TREE;

	  break;
	default:
	  return NULL_TREE;
	}
      return source_expr1;
    }
  return NULL_TREE;
}
1110
1111 /* Check if STMT completes a bswap implementation consisting of ORs,
1112 SHIFTs and ANDs. Return the source tree expression on which the
1113 byte swap is performed and NULL if no bswap was found. */
1114
1115 static tree
1116 find_bswap (gimple stmt)
1117 {
1118 /* The number which the find_bswap result should match in order to
1119 have a full byte swap. The insignificant bytes are masked out
1120 before using it. */
1121 unsigned HOST_WIDEST_INT cmp =
1122 sizeof (HOST_WIDEST_INT) < 8 ? 0 :
1123 (unsigned HOST_WIDEST_INT)0x08070605 << 32 | 0x04030201;
1124
1125 struct symbolic_number n;
1126 tree source_expr;
1127
1128 /* The last parameter determines the depth search limit. It usually
1129 correlates directly to the number of bytes to be touched. We
1130 increase that number by one here in order to also cover signed ->
1131 unsigned conversions of the src operand as can be seen in
1132 libgcc. */
1133 source_expr = find_bswap_1 (stmt, &n,
1134 TREE_INT_CST_LOW (
1135 TYPE_SIZE_UNIT (gimple_expr_type (stmt))) + 1);
1136
1137 if (!source_expr)
1138 return NULL_TREE;
1139
1140 /* Zero out the extra bits of N and CMP. */
1141 if (n.size < (int)sizeof (HOST_WIDEST_INT))
1142 {
1143 unsigned HOST_WIDEST_INT mask =
1144 ((unsigned HOST_WIDEST_INT)1 << (n.size * BITS_PER_UNIT)) - 1;
1145
1146 n.n &= mask;
1147 cmp &= mask;
1148 }
1149
1150 /* A complete byte swap should make the symbolic number to start
1151 with the largest digit in the highest order byte. */
1152 if (cmp != n.n)
1153 return NULL_TREE;
1154
1155 return source_expr;
1156 }
1157
/* Find manual byte swap implementations and turn them into a bswap
   builtin invocation.  Returns the TODO flags to run after the pass,
   or 0 when nothing changed.  */

static unsigned int
execute_optimize_bswap (void)
{
  basic_block bb;
  bool bswap32_p, bswap64_p;
  bool changed = false;
  tree bswap32_type = NULL_TREE, bswap64_type = NULL_TREE;

  /* The symbolic number machinery in find_bswap packs one byte per
     8 bits; other byte widths are not handled.  */
  if (BITS_PER_UNIT != 8)
    return 0;

  /* find_bswap needs HOST_WIDEST_INT wide enough to hold the 64-bit
     comparison pattern.  */
  if (sizeof (HOST_WIDEST_INT) < 8)
    return 0;

  /* A bswap builtin is only worth emitting when both the decl exists
     and the target has an instruction for the mode.  */
  bswap32_p = (built_in_decls[BUILT_IN_BSWAP32]
	       && optab_handler (bswap_optab, SImode)->insn_code !=
	       CODE_FOR_nothing);
  bswap64_p = (built_in_decls[BUILT_IN_BSWAP64]
	       && optab_handler (bswap_optab, DImode)->insn_code !=
	       CODE_FOR_nothing);

  if (!bswap32_p && !bswap64_p)
    return 0;

  /* Determine the argument type of the builtins.  The code later on
     assumes that the return and argument type are the same.  */
  if (bswap32_p)
    {
      tree fndecl = built_in_decls[BUILT_IN_BSWAP32];
      bswap32_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
    }

  if (bswap64_p)
    {
      tree fndecl = built_in_decls[BUILT_IN_BSWAP64];
      bswap64_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
    }

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree bswap_src, bswap_type;
	  tree bswap_tmp;
	  tree fndecl = NULL_TREE;
	  int type_size;
	  gimple call;

	  /* A manual byte swap necessarily ends in a BIT_IOR_EXPR
	     combining the repositioned bytes; anything else cannot be
	     the root of a candidate.  */
	  if (!is_gimple_assign (stmt)
	      || gimple_assign_rhs_code (stmt) != BIT_IOR_EXPR)
	    continue;

	  type_size = TYPE_PRECISION (gimple_expr_type (stmt));

	  /* Pick the builtin matching the width of the expression;
	     skip widths the target cannot swap.  FNDECL stays
	     NULL_TREE when the matching builtin is unavailable, which
	     is caught just below.  */
	  switch (type_size)
	    {
	    case 32:
	      if (bswap32_p)
		{
		  fndecl = built_in_decls[BUILT_IN_BSWAP32];
		  bswap_type = bswap32_type;
		}
	      break;
	    case 64:
	      if (bswap64_p)
		{
		  fndecl = built_in_decls[BUILT_IN_BSWAP64];
		  bswap_type = bswap64_type;
		}
	      break;
	    default:
	      continue;
	    }

	  if (!fndecl)
	    continue;

	  /* Check whether STMT really is the root of a byte swap of
	     some source expression.  */
	  bswap_src = find_bswap (stmt);

	  if (!bswap_src)
	    continue;

	  changed = true;

	  bswap_tmp = bswap_src;

	  /* Convert the src expression if necessary.  */
	  if (!useless_type_conversion_p (TREE_TYPE (bswap_tmp), bswap_type))
	    {
	      gimple convert_stmt;

	      bswap_tmp = create_tmp_var (bswap_type, "bswapsrc");
	      add_referenced_var (bswap_tmp);
	      bswap_tmp = make_ssa_name (bswap_tmp, NULL);

	      convert_stmt = gimple_build_assign_with_ops (
			       CONVERT_EXPR, bswap_tmp, bswap_src, NULL);
	      gsi_insert_before (&gsi, convert_stmt, GSI_SAME_STMT);
	    }

	  call = gimple_build_call (fndecl, 1, bswap_tmp);

	  bswap_tmp = gimple_assign_lhs (stmt);

	  /* Convert the result if necessary: the call then defines a
	     fresh temporary and a conversion assigns it to the
	     original lhs after the call.  */
	  if (!useless_type_conversion_p (TREE_TYPE (bswap_tmp), bswap_type))
	    {
	      gimple convert_stmt;

	      bswap_tmp = create_tmp_var (bswap_type, "bswapdst");
	      add_referenced_var (bswap_tmp);
	      bswap_tmp = make_ssa_name (bswap_tmp, NULL);
	      convert_stmt = gimple_build_assign_with_ops (
			       CONVERT_EXPR, gimple_assign_lhs (stmt), bswap_tmp, NULL);
	      gsi_insert_after (&gsi, convert_stmt, GSI_SAME_STMT);
	    }

	  gimple_call_set_lhs (call, bswap_tmp);

	  if (dump_file)
	    {
	      fprintf (dump_file, "%d bit bswap implementation found at: ",
		       (int)type_size);
	      print_gimple_stmt (dump_file, stmt, 0, 0);
	    }

	  /* Replace the OR statement with the builtin call.  */
	  gsi_insert_after (&gsi, call, GSI_SAME_STMT);
	  gsi_remove (&gsi, true);
	}
    }

  return (changed ? TODO_dump_func | TODO_update_ssa | TODO_verify_ssa
	  | TODO_verify_stmts : 0);
}
1298
1299 static bool
1300 gate_optimize_bswap (void)
1301 {
1302 return flag_expensive_optimizations && optimize;
1303 }
1304
/* Pass descriptor for the bswap recognition mini-pass.  Requires SSA
   form (PROP_ssa); any TODO flags are returned by the execute hook
   itself rather than listed here.  */

struct gimple_opt_pass pass_optimize_bswap =
{
 {
  GIMPLE_PASS,
  "bswap",				/* name */
  gate_optimize_bswap,			/* gate */
  execute_optimize_bswap,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};