1 /* intrinsics.cc -- D language compiler intrinsics.
2 Copyright (C) 2006-2020 Free Software Foundation, Inc.
4 GCC is free software; you can redistribute it and/or modify
5 it under the terms of the GNU General Public License as published by
6 the Free Software Foundation; either version 3, or (at your option)
7 any later version.
9 GCC is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU General Public License for more details.
14 You should have received a copy of the GNU General Public License
15 along with GCC; see the file COPYING3. If not see
16 <http://www.gnu.org/licenses/>. */
20 #include "coretypes.h"
22 #include "dmd/declaration.h"
23 #include "dmd/identifier.h"
24 #include "dmd/mangle.h"
25 #include "dmd/mangle.h"
26 #include "dmd/module.h"
27 #include "dmd/template.h"
32 #include "fold-const.h"
33 #include "stringpool.h"
39 /* An internal struct used to hold information on D intrinsics. */
43 /* The DECL_FUNCTION_CODE of this decl. */
46 /* The name of the intrinsic. */
49 /* The module where the intrinsic is located. */
52 /* The mangled signature decoration of the intrinsic. */
55 /* True if the intrinsic is only handled in CTFE. */
59 static const intrinsic_decl intrinsic_decls
[] =
61 #define DEF_D_INTRINSIC(CODE, ALIAS, NAME, MODULE, DECO, CTFE) \
62 { INTRINSIC_ ## ALIAS, NAME, MODULE, DECO, CTFE },
64 #include "intrinsics.def"
66 #undef DEF_D_INTRINSIC
69 /* Checks if DECL is an intrinsic or run time library function that requires
70 special processing. Sets DECL_INTRINSIC_CODE so it can be identified
71 later in maybe_expand_intrinsic. */
74 maybe_set_intrinsic (FuncDeclaration
*decl
)
76 if (!decl
->ident
|| decl
->builtin
!= BUILTINunknown
)
79 /* The builtin flag is updated only if we can evaluate the intrinsic
80 at compile-time. Such as the math or bitop intrinsics. */
81 decl
->builtin
= BUILTINno
;
83 /* Check if it's a compiler intrinsic. We only require that any
84 internally recognised intrinsics are declared in a module with
85 an explicit module declaration. */
86 Module
*m
= decl
->getModule ();
91 TemplateInstance
*ti
= decl
->isInstantiated ();
92 TemplateDeclaration
*td
= ti
? ti
->tempdecl
->isTemplateDeclaration () : NULL
;
94 const char *tname
= decl
->ident
->toChars ();
95 const char *tmodule
= m
->md
->toChars ();
96 const char *tdeco
= (td
== NULL
) ? decl
->type
->deco
: NULL
;
98 /* Look through all D intrinsics. */
99 for (size_t i
= 0; i
< (int) INTRINSIC_LAST
; i
++)
101 if (!intrinsic_decls
[i
].name
)
104 if (strcmp (intrinsic_decls
[i
].name
, tname
) != 0
105 || strcmp (intrinsic_decls
[i
].module
, tmodule
) != 0)
108 /* Instantiated functions would have the wrong type deco, get it from the
109 template member instead. */
112 if (!td
|| !td
->onemember
)
115 FuncDeclaration
*fd
= td
->onemember
->isFuncDeclaration ();
120 mangleToBuffer (fd
->type
, &buf
);
121 tdeco
= buf
.extractChars ();
124 /* Matching the type deco may be a bit too strict, as it means that all
125 function attributes that end up in the signature must be kept aligned
126 between the compiler and library declaration. */
127 if (strcmp (intrinsic_decls
[i
].deco
, tdeco
) == 0)
129 intrinsic_code code
= intrinsic_decls
[i
].code
;
131 if (decl
->csym
== NULL
)
132 get_symbol_decl (decl
);
134 /* If there is no function body, then the implementation is always
135 provided by the compiler. */
137 set_decl_built_in_function (decl
->csym
, BUILT_IN_FRONTEND
, code
);
139 /* Infer whether the intrinsic can be used for CTFE, let the
140 front-end know that it can be evaluated at compile-time. */
143 case INTRINSIC_VA_ARG
:
144 case INTRINSIC_C_VA_ARG
:
145 case INTRINSIC_VASTART
:
150 case INTRINSIC_VLOAD
:
151 case INTRINSIC_VSTORE
:
156 /* Check that this overload of pow() is has an equivalent
157 built-in function. It could be `int pow(int, int)'. */
158 tree rettype
= TREE_TYPE (TREE_TYPE (decl
->csym
));
159 if (mathfn_built_in (rettype
, BUILT_IN_POW
) != NULL_TREE
)
160 decl
->builtin
= BUILTINyes
;
165 decl
->builtin
= BUILTINyes
;
169 /* The intrinsic was marked as CTFE-only. */
170 if (intrinsic_decls
[i
].ctfeonly
)
171 DECL_BUILT_IN_CTFE (decl
->csym
) = 1;
173 DECL_INTRINSIC_CODE (decl
->csym
) = code
;
179 /* Construct a function call to the built-in function CODE, N is the number of
180 arguments, and the `...' parameters are the argument expressions.
181 The original call expression is held in CALLEXP. */
184 call_builtin_fn (tree callexp
, built_in_function code
, int n
, ...)
186 tree
*argarray
= XALLOCAVEC (tree
, n
);
190 for (int i
= 0; i
< n
; i
++)
191 argarray
[i
] = va_arg (ap
, tree
);
194 tree exp
= build_call_expr_loc_array (EXPR_LOCATION (callexp
),
195 builtin_decl_explicit (code
),
197 return convert (TREE_TYPE (callexp
), fold (exp
));
200 /* Expand a front-end instrinsic call to bsf(). This takes one argument,
201 the signature to which can be either:
206 This scans all bits in the given argument starting with the first,
207 returning the bit number of the first bit set. The original call
208 expression is held in CALLEXP. */
211 expand_intrinsic_bsf (tree callexp
)
213 /* The bsr() intrinsic gets turned into __builtin_ctz(arg).
214 The return value is supposed to be undefined if arg is zero. */
215 tree arg
= CALL_EXPR_ARG (callexp
, 0);
216 int argsize
= TYPE_PRECISION (TREE_TYPE (arg
));
218 /* Which variant of __builtin_ctz* should we call? */
219 built_in_function code
= (argsize
<= INT_TYPE_SIZE
) ? BUILT_IN_CTZ
220 : (argsize
<= LONG_TYPE_SIZE
) ? BUILT_IN_CTZL
221 : (argsize
<= LONG_LONG_TYPE_SIZE
) ? BUILT_IN_CTZLL
224 gcc_assert (code
!= END_BUILTINS
);
226 return call_builtin_fn (callexp
, code
, 1, arg
);
229 /* Expand a front-end instrinsic call to bsr(). This takes one argument,
230 the signature to which can be either:
235 This scans all bits in the given argument from the most significant bit
236 to the least significant, returning the bit number of the first bit set.
237 The original call expression is held in CALLEXP. */
240 expand_intrinsic_bsr (tree callexp
)
242 /* The bsr() intrinsic gets turned into (size - 1) - __builtin_clz(arg).
243 The return value is supposed to be undefined if arg is zero. */
244 tree arg
= CALL_EXPR_ARG (callexp
, 0);
245 tree type
= TREE_TYPE (arg
);
246 int argsize
= TYPE_PRECISION (type
);
248 /* Which variant of __builtin_clz* should we call? */
249 built_in_function code
= (argsize
<= INT_TYPE_SIZE
) ? BUILT_IN_CLZ
250 : (argsize
<= LONG_TYPE_SIZE
) ? BUILT_IN_CLZL
251 : (argsize
<= LONG_LONG_TYPE_SIZE
) ? BUILT_IN_CLZLL
254 gcc_assert (code
!= END_BUILTINS
);
256 tree result
= call_builtin_fn (callexp
, code
, 1, arg
);
258 /* Handle int -> long conversions. */
259 if (TREE_TYPE (result
) != type
)
260 result
= fold_convert (type
, result
);
262 result
= fold_build2 (MINUS_EXPR
, type
,
263 build_integer_cst (argsize
- 1, type
), result
);
264 return fold_convert (TREE_TYPE (callexp
), result
);
267 /* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
268 bt(), btc(), btr(), or bts(). These intrinsics expect to take two arguments,
269 the signature to which is:
271 int bt (size_t* ptr, size_t bitnum);
273 All intrinsics test if a bit is set and return the result of that condition.
274 Variants of `bt' will then update that bit. `btc' compliments the bit, `bts'
275 sets the bit, and `btr' resets the bit. The original call expression is
279 expand_intrinsic_bt (intrinsic_code intrinsic
, tree callexp
)
281 tree ptr
= CALL_EXPR_ARG (callexp
, 0);
282 tree bitnum
= CALL_EXPR_ARG (callexp
, 1);
283 tree type
= TREE_TYPE (TREE_TYPE (ptr
));
285 /* size_t bitsize = sizeof(*ptr) * BITS_PER_UNIT; */
286 tree bitsize
= fold_convert (type
, TYPE_SIZE (type
));
288 /* ptr[bitnum / bitsize] */
289 ptr
= build_array_index (ptr
, fold_build2 (TRUNC_DIV_EXPR
, type
,
291 ptr
= indirect_ref (type
, ptr
);
293 /* mask = 1 << (bitnum % bitsize); */
294 bitnum
= fold_build2 (TRUNC_MOD_EXPR
, type
, bitnum
, bitsize
);
295 bitnum
= fold_build2 (LSHIFT_EXPR
, type
, size_one_node
, bitnum
);
297 /* cond = ptr[bitnum / size] & mask; */
298 tree cond
= fold_build2 (BIT_AND_EXPR
, type
, ptr
, bitnum
);
301 cond
= build_condition (TREE_TYPE (callexp
), d_truthvalue_conversion (cond
),
302 integer_minus_one_node
, integer_zero_node
);
304 /* Update the bit as needed, only testing the bit for bt(). */
305 if (intrinsic
== INTRINSIC_BT
)
308 tree_code code
= (intrinsic
== INTRINSIC_BTC
) ? BIT_XOR_EXPR
309 : (intrinsic
== INTRINSIC_BTR
) ? BIT_AND_EXPR
310 : (intrinsic
== INTRINSIC_BTS
) ? BIT_IOR_EXPR
312 gcc_assert (code
!= ERROR_MARK
);
314 /* ptr[bitnum / size] op= mask; */
315 if (intrinsic
== INTRINSIC_BTR
)
316 bitnum
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (bitnum
), bitnum
);
318 ptr
= modify_expr (ptr
, fold_build2 (code
, TREE_TYPE (ptr
), ptr
, bitnum
));
320 /* Store the condition result in a temporary, and return expressions in
321 correct order of evaluation. */
322 tree tmp
= build_local_temp (TREE_TYPE (callexp
));
323 cond
= modify_expr (tmp
, cond
);
325 return compound_expr (cond
, compound_expr (ptr
, tmp
));
328 /* Expand a front-end intrinsic call to bswap(). This takes one argument, the
329 signature to which can be either:
331 int bswap (uint arg);
332 int bswap (ulong arg);
334 This swaps all bytes in an N byte type end-to-end. The original call
335 expression is held in CALLEXP. */
338 expand_intrinsic_bswap (tree callexp
)
340 tree arg
= CALL_EXPR_ARG (callexp
, 0);
341 int argsize
= TYPE_PRECISION (TREE_TYPE (arg
));
343 /* Which variant of __builtin_bswap* should we call? */
344 built_in_function code
= (argsize
== 32) ? BUILT_IN_BSWAP32
345 : (argsize
== 64) ? BUILT_IN_BSWAP64
348 gcc_assert (code
!= END_BUILTINS
);
350 return call_builtin_fn (callexp
, code
, 1, arg
);
353 /* Expand a front-end intrinsic call to popcnt(). This takes one argument, the
354 signature to which can be either:
356 int popcnt (uint arg);
357 int popcnt (ulong arg);
359 Calculates the number of set bits in an integer. The original call
360 expression is held in CALLEXP. */
363 expand_intrinsic_popcnt (tree callexp
)
365 tree arg
= CALL_EXPR_ARG (callexp
, 0);
366 int argsize
= TYPE_PRECISION (TREE_TYPE (arg
));
368 /* Which variant of __builtin_popcount* should we call? */
369 built_in_function code
= (argsize
<= INT_TYPE_SIZE
) ? BUILT_IN_POPCOUNT
370 : (argsize
<= LONG_TYPE_SIZE
) ? BUILT_IN_POPCOUNTL
371 : (argsize
<= LONG_LONG_TYPE_SIZE
) ? BUILT_IN_POPCOUNTLL
374 gcc_assert (code
!= END_BUILTINS
);
376 return call_builtin_fn (callexp
, code
, 1, arg
);
379 /* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
380 sqrt(), sqrtf(), sqrtl(). These intrinsics expect to take one argument,
381 the signature to which can be either:
383 float sqrt (float arg);
384 double sqrt (double arg);
385 real sqrt (real arg);
387 This computes the square root of the given argument. The original call
388 expression is held in CALLEXP. */
391 expand_intrinsic_sqrt (intrinsic_code intrinsic
, tree callexp
)
393 tree arg
= CALL_EXPR_ARG (callexp
, 0);
395 /* Which variant of __builtin_sqrt* should we call? */
396 built_in_function code
= (intrinsic
== INTRINSIC_SQRT
) ? BUILT_IN_SQRT
397 : (intrinsic
== INTRINSIC_SQRTF
) ? BUILT_IN_SQRTF
398 : (intrinsic
== INTRINSIC_SQRTL
) ? BUILT_IN_SQRTL
401 gcc_assert (code
!= END_BUILTINS
);
402 return call_builtin_fn (callexp
, code
, 1, arg
);
405 /* Expand a front-end intrinsic call to copysign(). This takes two arguments,
406 the signature to which can be either:
408 float copysign (T to, float from);
409 double copysign (T to, double from);
410 real copysign (T to, real from);
412 This computes a value composed of TO with the sign bit of FROM. The original
413 call expression is held in CALLEXP. */
416 expand_intrinsic_copysign (tree callexp
)
418 tree to
= CALL_EXPR_ARG (callexp
, 0);
419 tree from
= CALL_EXPR_ARG (callexp
, 1);
420 tree type
= TREE_TYPE (to
);
422 /* Convert parameters to the same type. Prefer the first parameter unless it
423 is an integral type. */
424 if (INTEGRAL_TYPE_P (type
))
426 to
= fold_convert (TREE_TYPE (from
), to
);
427 type
= TREE_TYPE (to
);
430 from
= fold_convert (type
, from
);
432 /* Which variant of __builtin_copysign* should we call? */
433 tree builtin
= mathfn_built_in (type
, BUILT_IN_COPYSIGN
);
434 gcc_assert (builtin
!= NULL_TREE
);
436 return call_builtin_fn (callexp
, DECL_FUNCTION_CODE (builtin
), 2,
440 /* Expand a front-end intrinsic call to pow(). This takes two arguments, the
441 signature to which can be either:
443 float pow (float base, T exponent);
444 double pow (double base, T exponent);
445 real pow (real base, T exponent);
447 This computes the value of BASE raised to the power of EXPONENT.
448 The original call expression is held in CALLEXP. */
451 expand_intrinsic_pow (tree callexp
)
453 tree base
= CALL_EXPR_ARG (callexp
, 0);
454 tree exponent
= CALL_EXPR_ARG (callexp
, 1);
455 tree exptype
= TREE_TYPE (exponent
);
457 /* Which variant of __builtin_pow* should we call? */
458 built_in_function code
= SCALAR_FLOAT_TYPE_P (exptype
) ? BUILT_IN_POW
459 : INTEGRAL_TYPE_P (exptype
) ? BUILT_IN_POWI
461 gcc_assert (code
!= END_BUILTINS
);
463 tree builtin
= mathfn_built_in (TREE_TYPE (base
), code
);
464 gcc_assert (builtin
!= NULL_TREE
);
466 return call_builtin_fn (callexp
, DECL_FUNCTION_CODE (builtin
), 2,
470 /* Expand a front-end intrinsic call to toPrec(). This takes one argument, the
471 signature to which can be either:
473 T toPrec(T)(float f);
474 T toPrec(T)(double f);
477 This rounds the argument F to the precision of the specified floating
478 point type T. The original call expression is held in CALLEXP. */
481 expand_intrinsic_toprec (tree callexp
)
483 tree f
= CALL_EXPR_ARG (callexp
, 0);
484 tree type
= TREE_TYPE (callexp
);
486 return convert (type
, f
);
489 /* Expand a front-end intrinsic call to va_arg(). This takes either one or two
490 arguments, the signature to which can be either:
492 T va_arg(T) (ref va_list ap);
493 void va_arg(T) (va_list ap, ref T parmn);
495 This retrieves the next variadic parameter that is type T from the given
496 va_list. If also given, store the value into parmn, otherwise return it.
497 The original call expression is held in CALLEXP. */
500 expand_intrinsic_vaarg (tree callexp
)
502 tree ap
= CALL_EXPR_ARG (callexp
, 0);
503 tree parmn
= NULL_TREE
;
508 if (call_expr_nargs (callexp
) == 1)
509 type
= TREE_TYPE (callexp
);
512 parmn
= CALL_EXPR_ARG (callexp
, 1);
514 gcc_assert (TREE_CODE (parmn
) == ADDR_EXPR
);
515 parmn
= TREE_OPERAND (parmn
, 0);
516 type
= TREE_TYPE (parmn
);
519 /* (T) VA_ARG_EXP<ap>; */
520 tree exp
= build1 (VA_ARG_EXPR
, type
, ap
);
522 /* parmn = (T) VA_ARG_EXP<ap>; */
523 if (parmn
!= NULL_TREE
)
524 exp
= modify_expr (parmn
, exp
);
529 /* Expand a front-end intrinsic call to va_start(), which takes two arguments,
530 the signature to which is:
532 void va_start(T) (out va_list ap, ref T parmn);
534 This initializes the va_list type, where parmn should be the last named
535 parameter. The original call expression is held in CALLEXP. */
538 expand_intrinsic_vastart (tree callexp
)
540 tree ap
= CALL_EXPR_ARG (callexp
, 0);
541 tree parmn
= CALL_EXPR_ARG (callexp
, 1);
546 /* The va_list argument should already have its address taken. The second
547 argument, however, is inout and that needs to be fixed to prevent a
548 warning. Could be casting, so need to check type too? */
549 gcc_assert (TREE_CODE (ap
) == ADDR_EXPR
&& TREE_CODE (parmn
) == ADDR_EXPR
);
551 /* Assuming nobody tries to change the return type. */
552 parmn
= TREE_OPERAND (parmn
, 0);
554 return call_builtin_fn (callexp
, BUILT_IN_VA_START
, 2, ap
, parmn
);
557 /* Expand a front-end instrinsic call to INTRINSIC, which is either a call to
558 adds(), addu(), subs(), subu(), negs(), muls(), or mulu(). These intrinsics
559 expect to take two or three arguments, the signature to which can be either:
561 int adds (int x, int y, ref bool overflow);
562 long adds (long x, long y, ref bool overflow);
563 int negs (int x, ref bool overflow);
564 long negs (long x, ref bool overflow);
566 This performs an operation on two signed or unsigned integers, checking for
567 overflow. The overflow is sticky, meaning that a sequence of operations
568 can be done and overflow need only be checked at the end. The original call
569 expression is held in CALLEXP. */
572 expand_intrinsic_checkedint (intrinsic_code intrinsic
, tree callexp
)
574 tree type
= TREE_TYPE (callexp
);
579 /* The negs() intrinsic gets turned into SUB_OVERFLOW (0, y). */
580 if (intrinsic
== INTRINSIC_NEGS
)
582 x
= fold_convert (type
, integer_zero_node
);
583 y
= CALL_EXPR_ARG (callexp
, 0);
584 overflow
= CALL_EXPR_ARG (callexp
, 1);
588 x
= CALL_EXPR_ARG (callexp
, 0);
589 y
= CALL_EXPR_ARG (callexp
, 1);
590 overflow
= CALL_EXPR_ARG (callexp
, 2);
593 /* Which variant of *_OVERFLOW should we generate? */
594 internal_fn icode
= (intrinsic
== INTRINSIC_ADDS
) ? IFN_ADD_OVERFLOW
595 : (intrinsic
== INTRINSIC_SUBS
) ? IFN_SUB_OVERFLOW
596 : (intrinsic
== INTRINSIC_MULS
) ? IFN_MUL_OVERFLOW
597 : (intrinsic
== INTRINSIC_NEGS
) ? IFN_SUB_OVERFLOW
599 gcc_assert (icode
!= IFN_LAST
);
602 = build_call_expr_internal_loc (EXPR_LOCATION (callexp
), icode
,
603 build_complex_type (type
), 2, x
, y
);
605 STRIP_NOPS (overflow
);
606 overflow
= build_deref (overflow
);
608 /* Assign returned result to overflow parameter, however if overflow is
609 already true, maintain its value. */
610 type
= TREE_TYPE (overflow
);
611 result
= save_expr (result
);
613 tree exp
= fold_build2 (BIT_IOR_EXPR
, type
, overflow
,
614 fold_convert (type
, imaginary_part (result
)));
615 exp
= modify_expr (overflow
, exp
);
617 /* Return the value of result. */
618 return compound_expr (exp
, real_part (result
));
621 /* Expand a front-end instrinsic call to volatileLoad(). This takes one
622 argument, the signature to which can be either:
624 ubyte volatileLoad (ubyte* ptr);
625 ushort volatileLoad (ushort* ptr);
626 uint volatileLoad (uint* ptr);
627 ulong volatileLoad (ulong* ptr);
629 This reads a value from the memory location indicated by ptr. Calls to
630 them are be guaranteed to not be removed (such as during DCE) or reordered
631 in the same thread. The original call expression is held in CALLEXP. */
634 expand_volatile_load (tree callexp
)
636 tree ptr
= CALL_EXPR_ARG (callexp
, 0);
637 tree ptrtype
= TREE_TYPE (ptr
);
638 gcc_assert (POINTER_TYPE_P (ptrtype
));
640 /* (T) *(volatile T *) ptr; */
641 tree type
= build_qualified_type (TREE_TYPE (ptrtype
), TYPE_QUAL_VOLATILE
);
642 tree result
= indirect_ref (type
, ptr
);
643 TREE_THIS_VOLATILE (result
) = 1;
648 /* Expand a front-end instrinsic call to volatileStore(). This takes two
649 arguments, the signature to which can be either:
651 void volatileStore (ubyte* ptr, ubyte value);
652 void volatileStore (ushort* ptr, ushort value);
653 void volatileStore (uint* ptr, uint value);
654 void volatileStore (ulong* ptr, ulong value);
656 This writes a value to the memory location indicated by ptr. Calls to
657 them are be guaranteed to not be removed (such as during DCE) or reordered
658 in the same thread. The original call expression is held in CALLEXP. */
661 expand_volatile_store (tree callexp
)
663 tree ptr
= CALL_EXPR_ARG (callexp
, 0);
664 tree ptrtype
= TREE_TYPE (ptr
);
665 gcc_assert (POINTER_TYPE_P (ptrtype
));
667 /* (T) *(volatile T *) ptr; */
668 tree type
= build_qualified_type (TREE_TYPE (ptrtype
), TYPE_QUAL_VOLATILE
);
669 tree result
= indirect_ref (type
, ptr
);
670 TREE_THIS_VOLATILE (result
) = 1;
672 /* (*(volatile T *) ptr) = value; */
673 tree value
= CALL_EXPR_ARG (callexp
, 1);
674 return modify_expr (result
, value
);
677 /* If CALLEXP is for an intrinsic , expand and return inlined compiler
678 generated instructions. Most map directly to GCC builtins, others
679 require a little extra work around them. */
682 maybe_expand_intrinsic (tree callexp
)
684 tree callee
= CALL_EXPR_FN (callexp
);
686 if (TREE_CODE (callee
) == ADDR_EXPR
)
687 callee
= TREE_OPERAND (callee
, 0);
689 if (TREE_CODE (callee
) != FUNCTION_DECL
)
692 /* Don't expand CTFE-only intrinsics outside of semantic processing. */
693 if (DECL_BUILT_IN_CTFE (callee
) && !doing_semantic_analysis_p
)
696 intrinsic_code intrinsic
= DECL_INTRINSIC_CODE (callee
);
697 built_in_function code
;
705 return expand_intrinsic_bsf (callexp
);
708 return expand_intrinsic_bsr (callexp
);
714 return expand_intrinsic_bt (intrinsic
, callexp
);
716 case INTRINSIC_BSWAP
:
717 return expand_intrinsic_bswap (callexp
);
719 case INTRINSIC_POPCNT
:
720 return expand_intrinsic_popcnt (callexp
);
723 return call_builtin_fn (callexp
, BUILT_IN_COSL
, 1,
724 CALL_EXPR_ARG (callexp
, 0));
727 return call_builtin_fn (callexp
, BUILT_IN_SINL
, 1,
728 CALL_EXPR_ARG (callexp
, 0));
730 case INTRINSIC_RNDTOL
:
731 /* Not sure if llroundl stands as a good replacement for the
732 expected behavior of rndtol. */
733 return call_builtin_fn (callexp
, BUILT_IN_LLROUNDL
, 1,
734 CALL_EXPR_ARG (callexp
, 0));
737 case INTRINSIC_SQRTF
:
738 case INTRINSIC_SQRTL
:
739 return expand_intrinsic_sqrt (intrinsic
, callexp
);
741 case INTRINSIC_LDEXP
:
742 return call_builtin_fn (callexp
, BUILT_IN_LDEXPL
, 2,
743 CALL_EXPR_ARG (callexp
, 0),
744 CALL_EXPR_ARG (callexp
, 1));
747 return call_builtin_fn (callexp
, BUILT_IN_FABSL
, 1,
748 CALL_EXPR_ARG (callexp
, 0));
751 return call_builtin_fn (callexp
, BUILT_IN_RINTL
, 1,
752 CALL_EXPR_ARG (callexp
, 0));
755 return call_builtin_fn (callexp
, BUILT_IN_TANL
, 1,
756 CALL_EXPR_ARG (callexp
, 0));
758 case INTRINSIC_ISNAN
:
759 return call_builtin_fn (callexp
, BUILT_IN_ISNAN
, 1,
760 CALL_EXPR_ARG (callexp
, 0));
762 case INTRINSIC_ISINFINITY
:
763 return call_builtin_fn (callexp
, BUILT_IN_ISINF
, 1,
764 CALL_EXPR_ARG (callexp
, 0));
766 case INTRINSIC_ISFINITE
:
767 return call_builtin_fn (callexp
, BUILT_IN_ISFINITE
, 1,
768 CALL_EXPR_ARG (callexp
, 0));
771 return call_builtin_fn (callexp
, BUILT_IN_EXPL
, 1,
772 CALL_EXPR_ARG (callexp
, 0));
774 case INTRINSIC_EXPM1
:
775 return call_builtin_fn (callexp
, BUILT_IN_EXPM1L
, 1,
776 CALL_EXPR_ARG (callexp
, 0));
779 return call_builtin_fn (callexp
, BUILT_IN_EXP2L
, 1,
780 CALL_EXPR_ARG (callexp
, 0));
783 return call_builtin_fn (callexp
, BUILT_IN_LOGL
, 1,
784 CALL_EXPR_ARG (callexp
, 0));
787 return call_builtin_fn (callexp
, BUILT_IN_LOG2L
, 1,
788 CALL_EXPR_ARG (callexp
, 0));
790 case INTRINSIC_LOG10
:
791 return call_builtin_fn (callexp
, BUILT_IN_LOG10L
, 1,
792 CALL_EXPR_ARG (callexp
, 0));
794 case INTRINSIC_ROUND
:
795 return call_builtin_fn (callexp
, BUILT_IN_ROUNDL
, 1,
796 CALL_EXPR_ARG (callexp
, 0));
798 case INTRINSIC_FLOORF
:
799 case INTRINSIC_FLOOR
:
800 case INTRINSIC_FLOORL
:
801 code
= (intrinsic
== INTRINSIC_FLOOR
) ? BUILT_IN_FLOOR
802 : (intrinsic
== INTRINSIC_FLOORF
) ? BUILT_IN_FLOORF
804 return call_builtin_fn (callexp
, code
, 1, CALL_EXPR_ARG (callexp
, 0));
806 case INTRINSIC_CEILF
:
808 case INTRINSIC_CEILL
:
809 code
= (intrinsic
== INTRINSIC_CEIL
) ? BUILT_IN_CEIL
810 : (intrinsic
== INTRINSIC_CEILF
) ? BUILT_IN_CEILF
812 return call_builtin_fn (callexp
, code
, 1, CALL_EXPR_ARG (callexp
, 0));
814 case INTRINSIC_TRUNC
:
815 return call_builtin_fn (callexp
, BUILT_IN_TRUNCL
, 1,
816 CALL_EXPR_ARG (callexp
, 0));
819 return call_builtin_fn (callexp
, BUILT_IN_FMINL
, 2,
820 CALL_EXPR_ARG (callexp
, 0),
821 CALL_EXPR_ARG (callexp
, 1));
824 return call_builtin_fn (callexp
, BUILT_IN_FMAXL
, 2,
825 CALL_EXPR_ARG (callexp
, 0),
826 CALL_EXPR_ARG (callexp
, 1));
828 case INTRINSIC_COPYSIGN
:
829 return expand_intrinsic_copysign (callexp
);
832 return expand_intrinsic_pow (callexp
);
835 return call_builtin_fn (callexp
, BUILT_IN_FMAL
, 3,
836 CALL_EXPR_ARG (callexp
, 0),
837 CALL_EXPR_ARG (callexp
, 1),
838 CALL_EXPR_ARG (callexp
, 2));
840 case INTRINSIC_TOPREC
:
841 return expand_intrinsic_toprec (callexp
);
843 case INTRINSIC_VA_ARG
:
844 case INTRINSIC_C_VA_ARG
:
845 return expand_intrinsic_vaarg (callexp
);
847 case INTRINSIC_VASTART
:
848 return expand_intrinsic_vastart (callexp
);
854 return expand_intrinsic_checkedint (intrinsic
, callexp
);
856 case INTRINSIC_VLOAD
:
857 return expand_volatile_load (callexp
);
859 case INTRINSIC_VSTORE
:
860 return expand_volatile_store (callexp
);