/* intrinsics.cc -- D language compiler intrinsics.
   Copyright (C) 2006-2020 Free Software Foundation, Inc.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"

#include "dmd/declaration.h"
#include "dmd/identifier.h"
#include "dmd/mangle.h"
#include "dmd/module.h"
#include "dmd/template.h"

#include "tm.h"
#include "function.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "builtins.h"

#include "d-tree.h"


/* An internal struct used to hold information on D intrinsics.  */

struct intrinsic_decl
{
  /* The DECL_INTRINSIC_CODE of this decl.  */
  intrinsic_code code;

  /* The DECL_FUNCTION_CODE of this decl, if it directly maps to any.  */
  built_in_function built_in;

  /* The name of the intrinsic.  */
  const char *name;

  /* The module where the intrinsic is located.  */
  const char *module;

  /* The mangled signature decoration of the intrinsic.  */
  const char *deco;

  /* True if the intrinsic is only handled in CTFE.  */
  bool ctfeonly;
};

static const intrinsic_decl intrinsic_decls[] =
{
#define DEF_D_INTRINSIC(CODE, BUILTIN, NAME, MODULE, DECO, CTFE) \
    { INTRINSIC_ ## CODE, BUILT_IN_ ## BUILTIN, NAME, MODULE, DECO, CTFE },

#include "intrinsics.def"

#undef DEF_D_INTRINSIC
};
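
/* As an illustration (the deco string here is hypothetical), an entry in
   intrinsics.def of the form:

     DEF_D_INTRINSIC (BSF, NONE, "bsf", "core.bitop", "FNaNbNiNfkZi", false)

   expands into the table initializer:

     { INTRINSIC_BSF, BUILT_IN_NONE, "bsf", "core.bitop",
       "FNaNbNiNfkZi", false },  */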

/* Checks if DECL is an intrinsic or run-time library function that requires
   special processing.  Sets DECL_INTRINSIC_CODE so it can be identified
   later in maybe_expand_intrinsic.  */

void
maybe_set_intrinsic (FuncDeclaration *decl)
{
  if (!decl->ident || decl->builtin != BUILTINunknown)
    return;

  /* The builtin flag is updated only if we can evaluate the intrinsic
     at compile-time, such as the math or bitop intrinsics.  */
  decl->builtin = BUILTINno;

  /* Check if it's a compiler intrinsic.  We only require that any
     internally recognised intrinsics are declared in a module with
     an explicit module declaration.  */
  Module *m = decl->getModule ();

  if (!m || !m->md)
    return;

  TemplateInstance *ti = decl->isInstantiated ();
  TemplateDeclaration *td = ti ? ti->tempdecl->isTemplateDeclaration () : NULL;

  const char *tname = decl->ident->toChars ();
  const char *tmodule = m->md->toChars ();
  const char *tdeco = (td == NULL) ? decl->type->deco : NULL;

  /* Look through all D intrinsics.  */
  for (size_t i = 0; i < (size_t) INTRINSIC_LAST; i++)
    {
      if (!intrinsic_decls[i].name)
	continue;

      if (strcmp (intrinsic_decls[i].name, tname) != 0
	  || strcmp (intrinsic_decls[i].module, tmodule) != 0)
	continue;

      /* Instantiated functions would have the wrong type deco, so get it
	 from the template member instead.  */
      if (tdeco == NULL)
	{
	  if (!td || !td->onemember)
	    return;

	  FuncDeclaration *fd = td->onemember->isFuncDeclaration ();
	  if (fd == NULL)
	    return;

	  OutBuffer buf;
	  mangleToBuffer (fd->type, &buf);
	  tdeco = buf.extractChars ();
	}

      /* Matching the type deco may be a bit too strict, as it means that all
	 function attributes that end up in the signature must be kept aligned
	 between the compiler and library declaration.  */
      if (strcmp (intrinsic_decls[i].deco, tdeco) == 0)
	{
	  intrinsic_code code = intrinsic_decls[i].code;

	  if (decl->csym == NULL)
	    get_symbol_decl (decl);

	  /* If there is no function body, then the implementation is always
	     provided by the compiler.  */
	  if (!decl->fbody)
	    set_decl_built_in_function (decl->csym, BUILT_IN_FRONTEND, code);

	  /* Infer whether the intrinsic can be used for CTFE, let the
	     front-end know that it can be evaluated at compile-time.  */
	  switch (code)
	    {
	    case INTRINSIC_VA_ARG:
	    case INTRINSIC_C_VA_ARG:
	    case INTRINSIC_VASTART:
	    case INTRINSIC_ADDS:
	    case INTRINSIC_ADDSL:
	    case INTRINSIC_ADDU:
	    case INTRINSIC_ADDUL:
	    case INTRINSIC_SUBS:
	    case INTRINSIC_SUBSL:
	    case INTRINSIC_SUBU:
	    case INTRINSIC_SUBUL:
	    case INTRINSIC_MULS:
	    case INTRINSIC_MULSL:
	    case INTRINSIC_MULU:
	    case INTRINSIC_MULUI:
	    case INTRINSIC_MULUL:
	    case INTRINSIC_NEGS:
	    case INTRINSIC_NEGSL:
	    case INTRINSIC_VLOAD8:
	    case INTRINSIC_VLOAD16:
	    case INTRINSIC_VLOAD32:
	    case INTRINSIC_VLOAD64:
	    case INTRINSIC_VSTORE8:
	    case INTRINSIC_VSTORE16:
	    case INTRINSIC_VSTORE32:
	    case INTRINSIC_VSTORE64:
	      break;

	    case INTRINSIC_POW:
	      {
		/* Check that this overload of pow() has an equivalent
		   built-in function.  It could be `int pow(int, int)'.  */
		tree rettype = TREE_TYPE (TREE_TYPE (decl->csym));
		if (mathfn_built_in (rettype, BUILT_IN_POW) != NULL_TREE)
		  decl->builtin = BUILTINyes;
		break;
	      }

	    default:
	      decl->builtin = BUILTINyes;
	      break;
	    }

	  /* The intrinsic was marked as CTFE-only.  */
	  if (intrinsic_decls[i].ctfeonly)
	    DECL_BUILT_IN_CTFE (decl->csym) = 1;

	  DECL_INTRINSIC_CODE (decl->csym) = code;
	  break;
	}
    }
}

/* Construct a function call to the built-in function CODE, N is the number of
   arguments, and the `...' parameters are the argument expressions.
   The original call expression is held in CALLEXP.  */

static tree
call_builtin_fn (tree callexp, built_in_function code, int n, ...)
{
  tree *argarray = XALLOCAVEC (tree, n);
  va_list ap;

  va_start (ap, n);
  for (int i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);

  tree exp = build_call_expr_loc_array (EXPR_LOCATION (callexp),
					builtin_decl_explicit (code),
					n, argarray);
  return convert (TREE_TYPE (callexp), fold (exp));
}
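
/* For example, call_builtin_fn (callexp, BUILT_IN_CTZ, 1, arg) builds the
   call __builtin_ctz (arg) at the location of CALLEXP, then converts the
   result to the type of CALLEXP.  */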

/* Expand a front-end intrinsic call to bsf().  This takes one argument,
   the signature to which can be either:

	int bsf (uint arg);
	int bsf (ulong arg);

   This scans all bits in the given argument starting with the first,
   returning the bit number of the first bit set.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_bsf (tree callexp)
{
  /* The bsf() intrinsic gets turned into __builtin_ctz(arg).
     The return value is supposed to be undefined if arg is zero.  */
  tree arg = CALL_EXPR_ARG (callexp, 0);
  int argsize = TYPE_PRECISION (TREE_TYPE (arg));

  /* Which variant of __builtin_ctz* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CTZ
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CTZL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CTZLL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  return call_builtin_fn (callexp, code, 1, arg);
}
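
/* As a worked example, bsf(0x28) == 3: 0x28 is 101000 in binary, and the
   least significant set bit is bit 3, which is what __builtin_ctz returns.  */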

/* Expand a front-end intrinsic call to bsr().  This takes one argument,
   the signature to which can be either:

	int bsr (uint arg);
	int bsr (ulong arg);

   This scans all bits in the given argument from the most significant bit
   to the least significant, returning the bit number of the first bit set.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_bsr (tree callexp)
{
  /* The bsr() intrinsic gets turned into (size - 1) - __builtin_clz(arg).
     The return value is supposed to be undefined if arg is zero.  */
  tree arg = CALL_EXPR_ARG (callexp, 0);
  tree type = TREE_TYPE (arg);
  int argsize = TYPE_PRECISION (type);

  /* Which variant of __builtin_clz* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CLZ
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CLZL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CLZLL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  tree result = call_builtin_fn (callexp, code, 1, arg);

  /* Handle int -> long conversions.  */
  if (TREE_TYPE (result) != type)
    result = fold_convert (type, result);

  result = fold_build2 (MINUS_EXPR, type,
			build_integer_cst (argsize - 1, type), result);
  return fold_convert (TREE_TYPE (callexp), result);
}
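
/* As a worked example, for a 32-bit argument bsr(0x28) == 5:
   __builtin_clz(0x28) counts 26 leading zero bits, and (32 - 1) - 26 = 5,
   the bit number of the most significant set bit of 101000 in binary.  */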

/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   bt(), btc(), btr(), or bts().  These intrinsics expect to take two
   arguments, the signature to which is:

	int bt (size_t* ptr, size_t bitnum);

   All intrinsics test if a bit is set and return the result of that
   condition.  Variants of `bt' will then update that bit.  `btc' complements
   the bit, `bts' sets the bit, and `btr' resets the bit.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_bt (intrinsic_code intrinsic, tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree bitnum = CALL_EXPR_ARG (callexp, 1);
  tree type = TREE_TYPE (TREE_TYPE (ptr));

  /* size_t bitsize = sizeof(*ptr) * BITS_PER_UNIT;  */
  tree bitsize = fold_convert (type, TYPE_SIZE (type));

  /* ptr[bitnum / bitsize]  */
  ptr = build_array_index (ptr, fold_build2 (TRUNC_DIV_EXPR, type,
					     bitnum, bitsize));
  ptr = indirect_ref (type, ptr);

  /* mask = 1 << (bitnum % bitsize);  */
  bitnum = fold_build2 (TRUNC_MOD_EXPR, type, bitnum, bitsize);
  bitnum = fold_build2 (LSHIFT_EXPR, type, size_one_node, bitnum);

  /* cond = ptr[bitnum / size] & mask;  */
  tree cond = fold_build2 (BIT_AND_EXPR, type, ptr, bitnum);

  /* cond ? -1 : 0;  */
  cond = build_condition (TREE_TYPE (callexp), d_truthvalue_conversion (cond),
			  integer_minus_one_node, integer_zero_node);

  /* Update the bit as needed, only testing the bit for bt().  */
  tree_code code;

  switch (intrinsic)
    {
    case INTRINSIC_BT:
    case INTRINSIC_BT64:
      return cond;

    case INTRINSIC_BTC:
    case INTRINSIC_BTC64:
      code = BIT_XOR_EXPR;
      break;

    case INTRINSIC_BTR:
    case INTRINSIC_BTR64:
      bitnum = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (bitnum), bitnum);
      code = BIT_AND_EXPR;
      break;

    case INTRINSIC_BTS:
    case INTRINSIC_BTS64:
      code = BIT_IOR_EXPR;
      break;

    default:
      gcc_unreachable ();
    }

  /* ptr[bitnum / size] op= mask;  */
  ptr = modify_expr (ptr, fold_build2 (code, TREE_TYPE (ptr), ptr, bitnum));

  /* Store the condition result in a temporary, and return expressions in
     correct order of evaluation.  */
  tree tmp = build_local_temp (TREE_TYPE (callexp));
  cond = modify_expr (tmp, cond);

  return compound_expr (cond, compound_expr (ptr, tmp));
}
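
/* Roughly, the expansion of bts(p, n) then has the shape:

     (tmp = p[n / bitsize] & mask ? -1 : 0,
      p[n / bitsize] |= mask,
      tmp)

   with btc() using `^=' and btr() using `&= ~mask' for the update.  */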

/* Expand a front-end intrinsic call to popcnt().  This takes one argument,
   the signature to which can be either:

	int popcnt (uint arg);
	int popcnt (ulong arg);

   Calculates the number of set bits in an integer.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_popcnt (tree callexp)
{
  tree arg = CALL_EXPR_ARG (callexp, 0);
  int argsize = TYPE_PRECISION (TREE_TYPE (arg));

  /* Which variant of __builtin_popcount* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_POPCOUNT
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTLL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  return call_builtin_fn (callexp, code, 1, arg);
}
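
/* For example, popcnt(0xF0) == 4, as __builtin_popcount counts the four set
   bits of 11110000 in binary.  */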

/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   rol() or ror().  These intrinsics expect to take one or two arguments,
   the signature to which can be either:

	T rol(T) (const T value, const uint count);
	T rol(uint count, T) (const T value);
	T ror(T) (const T value, const uint count);
	T ror(uint count, T) (const T value);

   This bitwise rotates VALUE left or right by COUNT bit positions.  */

static tree
expand_intrinsic_rotate (intrinsic_code intrinsic, tree callexp)
{
  tree type = TREE_TYPE (callexp);
  tree value = CALL_EXPR_ARG (callexp, 0);
  tree count;
  tree_code code;

  /* Get the equivalent tree code for the intrinsic.  */
  if (intrinsic == INTRINSIC_ROL || intrinsic == INTRINSIC_ROL_TIARG)
    code = LROTATE_EXPR;
  else if (intrinsic == INTRINSIC_ROR || intrinsic == INTRINSIC_ROR_TIARG)
    code = RROTATE_EXPR;
  else
    gcc_unreachable ();

  /* Get the COUNT parameter.  Either from the call expression arguments or
     the template instantiation arguments.  */
  if (intrinsic == INTRINSIC_ROL || intrinsic == INTRINSIC_ROR)
    count = CALL_EXPR_ARG (callexp, 1);
  else
    {
      tree callee = CALL_EXPR_FN (callexp);

      if (TREE_CODE (callee) == ADDR_EXPR)
	callee = TREE_OPERAND (callee, 0);

      /* Retrieve from the encoded template instantiation.  */
      TemplateInstance *ti = DECL_LANG_FRONTEND (callee)->isInstantiated ();
      gcc_assert (ti && ti->tiargs && ti->tiargs->length == 2);

      Expression *e = isExpression ((*ti->tiargs)[0]);
      gcc_assert (e && e->op == TOKint64);
      count = build_expr (e, true);
    }

  return fold_build2 (code, type, value, count);
}
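
/* For example, rol(value, 1) on a uint becomes LROTATE_EXPR <value, 1>,
   which is equivalent to (value << 1) | (value >> 31).  */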

/* Expand a front-end intrinsic call to copysign().  This takes two arguments,
   the signature to which can be either:

	float copysign (T to, float from);
	double copysign (T to, double from);
	real copysign (T to, real from);

   This computes a value composed of TO with the sign bit of FROM.  The
   original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_copysign (tree callexp)
{
  tree to = CALL_EXPR_ARG (callexp, 0);
  tree from = CALL_EXPR_ARG (callexp, 1);
  tree type = TREE_TYPE (to);

  /* Convert parameters to the same type.  Prefer the first parameter unless
     it is an integral type.  */
  if (INTEGRAL_TYPE_P (type))
    {
      to = fold_convert (TREE_TYPE (from), to);
      type = TREE_TYPE (to);
    }
  else
    from = fold_convert (type, from);

  /* Which variant of __builtin_copysign* should we call?  */
  tree builtin = mathfn_built_in (type, BUILT_IN_COPYSIGN);
  gcc_assert (builtin != NULL_TREE);

  return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2,
			  to, from);
}
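
/* For example, copysign(3.0, -2.5) expands to __builtin_copysign (3.0, -2.5)
   and evaluates to -3.0: the magnitude of TO with the sign bit of FROM.  */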

/* Expand a front-end intrinsic call to pow().  This takes two arguments, the
   signature to which can be either:

	float pow (float base, T exponent);
	double pow (double base, T exponent);
	real pow (real base, T exponent);

   This computes the value of BASE raised to the power of EXPONENT.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_pow (tree callexp)
{
  tree base = CALL_EXPR_ARG (callexp, 0);
  tree exponent = CALL_EXPR_ARG (callexp, 1);
  tree exptype = TREE_TYPE (exponent);

  /* Which variant of __builtin_pow* should we call?  */
  built_in_function code = SCALAR_FLOAT_TYPE_P (exptype) ? BUILT_IN_POW
    : INTEGRAL_TYPE_P (exptype) ? BUILT_IN_POWI
    : END_BUILTINS;
  gcc_assert (code != END_BUILTINS);

  tree builtin = mathfn_built_in (TREE_TYPE (base), code);
  gcc_assert (builtin != NULL_TREE);

  return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2,
			  base, exponent);
}
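
/* For example, pow(x, 2.0) with a double base expands to
   __builtin_pow (x, 2.0), while pow(x, 2) with an integral exponent expands
   to __builtin_powi (x, 2) instead.  */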

/* Expand a front-end intrinsic call to toPrec().  This takes one argument,
   the signature to which can be either:

	T toPrec(T)(float f);
	T toPrec(T)(double f);
	T toPrec(T)(real f);

   This rounds the argument F to the precision of the specified floating
   point type T.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_toprec (tree callexp)
{
  tree f = CALL_EXPR_ARG (callexp, 0);
  tree type = TREE_TYPE (callexp);

  return convert (type, f);
}

/* Expand a front-end intrinsic call to va_arg().  This takes either one or
   two arguments, the signature to which can be either:

	T va_arg(T) (ref va_list ap);
	void va_arg(T) (va_list ap, ref T parmn);

   This retrieves the next variadic parameter that is type T from the given
   va_list.  If also given, store the value into parmn, otherwise return it.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vaarg (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = NULL_TREE;
  tree type;

  STRIP_NOPS (ap);

  if (call_expr_nargs (callexp) == 1)
    type = TREE_TYPE (callexp);
  else
    {
      parmn = CALL_EXPR_ARG (callexp, 1);
      STRIP_NOPS (parmn);

      /* The `ref' argument to va_arg is either an address or reference,
	 get the value of it.  */
      if (TREE_CODE (parmn) == PARM_DECL && POINTER_TYPE_P (TREE_TYPE (parmn)))
	parmn = build_deref (parmn);
      else
	{
	  gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
	  parmn = TREE_OPERAND (parmn, 0);
	}

      type = TREE_TYPE (parmn);
    }

  /* (T) VA_ARG_EXP<ap>;  */
  tree exp = build1 (VA_ARG_EXPR, type, ap);

  /* parmn = (T) VA_ARG_EXP<ap>;  */
  if (parmn != NULL_TREE)
    exp = modify_expr (parmn, exp);

  return exp;
}
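
/* So the one-argument form va_arg!int(ap) expands to the expression
   (int) VA_ARG_EXPR <ap>, while the two-argument form va_arg(ap, parmn)
   expands to the assignment parmn = (T) VA_ARG_EXPR <ap>.  */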

/* Expand a front-end intrinsic call to va_start(), which takes two arguments,
   the signature to which is:

	void va_start(T) (out va_list ap, ref T parmn);

   This initializes the va_list type, where parmn should be the last named
   parameter.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vastart (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = CALL_EXPR_ARG (callexp, 1);

  STRIP_NOPS (ap);
  STRIP_NOPS (parmn);

  /* The va_list argument should already have its address taken.  The second
     argument, however, is inout and that needs to be fixed to prevent a
     warning.  Could be casting, so need to check type too?  */
  gcc_assert (TREE_CODE (ap) == ADDR_EXPR
	      || (TREE_CODE (ap) == PARM_DECL
		  && POINTER_TYPE_P (TREE_TYPE (ap))));

  /* Assuming nobody tries to change the return type.  */
  if (TREE_CODE (parmn) != PARM_DECL)
    {
      gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
      parmn = TREE_OPERAND (parmn, 0);
    }

  return call_builtin_fn (callexp, BUILT_IN_VA_START, 2, ap, parmn);
}

/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   adds(), addu(), subs(), subu(), negs(), muls(), or mulu().  These intrinsics
   expect to take two or three arguments, the signature to which can be either:

	int adds (int x, int y, ref bool overflow);
	long adds (long x, long y, ref bool overflow);
	int negs (int x, ref bool overflow);
	long negs (long x, ref bool overflow);

   This performs an operation on two signed or unsigned integers, checking for
   overflow.  The overflow is sticky, meaning that a sequence of operations
   can be done and overflow need only be checked at the end.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_checkedint (intrinsic_code intrinsic, tree callexp)
{
  tree type = TREE_TYPE (callexp);
  tree x;
  tree y;
  tree overflow;
  internal_fn icode;

  /* Which variant of *_OVERFLOW should we generate?  */
  switch (intrinsic)
    {
    case INTRINSIC_ADDS:
    case INTRINSIC_ADDSL:
    case INTRINSIC_ADDU:
    case INTRINSIC_ADDUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_ADD_OVERFLOW;
      break;

    case INTRINSIC_SUBS:
    case INTRINSIC_SUBSL:
    case INTRINSIC_SUBU:
    case INTRINSIC_SUBUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_SUB_OVERFLOW;
      break;

    case INTRINSIC_MULS:
    case INTRINSIC_MULSL:
    case INTRINSIC_MULU:
    case INTRINSIC_MULUI:
    case INTRINSIC_MULUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_MUL_OVERFLOW;
      break;

    case INTRINSIC_NEGS:
    case INTRINSIC_NEGSL:
      /* The negs() intrinsic gets turned into SUB_OVERFLOW (0, y).  */
      x = fold_convert (type, integer_zero_node);
      y = CALL_EXPR_ARG (callexp, 0);
      overflow = CALL_EXPR_ARG (callexp, 1);
      icode = IFN_SUB_OVERFLOW;
      break;

    default:
      gcc_unreachable ();
    }

  tree result
    = build_call_expr_internal_loc (EXPR_LOCATION (callexp), icode,
				    build_complex_type (type), 2, x, y);

  STRIP_NOPS (overflow);
  overflow = build_deref (overflow);

  /* Assign returned result to overflow parameter, however if overflow is
     already true, maintain its value.  */
  type = TREE_TYPE (overflow);
  result = save_expr (result);

  tree exp = fold_build2 (BIT_IOR_EXPR, type, overflow,
			  fold_convert (type, imaginary_part (result)));
  exp = modify_expr (overflow, exp);

  /* Return the value of result.  */
  return compound_expr (exp, real_part (result));
}
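
/* Roughly, adds(x, y, overflow) therefore expands to:

     (r = ADD_OVERFLOW (x, y),
      overflow = overflow | IMAGPART_EXPR <r>,
      REALPART_EXPR <r>)

   where the real part holds the (possibly wrapped) result and the imaginary
   part holds the overflow flag.  */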

/* Expand a front-end intrinsic call to volatileLoad().  This takes one
   argument, the signature to which can be either:

	ubyte volatileLoad (ubyte* ptr);
	ushort volatileLoad (ushort* ptr);
	uint volatileLoad (uint* ptr);
	ulong volatileLoad (ulong* ptr);

   This reads a value from the memory location indicated by ptr.  Calls to
   it are guaranteed not to be removed (such as during DCE) or reordered
   within the same thread.  The original call expression is held in CALLEXP.  */

static tree
expand_volatile_load (tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree ptrtype = TREE_TYPE (ptr);
  gcc_assert (POINTER_TYPE_P (ptrtype));

  /* (T) *(volatile T *) ptr;  */
  tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
  tree result = indirect_ref (type, ptr);
  TREE_THIS_VOLATILE (result) = 1;

  return result;
}

/* Expand a front-end intrinsic call to volatileStore().  This takes two
   arguments, the signature to which can be either:

	void volatileStore (ubyte* ptr, ubyte value);
	void volatileStore (ushort* ptr, ushort value);
	void volatileStore (uint* ptr, uint value);
	void volatileStore (ulong* ptr, ulong value);

   This writes a value to the memory location indicated by ptr.  Calls to
   it are guaranteed not to be removed (such as during DCE) or reordered
   within the same thread.  The original call expression is held in CALLEXP.  */

static tree
expand_volatile_store (tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree ptrtype = TREE_TYPE (ptr);
  gcc_assert (POINTER_TYPE_P (ptrtype));

  /* (T) *(volatile T *) ptr;  */
  tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
  tree result = indirect_ref (type, ptr);
  TREE_THIS_VOLATILE (result) = 1;

  /* (*(volatile T *) ptr) = value;  */
  tree value = CALL_EXPR_ARG (callexp, 1);
  return modify_expr (result, value);
}

/* If CALLEXP is for an intrinsic, expand and return inlined compiler
   generated instructions.  Most map directly to GCC builtins, others
   require a little extra work around them.  */

tree
maybe_expand_intrinsic (tree callexp)
{
  tree callee = CALL_EXPR_FN (callexp);

  if (TREE_CODE (callee) == ADDR_EXPR)
    callee = TREE_OPERAND (callee, 0);

  if (TREE_CODE (callee) != FUNCTION_DECL)
    return callexp;

  /* Don't expand CTFE-only intrinsics outside of semantic processing.  */
  if (DECL_BUILT_IN_CTFE (callee) && !doing_semantic_analysis_p)
    return callexp;

  intrinsic_code intrinsic = DECL_INTRINSIC_CODE (callee);
  built_in_function code;

  switch (intrinsic)
    {
    case INTRINSIC_NONE:
      return callexp;

    case INTRINSIC_BSF:
    case INTRINSIC_BSF64:
      return expand_intrinsic_bsf (callexp);

    case INTRINSIC_BSR:
    case INTRINSIC_BSR64:
      return expand_intrinsic_bsr (callexp);

    case INTRINSIC_BT:
    case INTRINSIC_BT64:
    case INTRINSIC_BTC:
    case INTRINSIC_BTC64:
    case INTRINSIC_BTR:
    case INTRINSIC_BTR64:
    case INTRINSIC_BTS:
    case INTRINSIC_BTS64:
      return expand_intrinsic_bt (intrinsic, callexp);

    case INTRINSIC_POPCNT32:
    case INTRINSIC_POPCNT64:
      return expand_intrinsic_popcnt (callexp);

    case INTRINSIC_ROL:
    case INTRINSIC_ROL_TIARG:
    case INTRINSIC_ROR:
    case INTRINSIC_ROR_TIARG:
      return expand_intrinsic_rotate (intrinsic, callexp);

    case INTRINSIC_BSWAP32:
    case INTRINSIC_BSWAP64:
    case INTRINSIC_CEIL:
    case INTRINSIC_CEILF:
    case INTRINSIC_CEILL:
    case INTRINSIC_COSL:
    case INTRINSIC_EXP:
    case INTRINSIC_EXP2:
    case INTRINSIC_EXPM1:
    case INTRINSIC_FABSL:
    case INTRINSIC_FLOOR:
    case INTRINSIC_FLOORF:
    case INTRINSIC_FLOORL:
    case INTRINSIC_ISFINITE:
    case INTRINSIC_ISINFINITY:
    case INTRINSIC_ISNAN:
    case INTRINSIC_LOG:
    case INTRINSIC_LOG10:
    case INTRINSIC_LOG2:
    case INTRINSIC_RINTL:
    case INTRINSIC_RNDTOLL:
    case INTRINSIC_ROUND:
    case INTRINSIC_SINL:
    case INTRINSIC_SQRT:
    case INTRINSIC_SQRTF:
    case INTRINSIC_SQRTL:
    case INTRINSIC_TAN:
    case INTRINSIC_TRUNC:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_FMAX:
    case INTRINSIC_FMIN:
    case INTRINSIC_LDEXPL:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_FMA:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 3,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1),
			      CALL_EXPR_ARG (callexp, 2));

    case INTRINSIC_COPYSIGN:
    case INTRINSIC_COPYSIGNI:
      return expand_intrinsic_copysign (callexp);

    case INTRINSIC_POW:
      return expand_intrinsic_pow (callexp);

    case INTRINSIC_TOPREC:
    case INTRINSIC_TOPRECF:
    case INTRINSIC_TOPRECL:
      return expand_intrinsic_toprec (callexp);

    case INTRINSIC_VA_ARG:
    case INTRINSIC_C_VA_ARG:
      return expand_intrinsic_vaarg (callexp);

    case INTRINSIC_VASTART:
      return expand_intrinsic_vastart (callexp);

    case INTRINSIC_ADDS:
    case INTRINSIC_ADDSL:
    case INTRINSIC_ADDU:
    case INTRINSIC_ADDUL:
    case INTRINSIC_SUBS:
    case INTRINSIC_SUBSL:
    case INTRINSIC_SUBU:
    case INTRINSIC_SUBUL:
    case INTRINSIC_MULS:
    case INTRINSIC_MULSL:
    case INTRINSIC_MULU:
    case INTRINSIC_MULUI:
    case INTRINSIC_MULUL:
    case INTRINSIC_NEGS:
    case INTRINSIC_NEGSL:
      return expand_intrinsic_checkedint (intrinsic, callexp);

    case INTRINSIC_VLOAD8:
    case INTRINSIC_VLOAD16:
    case INTRINSIC_VLOAD32:
    case INTRINSIC_VLOAD64:
      return expand_volatile_load (callexp);

    case INTRINSIC_VSTORE8:
    case INTRINSIC_VSTORE16:
    case INTRINSIC_VSTORE32:
    case INTRINSIC_VSTORE64:
      return expand_volatile_store (callexp);

    default:
      gcc_unreachable ();
    }
}