d: Fix regression, all 32-bit execution tests FAIL: internal error printing module...
[gcc.git] / gcc / d / intrinsics.cc
1 /* intrinsics.cc -- D language compiler intrinsics.
2 Copyright (C) 2006-2020 Free Software Foundation, Inc.
3
4 GCC is free software; you can redistribute it and/or modify
5 it under the terms of the GNU General Public License as published by
6 the Free Software Foundation; either version 3, or (at your option)
7 any later version.
8
9 GCC is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU General Public License for more details.
13
14 You should have received a copy of the GNU General Public License
15 along with GCC; see the file COPYING3. If not see
16 <http://www.gnu.org/licenses/>. */
17
18 #include "config.h"
19 #include "system.h"
20 #include "coretypes.h"
21
22 #include "dmd/declaration.h"
23 #include "dmd/identifier.h"
24 #include "dmd/mangle.h"
25 #include "dmd/mangle.h"
26 #include "dmd/module.h"
27 #include "dmd/template.h"
28
29 #include "tm.h"
30 #include "function.h"
31 #include "tree.h"
32 #include "fold-const.h"
33 #include "stringpool.h"
34 #include "builtins.h"
35
36 #include "d-tree.h"
37
38
/* An internal struct used to hold information on D intrinsics.
   NOTE: the field order must match the initializer emitted by the
   DEF_D_INTRINSIC macro in intrinsics.def.  */

struct intrinsic_decl
{
  /* The DECL_INTRINSIC_CODE of this decl.  */
  intrinsic_code code;

  /* The DECL_FUNCTION_CODE of this decl, if it directly maps to any.  */
  built_in_function built_in;

  /* The name of the intrinsic.  */
  const char *name;

  /* The module where the intrinsic is located.  */
  const char *module;

  /* The mangled signature decoration of the intrinsic.  */
  const char *deco;

  /* True if the intrinsic is only handled in CTFE.  */
  bool ctfeonly;
};
61
/* Table of all D intrinsics, generated from intrinsics.def.  The table is
   indexed by intrinsic_code, so entries must stay in declaration order.  */

static const intrinsic_decl intrinsic_decls[] =
{
#define DEF_D_INTRINSIC(CODE, BUILTIN, NAME, MODULE, DECO, CTFE) \
    { INTRINSIC_ ## CODE, BUILT_IN_ ## BUILTIN, NAME, MODULE, DECO, CTFE },

#include "intrinsics.def"

#undef DEF_D_INTRINSIC
};
71
72 /* Checks if DECL is an intrinsic or run time library function that requires
73 special processing. Sets DECL_INTRINSIC_CODE so it can be identified
74 later in maybe_expand_intrinsic. */
75
76 void
77 maybe_set_intrinsic (FuncDeclaration *decl)
78 {
79 if (!decl->ident || decl->builtin != BUILTINunknown)
80 return;
81
82 /* The builtin flag is updated only if we can evaluate the intrinsic
83 at compile-time. Such as the math or bitop intrinsics. */
84 decl->builtin = BUILTINno;
85
86 /* Check if it's a compiler intrinsic. We only require that any
87 internally recognised intrinsics are declared in a module with
88 an explicit module declaration. */
89 Module *m = decl->getModule ();
90
91 if (!m || !m->md)
92 return;
93
94 TemplateInstance *ti = decl->isInstantiated ();
95 TemplateDeclaration *td = ti ? ti->tempdecl->isTemplateDeclaration () : NULL;
96
97 const char *tname = decl->ident->toChars ();
98 const char *tmodule = m->md->toChars ();
99 const char *tdeco = (td == NULL) ? decl->type->deco : NULL;
100
101 /* Look through all D intrinsics. */
102 for (size_t i = 0; i < (int) INTRINSIC_LAST; i++)
103 {
104 if (!intrinsic_decls[i].name)
105 continue;
106
107 if (strcmp (intrinsic_decls[i].name, tname) != 0
108 || strcmp (intrinsic_decls[i].module, tmodule) != 0)
109 continue;
110
111 /* Instantiated functions would have the wrong type deco, get it from the
112 template member instead. */
113 if (tdeco == NULL)
114 {
115 if (!td || !td->onemember)
116 return;
117
118 FuncDeclaration *fd = td->onemember->isFuncDeclaration ();
119 if (fd == NULL)
120 return;
121
122 OutBuffer buf;
123 mangleToBuffer (fd->type, &buf);
124 tdeco = buf.extractChars ();
125 }
126
127 /* Matching the type deco may be a bit too strict, as it means that all
128 function attributes that end up in the signature must be kept aligned
129 between the compiler and library declaration. */
130 if (strcmp (intrinsic_decls[i].deco, tdeco) == 0)
131 {
132 intrinsic_code code = intrinsic_decls[i].code;
133
134 if (decl->csym == NULL)
135 get_symbol_decl (decl);
136
137 /* If there is no function body, then the implementation is always
138 provided by the compiler. */
139 if (!decl->fbody)
140 set_decl_built_in_function (decl->csym, BUILT_IN_FRONTEND, code);
141
142 /* Infer whether the intrinsic can be used for CTFE, let the
143 front-end know that it can be evaluated at compile-time. */
144 switch (code)
145 {
146 case INTRINSIC_VA_ARG:
147 case INTRINSIC_C_VA_ARG:
148 case INTRINSIC_VASTART:
149 case INTRINSIC_ADDS:
150 case INTRINSIC_ADDSL:
151 case INTRINSIC_ADDU:
152 case INTRINSIC_ADDUL:
153 case INTRINSIC_SUBS:
154 case INTRINSIC_SUBSL:
155 case INTRINSIC_SUBU:
156 case INTRINSIC_SUBUL:
157 case INTRINSIC_MULS:
158 case INTRINSIC_MULSL:
159 case INTRINSIC_MULU:
160 case INTRINSIC_MULUI:
161 case INTRINSIC_MULUL:
162 case INTRINSIC_NEGS:
163 case INTRINSIC_NEGSL:
164 case INTRINSIC_VLOAD8:
165 case INTRINSIC_VLOAD16:
166 case INTRINSIC_VLOAD32:
167 case INTRINSIC_VLOAD64:
168 case INTRINSIC_VSTORE8:
169 case INTRINSIC_VSTORE16:
170 case INTRINSIC_VSTORE32:
171 case INTRINSIC_VSTORE64:
172 break;
173
174 case INTRINSIC_POW:
175 {
176 /* Check that this overload of pow() is has an equivalent
177 built-in function. It could be `int pow(int, int)'. */
178 tree rettype = TREE_TYPE (TREE_TYPE (decl->csym));
179 if (mathfn_built_in (rettype, BUILT_IN_POW) != NULL_TREE)
180 decl->builtin = BUILTINyes;
181 break;
182 }
183
184 default:
185 decl->builtin = BUILTINyes;
186 break;
187 }
188
189 /* The intrinsic was marked as CTFE-only. */
190 if (intrinsic_decls[i].ctfeonly)
191 DECL_BUILT_IN_CTFE (decl->csym) = 1;
192
193 DECL_INTRINSIC_CODE (decl->csym) = code;
194 break;
195 }
196 }
197 }
198
199 /* Construct a function call to the built-in function CODE, N is the number of
200 arguments, and the `...' parameters are the argument expressions.
201 The original call expression is held in CALLEXP. */
202
203 static tree
204 call_builtin_fn (tree callexp, built_in_function code, int n, ...)
205 {
206 tree *argarray = XALLOCAVEC (tree, n);
207 va_list ap;
208
209 va_start (ap, n);
210 for (int i = 0; i < n; i++)
211 argarray[i] = va_arg (ap, tree);
212 va_end (ap);
213
214 tree exp = build_call_expr_loc_array (EXPR_LOCATION (callexp),
215 builtin_decl_explicit (code),
216 n, argarray);
217 return convert (TREE_TYPE (callexp), fold (exp));
218 }
219
/* Expand a front-end intrinsic call to bsf().  This takes one argument,
   the signature to which can be either:

	int bsf (uint arg);
	int bsf (ulong arg);

   This scans all bits in the given argument starting with the first,
   returning the bit number of the first bit set.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_bsf (tree callexp)
{
  /* The bsf() intrinsic gets turned into __builtin_ctz(arg).
     The return value is supposed to be undefined if arg is zero.  */
  tree arg = CALL_EXPR_ARG (callexp, 0);
  int argsize = TYPE_PRECISION (TREE_TYPE (arg));

  /* Which variant of __builtin_ctz* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CTZ
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CTZL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CTZLL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  return call_builtin_fn (callexp, code, 1, arg);
}
248
249 /* Expand a front-end instrinsic call to bsr(). This takes one argument,
250 the signature to which can be either:
251
252 int bsr (uint arg);
253 int bsr (ulong arg);
254
255 This scans all bits in the given argument from the most significant bit
256 to the least significant, returning the bit number of the first bit set.
257 The original call expression is held in CALLEXP. */
258
259 static tree
260 expand_intrinsic_bsr (tree callexp)
261 {
262 /* The bsr() intrinsic gets turned into (size - 1) - __builtin_clz(arg).
263 The return value is supposed to be undefined if arg is zero. */
264 tree arg = CALL_EXPR_ARG (callexp, 0);
265 tree type = TREE_TYPE (arg);
266 int argsize = TYPE_PRECISION (type);
267
268 /* Which variant of __builtin_clz* should we call? */
269 built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CLZ
270 : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CLZL
271 : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CLZLL
272 : END_BUILTINS;
273
274 gcc_assert (code != END_BUILTINS);
275
276 tree result = call_builtin_fn (callexp, code, 1, arg);
277
278 /* Handle int -> long conversions. */
279 if (TREE_TYPE (result) != type)
280 result = fold_convert (type, result);
281
282 result = fold_build2 (MINUS_EXPR, type,
283 build_integer_cst (argsize - 1, type), result);
284 return fold_convert (TREE_TYPE (callexp), result);
285 }
286
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   bt(), btc(), btr(), or bts().  These intrinsics expect to take two arguments,
   the signature to which is:

	int bt (size_t* ptr, size_t bitnum);

   All intrinsics test if a bit is set and return the result of that condition.
   Variants of `bt' will then update that bit.  `btc' complements the bit,
   `bts' sets the bit, and `btr' resets the bit.  The original call expression
   is held in CALLEXP.  */

static tree
expand_intrinsic_bt (intrinsic_code intrinsic, tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree bitnum = CALL_EXPR_ARG (callexp, 1);
  tree type = TREE_TYPE (TREE_TYPE (ptr));

  /* size_t bitsize = sizeof(*ptr) * BITS_PER_UNIT;  */
  tree bitsize = fold_convert (type, TYPE_SIZE (type));

  /* ptr[bitnum / bitsize]  */
  ptr = build_array_index (ptr, fold_build2 (TRUNC_DIV_EXPR, type,
					     bitnum, bitsize));
  ptr = indirect_ref (type, ptr);

  /* mask = 1 << (bitnum % bitsize);
     NOTE(review): the `1' operand is size_one_node, whose type is sizetype
     rather than TYPE — presumably fold_build2 reconciles the operand types;
     confirm this on 32-bit targets where sizetype and the pointee type can
     have different precision.  */
  bitnum = fold_build2 (TRUNC_MOD_EXPR, type, bitnum, bitsize);
  bitnum = fold_build2 (LSHIFT_EXPR, type, size_one_node, bitnum);

  /* cond = ptr[bitnum / size] & mask;  */
  tree cond = fold_build2 (BIT_AND_EXPR, type, ptr, bitnum);

  /* cond ? -1 : 0;  */
  cond = build_condition (TREE_TYPE (callexp), d_truthvalue_conversion (cond),
			  integer_minus_one_node, integer_zero_node);

  /* Update the bit as needed, only testing the bit for bt().  */
  tree_code code;

  switch (intrinsic)
    {
    case INTRINSIC_BT:
    case INTRINSIC_BT64:
      /* Pure test — no memory update to schedule.  */
      return cond;

    case INTRINSIC_BTC:
    case INTRINSIC_BTC64:
      code = BIT_XOR_EXPR;
      break;

    case INTRINSIC_BTR:
    case INTRINSIC_BTR64:
      /* btr() clears the bit, so AND with the inverted mask.  */
      bitnum = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (bitnum), bitnum);
      code = BIT_AND_EXPR;
      break;

    case INTRINSIC_BTS:
    case INTRINSIC_BTS64:
      code = BIT_IOR_EXPR;
      break;

    default:
      gcc_unreachable ();
    }

  /* ptr[bitnum / size] op= mask;  */
  ptr = modify_expr (ptr, fold_build2 (code, TREE_TYPE (ptr), ptr, bitnum));

  /* Store the condition result in a temporary, and return expressions in
     correct order of evaluation: test first, then update, then yield the
     saved test result.  */
  tree tmp = build_local_temp (TREE_TYPE (callexp));
  cond = modify_expr (tmp, cond);

  return compound_expr (cond, compound_expr (ptr, tmp));
}
363
364 /* Expand a front-end intrinsic call to popcnt(). This takes one argument, the
365 signature to which can be either:
366
367 int popcnt (uint arg);
368 int popcnt (ulong arg);
369
370 Calculates the number of set bits in an integer. The original call
371 expression is held in CALLEXP. */
372
373 static tree
374 expand_intrinsic_popcnt (tree callexp)
375 {
376 tree arg = CALL_EXPR_ARG (callexp, 0);
377 int argsize = TYPE_PRECISION (TREE_TYPE (arg));
378
379 /* Which variant of __builtin_popcount* should we call? */
380 built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_POPCOUNT
381 : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTL
382 : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTLL
383 : END_BUILTINS;
384
385 gcc_assert (code != END_BUILTINS);
386
387 return call_builtin_fn (callexp, code, 1, arg);
388 }
389
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   rol() or ror().  These intrinsics expect to take one or two arguments,
   the signature to which can be either:

	T rol(T) (const T value, const uint count);
	T rol(uint count, T) (const T value);
	T ror(T) (const T value, const uint count);
	T ror(uint count, T) (const T value);

   This bitwise rotates VALUE left or right by COUNT bit positions.  */

static tree
expand_intrinsic_rotate (intrinsic_code intrinsic, tree callexp)
{
  tree type = TREE_TYPE (callexp);
  tree value = CALL_EXPR_ARG (callexp, 0);
  tree count;
  tree_code code;

  /* Get the equivalent tree code for the intrinsic.  */
  if (intrinsic == INTRINSIC_ROL || intrinsic == INTRINSIC_ROL_TIARG)
    code = LROTATE_EXPR;
  else if (intrinsic == INTRINSIC_ROR || intrinsic == INTRINSIC_ROR_TIARG)
    code = RROTATE_EXPR;
  else
    gcc_unreachable ();

  /* Get the COUNT parameter.  Either from the call expression arguments or the
     template instantiation arguments.  */
  if (intrinsic == INTRINSIC_ROL || intrinsic == INTRINSIC_ROR)
    count = CALL_EXPR_ARG (callexp, 1);
  else
    {
      tree callee = CALL_EXPR_FN (callexp);

      /* The callee is usually wrapped in an ADDR_EXPR; look through it to
	 reach the FUNCTION_DECL.  */
      if (TREE_CODE (callee) == ADDR_EXPR)
	callee = TREE_OPERAND (callee, 0);

      /* Retrieve from the encoded template instantiation.  The count is the
	 first template argument and must be an integer constant.  */
      TemplateInstance *ti = DECL_LANG_FRONTEND (callee)->isInstantiated ();
      gcc_assert (ti && ti->tiargs && ti->tiargs->length == 2);

      Expression *e = isExpression ((*ti->tiargs)[0]);
      gcc_assert (e && e->op == TOKint64);
      count = build_expr (e, true);
    }

  return fold_build2 (code, type, value, count);
}
439
440 /* Expand a front-end intrinsic call to copysign(). This takes two arguments,
441 the signature to which can be either:
442
443 float copysign (T to, float from);
444 double copysign (T to, double from);
445 real copysign (T to, real from);
446
447 This computes a value composed of TO with the sign bit of FROM. The original
448 call expression is held in CALLEXP. */
449
450 static tree
451 expand_intrinsic_copysign (tree callexp)
452 {
453 tree to = CALL_EXPR_ARG (callexp, 0);
454 tree from = CALL_EXPR_ARG (callexp, 1);
455 tree type = TREE_TYPE (to);
456
457 /* Convert parameters to the same type. Prefer the first parameter unless it
458 is an integral type. */
459 if (INTEGRAL_TYPE_P (type))
460 {
461 to = fold_convert (TREE_TYPE (from), to);
462 type = TREE_TYPE (to);
463 }
464 else
465 from = fold_convert (type, from);
466
467 /* Which variant of __builtin_copysign* should we call? */
468 tree builtin = mathfn_built_in (type, BUILT_IN_COPYSIGN);
469 gcc_assert (builtin != NULL_TREE);
470
471 return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2,
472 to, from);
473 }
474
475 /* Expand a front-end intrinsic call to pow(). This takes two arguments, the
476 signature to which can be either:
477
478 float pow (float base, T exponent);
479 double pow (double base, T exponent);
480 real pow (real base, T exponent);
481
482 This computes the value of BASE raised to the power of EXPONENT.
483 The original call expression is held in CALLEXP. */
484
485 static tree
486 expand_intrinsic_pow (tree callexp)
487 {
488 tree base = CALL_EXPR_ARG (callexp, 0);
489 tree exponent = CALL_EXPR_ARG (callexp, 1);
490 tree exptype = TREE_TYPE (exponent);
491
492 /* Which variant of __builtin_pow* should we call? */
493 built_in_function code = SCALAR_FLOAT_TYPE_P (exptype) ? BUILT_IN_POW
494 : INTEGRAL_TYPE_P (exptype) ? BUILT_IN_POWI
495 : END_BUILTINS;
496 gcc_assert (code != END_BUILTINS);
497
498 tree builtin = mathfn_built_in (TREE_TYPE (base), code);
499 gcc_assert (builtin != NULL_TREE);
500
501 return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2,
502 base, exponent);
503 }
504
505 /* Expand a front-end intrinsic call to toPrec(). This takes one argument, the
506 signature to which can be either:
507
508 T toPrec(T)(float f);
509 T toPrec(T)(double f);
510 T toPrec(T)(real f);
511
512 This rounds the argument F to the precision of the specified floating
513 point type T. The original call expression is held in CALLEXP. */
514
515 static tree
516 expand_intrinsic_toprec (tree callexp)
517 {
518 tree f = CALL_EXPR_ARG (callexp, 0);
519 tree type = TREE_TYPE (callexp);
520
521 return convert (type, f);
522 }
523
/* Expand a front-end intrinsic call to va_arg().  This takes either one or two
   arguments, the signature to which can be either:

	T va_arg(T) (ref va_list ap);
	void va_arg(T) (va_list ap, ref T parmn);

   This retrieves the next variadic parameter that is type T from the given
   va_list.  If also given, store the value into parmn, otherwise return it.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vaarg (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = NULL_TREE;
  tree type;

  STRIP_NOPS (ap);

  /* One argument: the fetched value is the call's own result type.
     Two arguments: the value's type comes from the `ref' output parameter.  */
  if (call_expr_nargs (callexp) == 1)
    type = TREE_TYPE (callexp);
  else
    {
      parmn = CALL_EXPR_ARG (callexp, 1);
      STRIP_NOPS (parmn);

      /* The `ref' argument to va_arg is either an address or reference,
	 get the value of it.  */
      if (TREE_CODE (parmn) == PARM_DECL && POINTER_TYPE_P (TREE_TYPE (parmn)))
	parmn = build_deref (parmn);
      else
	{
	  gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
	  parmn = TREE_OPERAND (parmn, 0);
	}

      type = TREE_TYPE (parmn);
    }

  /* (T) VA_ARG_EXP<ap>;  */
  tree exp = build1 (VA_ARG_EXPR, type, ap);

  /* parmn = (T) VA_ARG_EXP<ap>;  */
  if (parmn != NULL_TREE)
    exp = modify_expr (parmn, exp);

  return exp;
}
572
/* Expand a front-end intrinsic call to va_start(), which takes two arguments,
   the signature to which is:

	void va_start(T) (out va_list ap, ref T parmn);

   This initializes the va_list type, where parmn should be the last named
   parameter.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vastart (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = CALL_EXPR_ARG (callexp, 1);

  STRIP_NOPS (ap);
  STRIP_NOPS (parmn);

  /* The va_list argument should already have its address taken.  The second
     argument, however, is inout and that needs to be fixed to prevent a
     warning.  Could be casting, so need to check type too?  */
  gcc_assert (TREE_CODE (ap) == ADDR_EXPR
	      || (TREE_CODE (ap) == PARM_DECL
		  && POINTER_TYPE_P (TREE_TYPE (ap))));

  /* Assuming nobody tries to change the return type.  */
  if (TREE_CODE (parmn) != PARM_DECL)
    {
      gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
      parmn = TREE_OPERAND (parmn, 0);
    }

  return call_builtin_fn (callexp, BUILT_IN_VA_START, 2, ap, parmn);
}
606
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   adds(), addu(), subs(), subu(), negs(), muls(), or mulu().  These intrinsics
   expect to take two or three arguments, the signature to which can be either:

	int adds (int x, int y, ref bool overflow);
	long adds (long x, long y, ref bool overflow);
	int negs (int x, ref bool overflow);
	long negs (long x, ref bool overflow);

   This performs an operation on two signed or unsigned integers, checking for
   overflow.  The overflow is sticky, meaning that a sequence of operations
   can be done and overflow need only be checked at the end.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_checkedint (intrinsic_code intrinsic, tree callexp)
{
  tree type = TREE_TYPE (callexp);
  tree x;
  tree y;
  tree overflow;
  internal_fn icode;

  /* Which variant of *_OVERFLOW should we generate?  */
  switch (intrinsic)
    {
    case INTRINSIC_ADDS:
    case INTRINSIC_ADDSL:
    case INTRINSIC_ADDU:
    case INTRINSIC_ADDUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_ADD_OVERFLOW;
      break;

    case INTRINSIC_SUBS:
    case INTRINSIC_SUBSL:
    case INTRINSIC_SUBU:
    case INTRINSIC_SUBUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_SUB_OVERFLOW;
      break;

    case INTRINSIC_MULS:
    case INTRINSIC_MULSL:
    case INTRINSIC_MULU:
    case INTRINSIC_MULUI:
    case INTRINSIC_MULUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_MUL_OVERFLOW;
      break;

    case INTRINSIC_NEGS:
    case INTRINSIC_NEGSL:
      /* The negs() intrinsic gets turned into SUB_OVERFLOW (0, y).  */
      x = fold_convert (type, integer_zero_node);
      y = CALL_EXPR_ARG (callexp, 0);
      overflow = CALL_EXPR_ARG (callexp, 1);
      icode = IFN_SUB_OVERFLOW;
      break;

    default:
      gcc_unreachable ();
    }

  /* The internal function returns a complex value: the real part is the
     arithmetic result, the imaginary part is the overflow flag.  */
  tree result
    = build_call_expr_internal_loc (EXPR_LOCATION (callexp), icode,
				    build_complex_type (type), 2, x, y);

  STRIP_NOPS (overflow);
  overflow = build_deref (overflow);

  /* Assign returned result to overflow parameter, however if overflow is
     already true, maintain its value.  */
  type = TREE_TYPE (overflow);
  result = save_expr (result);

  tree exp = fold_build2 (BIT_IOR_EXPR, type, overflow,
			  fold_convert (type, imaginary_part (result)));
  exp = modify_expr (overflow, exp);

  /* Return the value of result.  */
  return compound_expr (exp, real_part (result));
}
696
697 /* Expand a front-end instrinsic call to volatileLoad(). This takes one
698 argument, the signature to which can be either:
699
700 ubyte volatileLoad (ubyte* ptr);
701 ushort volatileLoad (ushort* ptr);
702 uint volatileLoad (uint* ptr);
703 ulong volatileLoad (ulong* ptr);
704
705 This reads a value from the memory location indicated by ptr. Calls to
706 them are be guaranteed to not be removed (such as during DCE) or reordered
707 in the same thread. The original call expression is held in CALLEXP. */
708
709 static tree
710 expand_volatile_load (tree callexp)
711 {
712 tree ptr = CALL_EXPR_ARG (callexp, 0);
713 tree ptrtype = TREE_TYPE (ptr);
714 gcc_assert (POINTER_TYPE_P (ptrtype));
715
716 /* (T) *(volatile T *) ptr; */
717 tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
718 tree result = indirect_ref (type, ptr);
719 TREE_THIS_VOLATILE (result) = 1;
720
721 return result;
722 }
723
724 /* Expand a front-end instrinsic call to volatileStore(). This takes two
725 arguments, the signature to which can be either:
726
727 void volatileStore (ubyte* ptr, ubyte value);
728 void volatileStore (ushort* ptr, ushort value);
729 void volatileStore (uint* ptr, uint value);
730 void volatileStore (ulong* ptr, ulong value);
731
732 This writes a value to the memory location indicated by ptr. Calls to
733 them are be guaranteed to not be removed (such as during DCE) or reordered
734 in the same thread. The original call expression is held in CALLEXP. */
735
736 static tree
737 expand_volatile_store (tree callexp)
738 {
739 tree ptr = CALL_EXPR_ARG (callexp, 0);
740 tree ptrtype = TREE_TYPE (ptr);
741 gcc_assert (POINTER_TYPE_P (ptrtype));
742
743 /* (T) *(volatile T *) ptr; */
744 tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
745 tree result = indirect_ref (type, ptr);
746 TREE_THIS_VOLATILE (result) = 1;
747
748 /* (*(volatile T *) ptr) = value; */
749 tree value = CALL_EXPR_ARG (callexp, 1);
750 return modify_expr (result, value);
751 }
752
/* If CALLEXP is for an intrinsic, expand and return inlined compiler
   generated instructions.  Most map directly to GCC builtins, others
   require a little extra work around them.  */

tree
maybe_expand_intrinsic (tree callexp)
{
  tree callee = CALL_EXPR_FN (callexp);

  /* Look through the ADDR_EXPR wrapper to find the called decl.  */
  if (TREE_CODE (callee) == ADDR_EXPR)
    callee = TREE_OPERAND (callee, 0);

  /* Indirect or otherwise non-decl calls are never intrinsics.  */
  if (TREE_CODE (callee) != FUNCTION_DECL)
    return callexp;

  /* Don't expand CTFE-only intrinsics outside of semantic processing.  */
  if (DECL_BUILT_IN_CTFE (callee) && !doing_semantic_analysis_p)
    return callexp;

  intrinsic_code intrinsic = DECL_INTRINSIC_CODE (callee);
  built_in_function code;

  switch (intrinsic)
    {
    case INTRINSIC_NONE:
      return callexp;

    case INTRINSIC_BSF:
    case INTRINSIC_BSF64:
      return expand_intrinsic_bsf (callexp);

    case INTRINSIC_BSR:
    case INTRINSIC_BSR64:
      return expand_intrinsic_bsr (callexp);

    case INTRINSIC_BT:
    case INTRINSIC_BT64:
    case INTRINSIC_BTC:
    case INTRINSIC_BTC64:
    case INTRINSIC_BTR:
    case INTRINSIC_BTR64:
    case INTRINSIC_BTS:
    case INTRINSIC_BTS64:
      return expand_intrinsic_bt (intrinsic, callexp);

    case INTRINSIC_POPCNT32:
    case INTRINSIC_POPCNT64:
      return expand_intrinsic_popcnt (callexp);

    case INTRINSIC_ROL:
    case INTRINSIC_ROL_TIARG:
    case INTRINSIC_ROR:
    case INTRINSIC_ROR_TIARG:
      return expand_intrinsic_rotate (intrinsic, callexp);

    /* Unary intrinsics that map directly onto a GCC built-in; the
       built_in field of the table entry gives the target function.  */
    case INTRINSIC_BSWAP32:
    case INTRINSIC_BSWAP64:
    case INTRINSIC_CEIL:
    case INTRINSIC_CEILF:
    case INTRINSIC_CEILL:
    case INTRINSIC_COSL:
    case INTRINSIC_EXP:
    case INTRINSIC_EXP2:
    case INTRINSIC_EXPM1:
    case INTRINSIC_FABSL:
    case INTRINSIC_FLOOR:
    case INTRINSIC_FLOORF:
    case INTRINSIC_FLOORL:
    case INTRINSIC_ISFINITE:
    case INTRINSIC_ISINFINITY:
    case INTRINSIC_ISNAN:
    case INTRINSIC_LOG:
    case INTRINSIC_LOG10:
    case INTRINSIC_LOG2:
    case INTRINSIC_RINTL:
    case INTRINSIC_RNDTOLL:
    case INTRINSIC_ROUND:
    case INTRINSIC_SINL:
    case INTRINSIC_SQRT:
    case INTRINSIC_SQRTF:
    case INTRINSIC_SQRTL:
    case INTRINSIC_TAN:
    case INTRINSIC_TRUNC:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 1,
			      CALL_EXPR_ARG (callexp, 0));

    /* Binary intrinsics that map directly onto a GCC built-in.  */
    case INTRINSIC_FMAX:
    case INTRINSIC_FMIN:
    case INTRINSIC_LDEXPL:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_FMA:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 3,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1),
			      CALL_EXPR_ARG (callexp, 2));

    case INTRINSIC_COPYSIGN:
    case INTRINSIC_COPYSIGNI:
      return expand_intrinsic_copysign (callexp);

    case INTRINSIC_POW:
      return expand_intrinsic_pow (callexp);

    case INTRINSIC_TOPREC:
    case INTRINSIC_TOPRECF:
    case INTRINSIC_TOPRECL:
      return expand_intrinsic_toprec (callexp);

    case INTRINSIC_VA_ARG:
    case INTRINSIC_C_VA_ARG:
      return expand_intrinsic_vaarg (callexp);

    case INTRINSIC_VASTART:
      return expand_intrinsic_vastart (callexp);

    case INTRINSIC_ADDS:
    case INTRINSIC_ADDSL:
    case INTRINSIC_ADDU:
    case INTRINSIC_ADDUL:
    case INTRINSIC_SUBS:
    case INTRINSIC_SUBSL:
    case INTRINSIC_SUBU:
    case INTRINSIC_SUBUL:
    case INTRINSIC_MULS:
    case INTRINSIC_MULSL:
    case INTRINSIC_MULU:
    case INTRINSIC_MULUI:
    case INTRINSIC_MULUL:
    case INTRINSIC_NEGS:
    case INTRINSIC_NEGSL:
      return expand_intrinsic_checkedint (intrinsic, callexp);

    case INTRINSIC_VLOAD8:
    case INTRINSIC_VLOAD16:
    case INTRINSIC_VLOAD32:
    case INTRINSIC_VLOAD64:
      return expand_volatile_load (callexp);

    case INTRINSIC_VSTORE8:
    case INTRINSIC_VSTORE16:
    case INTRINSIC_VSTORE32:
    case INTRINSIC_VSTORE64:
      return expand_volatile_store (callexp);

    default:
      gcc_unreachable ();
    }
}