d: Refactor matching and lowering of intrinsic functions.
[gcc.git] / gcc / d / intrinsics.cc
/* intrinsics.cc -- D language compiler intrinsics.
   Copyright (C) 2006-2020 Free Software Foundation, Inc.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"

#include "dmd/declaration.h"
#include "dmd/identifier.h"
#include "dmd/mangle.h"
#include "dmd/module.h"
#include "dmd/template.h"

#include "tm.h"
#include "function.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "builtins.h"

#include "d-tree.h"


/* An internal struct used to hold information on D intrinsics.  */

struct intrinsic_decl
{
  /* The DECL_INTRINSIC_CODE of this decl.  */
  intrinsic_code code;

  /* The DECL_FUNCTION_CODE of this decl, if it directly maps to any.  */
  built_in_function built_in;

  /* The name of the intrinsic.  */
  const char *name;

  /* The module where the intrinsic is located.  */
  const char *module;

  /* The mangled signature decoration of the intrinsic.  */
  const char *deco;

  /* True if the intrinsic is only handled in CTFE.  */
  bool ctfeonly;
};

static const intrinsic_decl intrinsic_decls[] =
{
#define DEF_D_INTRINSIC(CODE, BUILTIN, NAME, MODULE, DECO, CTFE) \
    { INTRINSIC_ ## CODE, BUILT_IN_ ## BUILTIN, NAME, MODULE, DECO, CTFE },

#include "intrinsics.def"

#undef DEF_D_INTRINSIC
};
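
/* For illustration only (not part of the original source): an entry in
   intrinsics.def such as the hypothetical

     DEF_D_INTRINSIC (BSF, NONE, "bsf", "core.bitop", "FNaNbNiNfkZi", false)

   would expand through DEF_D_INTRINSIC above into the initializer

     { INTRINSIC_BSF, BUILT_IN_NONE, "bsf", "core.bitop", "FNaNbNiNfkZi",
       false },

   where the deco string is the D mangle of the function type that
   maybe_set_intrinsic compares against below.  The exact deco shown here
   is an assumption.  */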

/* Checks if DECL is an intrinsic or run-time library function that requires
   special processing.  Sets DECL_INTRINSIC_CODE so it can be identified
   later in maybe_expand_intrinsic.  */

void
maybe_set_intrinsic (FuncDeclaration *decl)
{
  if (!decl->ident || decl->builtin != BUILTINunknown)
    return;

  /* The builtin flag is updated only if we can evaluate the intrinsic
     at compile time, such as the math or bitop intrinsics.  */
  decl->builtin = BUILTINno;

  /* Check if it's a compiler intrinsic.  We only require that any
     internally recognised intrinsics are declared in a module with
     an explicit module declaration.  */
  Module *m = decl->getModule ();

  if (!m || !m->md)
    return;

  TemplateInstance *ti = decl->isInstantiated ();
  TemplateDeclaration *td = ti ? ti->tempdecl->isTemplateDeclaration () : NULL;

  const char *tname = decl->ident->toChars ();
  const char *tmodule = m->md->toChars ();
  const char *tdeco = (td == NULL) ? decl->type->deco : NULL;

  /* Look through all D intrinsics.  */
  for (size_t i = 0; i < (size_t) INTRINSIC_LAST; i++)
    {
      if (!intrinsic_decls[i].name)
	continue;

      if (strcmp (intrinsic_decls[i].name, tname) != 0
	  || strcmp (intrinsic_decls[i].module, tmodule) != 0)
	continue;

      /* Instantiated functions would have the wrong type deco; get it from
	 the template member instead.  */
      if (tdeco == NULL)
	{
	  if (!td || !td->onemember)
	    return;

	  FuncDeclaration *fd = td->onemember->isFuncDeclaration ();
	  if (fd == NULL)
	    return;

	  OutBuffer buf;
	  mangleToBuffer (fd->type, &buf);
	  tdeco = buf.extractChars ();
	}

      /* Matching the type deco may be a bit too strict, as it means that all
	 function attributes that end up in the signature must be kept aligned
	 between the compiler and library declaration.  */
      if (strcmp (intrinsic_decls[i].deco, tdeco) == 0)
	{
	  intrinsic_code code = intrinsic_decls[i].code;

	  if (decl->csym == NULL)
	    get_symbol_decl (decl);

	  /* If there is no function body, then the implementation is always
	     provided by the compiler.  */
	  if (!decl->fbody)
	    set_decl_built_in_function (decl->csym, BUILT_IN_FRONTEND, code);

	  /* Infer whether the intrinsic can be used for CTFE, and let the
	     front-end know that it can be evaluated at compile time.  */
	  switch (code)
	    {
	    case INTRINSIC_VA_ARG:
	    case INTRINSIC_C_VA_ARG:
	    case INTRINSIC_VASTART:
	    case INTRINSIC_ADDS:
	    case INTRINSIC_ADDSL:
	    case INTRINSIC_ADDU:
	    case INTRINSIC_ADDUL:
	    case INTRINSIC_SUBS:
	    case INTRINSIC_SUBSL:
	    case INTRINSIC_SUBU:
	    case INTRINSIC_SUBUL:
	    case INTRINSIC_MULS:
	    case INTRINSIC_MULSL:
	    case INTRINSIC_MULU:
	    case INTRINSIC_MULUI:
	    case INTRINSIC_MULUL:
	    case INTRINSIC_NEGS:
	    case INTRINSIC_NEGSL:
	    case INTRINSIC_VLOAD8:
	    case INTRINSIC_VLOAD16:
	    case INTRINSIC_VLOAD32:
	    case INTRINSIC_VLOAD64:
	    case INTRINSIC_VSTORE8:
	    case INTRINSIC_VSTORE16:
	    case INTRINSIC_VSTORE32:
	    case INTRINSIC_VSTORE64:
	      break;

	    case INTRINSIC_POW:
	      {
		/* Check that this overload of pow() has an equivalent
		   built-in function.  It could be `int pow(int, int)'.  */
		tree rettype = TREE_TYPE (TREE_TYPE (decl->csym));
		if (mathfn_built_in (rettype, BUILT_IN_POW) != NULL_TREE)
		  decl->builtin = BUILTINyes;
		break;
	      }

	    default:
	      decl->builtin = BUILTINyes;
	      break;
	    }

	  /* The intrinsic was marked as CTFE-only.  */
	  if (intrinsic_decls[i].ctfeonly)
	    DECL_BUILT_IN_CTFE (decl->csym) = 1;

	  DECL_INTRINSIC_CODE (decl->csym) = code;
	  break;
	}
    }
}
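
/* For illustration only (not part of the original source): given a library
   declaration in module core.bitop such as

     int bsf (uint arg);

   with no function body, the loop above matches it against the table by
   name ("bsf"), module ("core.bitop"), and type deco.  On a match, its
   backend symbol is created if necessary and tagged with the matched
   intrinsic code, and, because there is no fbody, it is also marked as a
   BUILT_IN_FRONTEND function so the compiler provides the implementation.  */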

/* Construct a function call to the built-in function CODE; N is the number
   of arguments, and the `...' parameters are the argument expressions.
   The original call expression is held in CALLEXP.  */

static tree
call_builtin_fn (tree callexp, built_in_function code, int n, ...)
{
  tree *argarray = XALLOCAVEC (tree, n);
  va_list ap;

  va_start (ap, n);
  for (int i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);

  tree exp = build_call_expr_loc_array (EXPR_LOCATION (callexp),
					builtin_decl_explicit (code),
					n, argarray);
  return convert (TREE_TYPE (callexp), fold (exp));
}
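
/* For illustration only (not part of the original source): a typical use,
   as seen in the expanders below, is

     tree arg = CALL_EXPR_ARG (callexp, 0);
     return call_builtin_fn (callexp, BUILT_IN_CTZ, 1, arg);

   which builds `__builtin_ctz (arg)' at the location of CALLEXP and
   converts the result to the return type of the original call.  */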

/* Expand a front-end intrinsic call to bsf().  This takes one argument,
   the signature to which can be either:

	int bsf (uint arg);
	int bsf (ulong arg);

   This scans all bits in the given argument starting with the first,
   returning the bit number of the first bit set.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_bsf (tree callexp)
{
  /* The bsf() intrinsic gets turned into __builtin_ctz(arg).
     The return value is supposed to be undefined if arg is zero.  */
  tree arg = CALL_EXPR_ARG (callexp, 0);
  int argsize = TYPE_PRECISION (TREE_TYPE (arg));

  /* Which variant of __builtin_ctz* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CTZ
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CTZL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CTZLL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  return call_builtin_fn (callexp, code, 1, arg);
}
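
/* For illustration only (not part of the original source): with a 32-bit
   `uint' argument, a call such as `bsf (0x10)' lowers to
   `__builtin_ctz (0x10)'.  0x10 is binary 10000, which has four trailing
   zero bits, so the call evaluates to 4, the index of the first set bit.  */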

/* Expand a front-end intrinsic call to bsr().  This takes one argument,
   the signature to which can be either:

	int bsr (uint arg);
	int bsr (ulong arg);

   This scans all bits in the given argument from the most significant bit
   to the least significant, returning the bit number of the first bit set.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_bsr (tree callexp)
{
  /* The bsr() intrinsic gets turned into (size - 1) - __builtin_clz(arg).
     The return value is supposed to be undefined if arg is zero.  */
  tree arg = CALL_EXPR_ARG (callexp, 0);
  tree type = TREE_TYPE (arg);
  int argsize = TYPE_PRECISION (type);

  /* Which variant of __builtin_clz* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CLZ
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CLZL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CLZLL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  tree result = call_builtin_fn (callexp, code, 1, arg);

  /* Handle int -> long conversions.  */
  if (TREE_TYPE (result) != type)
    result = fold_convert (type, result);

  result = fold_build2 (MINUS_EXPR, type,
			build_integer_cst (argsize - 1, type), result);
  return fold_convert (TREE_TYPE (callexp), result);
}
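
/* For illustration only (not part of the original source): with a 32-bit
   `uint' argument, `bsr (0x10)' lowers to `31 - __builtin_clz (0x10)'.
   0x10 has 27 leading zero bits in 32 bits, so the result is 31 - 27 = 4,
   the index of the most significant set bit.  */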

/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   bt(), btc(), btr(), or bts().  These intrinsics expect to take two
   arguments, the signature to which is:

	int bt (size_t* ptr, size_t bitnum);

   All intrinsics test if a bit is set and return the result of that
   condition.  Variants of `bt' will then update that bit.  `btc' complements
   the bit, `bts' sets the bit, and `btr' resets the bit.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_bt (intrinsic_code intrinsic, tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree bitnum = CALL_EXPR_ARG (callexp, 1);
  tree type = TREE_TYPE (TREE_TYPE (ptr));

  /* size_t bitsize = sizeof(*ptr) * BITS_PER_UNIT;  */
  tree bitsize = fold_convert (type, TYPE_SIZE (type));

  /* ptr[bitnum / bitsize]  */
  ptr = build_array_index (ptr, fold_build2 (TRUNC_DIV_EXPR, type,
					     bitnum, bitsize));
  ptr = indirect_ref (type, ptr);

  /* mask = 1 << (bitnum % bitsize);  */
  bitnum = fold_build2 (TRUNC_MOD_EXPR, type, bitnum, bitsize);
  bitnum = fold_build2 (LSHIFT_EXPR, type, size_one_node, bitnum);

  /* cond = ptr[bitnum / size] & mask;  */
  tree cond = fold_build2 (BIT_AND_EXPR, type, ptr, bitnum);

  /* cond ? -1 : 0;  */
  cond = build_condition (TREE_TYPE (callexp), d_truthvalue_conversion (cond),
			  integer_minus_one_node, integer_zero_node);

  /* Update the bit as needed, only testing the bit for bt().  */
  tree_code code;

  switch (intrinsic)
    {
    case INTRINSIC_BT:
    case INTRINSIC_BT64:
      return cond;

    case INTRINSIC_BTC:
    case INTRINSIC_BTC64:
      code = BIT_XOR_EXPR;
      break;

    case INTRINSIC_BTR:
    case INTRINSIC_BTR64:
      bitnum = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (bitnum), bitnum);
      code = BIT_AND_EXPR;
      break;

    case INTRINSIC_BTS:
    case INTRINSIC_BTS64:
      code = BIT_IOR_EXPR;
      break;

    default:
      gcc_unreachable ();
    }

  /* ptr[bitnum / size] op= mask;  */
  ptr = modify_expr (ptr, fold_build2 (code, TREE_TYPE (ptr), ptr, bitnum));

  /* Store the condition result in a temporary, and return expressions in
     correct order of evaluation.  */
  tree tmp = build_local_temp (TREE_TYPE (callexp));
  cond = modify_expr (tmp, cond);

  return compound_expr (cond, compound_expr (ptr, tmp));
}
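
/* For illustration only (not part of the original source): for a call
   `bts (p, n)' on a target with 32-bit size_t, the expansion is roughly

     tmp = (p[n / 32] & (1 << (n % 32))) ? -1 : 0,
     p[n / 32] |= (1 << (n % 32)),
     tmp;

   i.e. the old state of the bit is saved in a temporary before the store,
   and the temporary is the value of the whole expression.  */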

/* Expand a front-end intrinsic call to popcnt().  This takes one argument,
   the signature to which can be either:

	int popcnt (uint arg);
	int popcnt (ulong arg);

   Calculates the number of set bits in an integer.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_popcnt (tree callexp)
{
  tree arg = CALL_EXPR_ARG (callexp, 0);
  int argsize = TYPE_PRECISION (TREE_TYPE (arg));

  /* Which variant of __builtin_popcount* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_POPCOUNT
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTLL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  return call_builtin_fn (callexp, code, 1, arg);
}
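
/* For illustration only (not part of the original source): `popcnt (0xAA)'
   lowers to `__builtin_popcount (0xAA)'.  0xAA is binary 10101010, which
   has four bits set, so the call evaluates to 4.  */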

/* Expand a front-end intrinsic call to copysign().  This takes two
   arguments, the signature to which can be either:

	float copysign (T to, float from);
	double copysign (T to, double from);
	real copysign (T to, real from);

   This computes a value composed of TO with the sign bit of FROM.  The
   original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_copysign (tree callexp)
{
  tree to = CALL_EXPR_ARG (callexp, 0);
  tree from = CALL_EXPR_ARG (callexp, 1);
  tree type = TREE_TYPE (to);

  /* Convert parameters to the same type.  Prefer the first parameter unless
     it is an integral type.  */
  if (INTEGRAL_TYPE_P (type))
    {
      to = fold_convert (TREE_TYPE (from), to);
      type = TREE_TYPE (to);
    }
  else
    from = fold_convert (type, from);

  /* Which variant of __builtin_copysign* should we call?  */
  tree builtin = mathfn_built_in (type, BUILT_IN_COPYSIGN);
  gcc_assert (builtin != NULL_TREE);

  return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2,
			  to, from);
}
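
/* For illustration only (not part of the original source): a call
   `copysign (3.0, -1.0)' with `double' arguments lowers to
   `__builtin_copysign (3.0, -1.0)', which yields -3.0: the magnitude of
   the first argument combined with the sign bit of the second.  An
   integral first argument, e.g. `copysign (3, -1.0)', is first converted
   to the floating-point type of the second argument.  */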

/* Expand a front-end intrinsic call to pow().  This takes two arguments, the
   signature to which can be either:

	float pow (float base, T exponent);
	double pow (double base, T exponent);
	real pow (real base, T exponent);

   This computes the value of BASE raised to the power of EXPONENT.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_pow (tree callexp)
{
  tree base = CALL_EXPR_ARG (callexp, 0);
  tree exponent = CALL_EXPR_ARG (callexp, 1);
  tree exptype = TREE_TYPE (exponent);

  /* Which variant of __builtin_pow* should we call?  */
  built_in_function code = SCALAR_FLOAT_TYPE_P (exptype) ? BUILT_IN_POW
    : INTEGRAL_TYPE_P (exptype) ? BUILT_IN_POWI
    : END_BUILTINS;
  gcc_assert (code != END_BUILTINS);

  tree builtin = mathfn_built_in (TREE_TYPE (base), code);
  gcc_assert (builtin != NULL_TREE);

  return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2,
			  base, exponent);
}
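
/* For illustration only (not part of the original source): the builtin is
   selected on the exponent type, then matched to the base type by
   mathfn_built_in.  So `pow (2.0f, 3)' with a `float' base and `int'
   exponent lowers to `__builtin_powif (2.0f, 3)', while `pow (2.0, 3.0)'
   with `double' arguments lowers to `__builtin_pow (2.0, 3.0)'.  */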

/* Expand a front-end intrinsic call to toPrec().  This takes one argument,
   the signature to which can be either:

	T toPrec(T)(float f);
	T toPrec(T)(double f);
	T toPrec(T)(real f);

   This rounds the argument F to the precision of the specified floating
   point type T.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_toprec (tree callexp)
{
  tree f = CALL_EXPR_ARG (callexp, 0);
  tree type = TREE_TYPE (callexp);

  return convert (type, f);
}

/* Expand a front-end intrinsic call to va_arg().  This takes either one or
   two arguments, the signature to which can be either:

	T va_arg(T) (ref va_list ap);
	void va_arg(T) (va_list ap, ref T parmn);

   This retrieves the next variadic parameter that is type T from the given
   va_list.  If also given, store the value into parmn, otherwise return it.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vaarg (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = NULL_TREE;
  tree type;

  STRIP_NOPS (ap);

  if (call_expr_nargs (callexp) == 1)
    type = TREE_TYPE (callexp);
  else
    {
      parmn = CALL_EXPR_ARG (callexp, 1);
      STRIP_NOPS (parmn);
      gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
      parmn = TREE_OPERAND (parmn, 0);
      type = TREE_TYPE (parmn);
    }

  /* (T) VA_ARG_EXP<ap>;  */
  tree exp = build1 (VA_ARG_EXPR, type, ap);

  /* parmn = (T) VA_ARG_EXP<ap>;  */
  if (parmn != NULL_TREE)
    exp = modify_expr (parmn, exp);

  return exp;
}

/* Expand a front-end intrinsic call to va_start(), which takes two
   arguments, the signature to which is:

	void va_start(T) (out va_list ap, ref T parmn);

   This initializes the va_list type, where parmn should be the last named
   parameter.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vastart (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = CALL_EXPR_ARG (callexp, 1);

  STRIP_NOPS (ap);
  STRIP_NOPS (parmn);

  /* The va_list argument should already have its address taken.  The second
     argument, however, is inout and that needs to be fixed to prevent a
     warning.  Could be casting, so need to check type too?  */
  gcc_assert (TREE_CODE (ap) == ADDR_EXPR && TREE_CODE (parmn) == ADDR_EXPR);

  /* Assuming nobody tries to change the return type.  */
  parmn = TREE_OPERAND (parmn, 0);

  return call_builtin_fn (callexp, BUILT_IN_VA_START, 2, ap, parmn);
}

/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   adds(), addu(), subs(), subu(), negs(), muls(), or mulu().  These
   intrinsics expect to take two or three arguments, the signature to which
   can be either:

	int adds (int x, int y, ref bool overflow);
	long adds (long x, long y, ref bool overflow);
	int negs (int x, ref bool overflow);
	long negs (long x, ref bool overflow);

   This performs an operation on two signed or unsigned integers, checking
   for overflow.  The overflow is sticky, meaning that a sequence of
   operations can be done and overflow need only be checked at the end.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_checkedint (intrinsic_code intrinsic, tree callexp)
{
  tree type = TREE_TYPE (callexp);
  tree x;
  tree y;
  tree overflow;
  internal_fn icode;

  /* Which variant of *_OVERFLOW should we generate?  */
  switch (intrinsic)
    {
    case INTRINSIC_ADDS:
    case INTRINSIC_ADDSL:
    case INTRINSIC_ADDU:
    case INTRINSIC_ADDUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_ADD_OVERFLOW;
      break;

    case INTRINSIC_SUBS:
    case INTRINSIC_SUBSL:
    case INTRINSIC_SUBU:
    case INTRINSIC_SUBUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_SUB_OVERFLOW;
      break;

    case INTRINSIC_MULS:
    case INTRINSIC_MULSL:
    case INTRINSIC_MULU:
    case INTRINSIC_MULUI:
    case INTRINSIC_MULUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_MUL_OVERFLOW;
      break;

    case INTRINSIC_NEGS:
    case INTRINSIC_NEGSL:
      /* The negs() intrinsic gets turned into SUB_OVERFLOW (0, y).  */
      x = fold_convert (type, integer_zero_node);
      y = CALL_EXPR_ARG (callexp, 0);
      overflow = CALL_EXPR_ARG (callexp, 1);
      icode = IFN_SUB_OVERFLOW;
      break;

    default:
      gcc_unreachable ();
    }

  tree result
    = build_call_expr_internal_loc (EXPR_LOCATION (callexp), icode,
				    build_complex_type (type), 2, x, y);

  STRIP_NOPS (overflow);
  overflow = build_deref (overflow);

  /* Assign returned result to overflow parameter, however if overflow is
     already true, maintain its value.  */
  type = TREE_TYPE (overflow);
  result = save_expr (result);

  tree exp = fold_build2 (BIT_IOR_EXPR, type, overflow,
			  fold_convert (type, imaginary_part (result)));
  exp = modify_expr (overflow, exp);

  /* Return the value of result.  */
  return compound_expr (exp, real_part (result));
}
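
/* For illustration only (not part of the original source): a call
   `adds (int.max, 1, overflow)' expands to roughly

     result = ADD_OVERFLOW (2147483647, 1),
     overflow = overflow | (bool) IMAGPART_EXPR (result),
     REALPART_EXPR (result);

   The real part carries the wrapped-around sum (int.min here) and the
   imaginary part the overflow flag, which is OR-ed into the parameter so
   that earlier overflows stay sticky.  */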

/* Expand a front-end intrinsic call to volatileLoad().  This takes one
   argument, the signature to which can be either:

	ubyte volatileLoad (ubyte* ptr);
	ushort volatileLoad (ushort* ptr);
	uint volatileLoad (uint* ptr);
	ulong volatileLoad (ulong* ptr);

   This reads a value from the memory location indicated by ptr.  Calls to
   them are guaranteed to not be removed (such as during DCE) or reordered
   in the same thread.  The original call expression is held in CALLEXP.  */

static tree
expand_volatile_load (tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree ptrtype = TREE_TYPE (ptr);
  gcc_assert (POINTER_TYPE_P (ptrtype));

  /* (T) *(volatile T *) ptr;  */
  tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
  tree result = indirect_ref (type, ptr);
  TREE_THIS_VOLATILE (result) = 1;

  return result;
}

/* Expand a front-end intrinsic call to volatileStore().  This takes two
   arguments, the signature to which can be either:

	void volatileStore (ubyte* ptr, ubyte value);
	void volatileStore (ushort* ptr, ushort value);
	void volatileStore (uint* ptr, uint value);
	void volatileStore (ulong* ptr, ulong value);

   This writes a value to the memory location indicated by ptr.  Calls to
   them are guaranteed to not be removed (such as during DCE) or reordered
   in the same thread.  The original call expression is held in CALLEXP.  */

static tree
expand_volatile_store (tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree ptrtype = TREE_TYPE (ptr);
  gcc_assert (POINTER_TYPE_P (ptrtype));

  /* *(volatile T *) ptr;  */
  tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
  tree result = indirect_ref (type, ptr);
  TREE_THIS_VOLATILE (result) = 1;

  /* (*(volatile T *) ptr) = value;  */
  tree value = CALL_EXPR_ARG (callexp, 1);
  return modify_expr (result, value);
}
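
/* For illustration only (not part of the original source): a call
   `volatileStore (p, 1)' with `uint* p' lowers to the equivalent of

     *(volatile uint *) p = 1;

   The volatile qualifier on the indirect reference is what prevents the
   access from being deleted or reordered within the thread.  */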

/* If CALLEXP is for an intrinsic, expand and return inlined compiler
   generated instructions.  Most map directly to GCC builtins; others
   require a little extra work around them.  */

tree
maybe_expand_intrinsic (tree callexp)
{
  tree callee = CALL_EXPR_FN (callexp);

  if (TREE_CODE (callee) == ADDR_EXPR)
    callee = TREE_OPERAND (callee, 0);

  if (TREE_CODE (callee) != FUNCTION_DECL)
    return callexp;

  /* Don't expand CTFE-only intrinsics outside of semantic processing.  */
  if (DECL_BUILT_IN_CTFE (callee) && !doing_semantic_analysis_p)
    return callexp;

  intrinsic_code intrinsic = DECL_INTRINSIC_CODE (callee);
  built_in_function code;

  switch (intrinsic)
    {
    case INTRINSIC_NONE:
      return callexp;

    case INTRINSIC_BSF:
    case INTRINSIC_BSF64:
      return expand_intrinsic_bsf (callexp);

    case INTRINSIC_BSR:
    case INTRINSIC_BSR64:
      return expand_intrinsic_bsr (callexp);

    case INTRINSIC_BT:
    case INTRINSIC_BT64:
    case INTRINSIC_BTC:
    case INTRINSIC_BTC64:
    case INTRINSIC_BTR:
    case INTRINSIC_BTR64:
    case INTRINSIC_BTS:
    case INTRINSIC_BTS64:
      return expand_intrinsic_bt (intrinsic, callexp);

    case INTRINSIC_POPCNT32:
    case INTRINSIC_POPCNT64:
      return expand_intrinsic_popcnt (callexp);

    case INTRINSIC_BSWAP32:
    case INTRINSIC_BSWAP64:
    case INTRINSIC_CEIL:
    case INTRINSIC_CEILF:
    case INTRINSIC_CEILL:
    case INTRINSIC_COSL:
    case INTRINSIC_EXP:
    case INTRINSIC_EXP2:
    case INTRINSIC_EXPM1:
    case INTRINSIC_FABSL:
    case INTRINSIC_FLOOR:
    case INTRINSIC_FLOORF:
    case INTRINSIC_FLOORL:
    case INTRINSIC_ISFINITE:
    case INTRINSIC_ISINFINITY:
    case INTRINSIC_ISNAN:
    case INTRINSIC_LOG:
    case INTRINSIC_LOG10:
    case INTRINSIC_LOG2:
    case INTRINSIC_RINTL:
    case INTRINSIC_RNDTOLL:
    case INTRINSIC_ROUND:
    case INTRINSIC_SINL:
    case INTRINSIC_SQRT:
    case INTRINSIC_SQRTF:
    case INTRINSIC_SQRTL:
    case INTRINSIC_TAN:
    case INTRINSIC_TRUNC:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_FMAX:
    case INTRINSIC_FMIN:
    case INTRINSIC_LDEXPL:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_FMA:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 3,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1),
			      CALL_EXPR_ARG (callexp, 2));

    case INTRINSIC_COPYSIGN:
    case INTRINSIC_COPYSIGNI:
      return expand_intrinsic_copysign (callexp);

    case INTRINSIC_POW:
      return expand_intrinsic_pow (callexp);

    case INTRINSIC_TOPREC:
    case INTRINSIC_TOPRECF:
    case INTRINSIC_TOPRECL:
      return expand_intrinsic_toprec (callexp);

    case INTRINSIC_VA_ARG:
    case INTRINSIC_C_VA_ARG:
      return expand_intrinsic_vaarg (callexp);

    case INTRINSIC_VASTART:
      return expand_intrinsic_vastart (callexp);

    case INTRINSIC_ADDS:
    case INTRINSIC_ADDSL:
    case INTRINSIC_ADDU:
    case INTRINSIC_ADDUL:
    case INTRINSIC_SUBS:
    case INTRINSIC_SUBSL:
    case INTRINSIC_SUBU:
    case INTRINSIC_SUBUL:
    case INTRINSIC_MULS:
    case INTRINSIC_MULSL:
    case INTRINSIC_MULU:
    case INTRINSIC_MULUI:
    case INTRINSIC_MULUL:
    case INTRINSIC_NEGS:
    case INTRINSIC_NEGSL:
      return expand_intrinsic_checkedint (intrinsic, callexp);

    case INTRINSIC_VLOAD8:
    case INTRINSIC_VLOAD16:
    case INTRINSIC_VLOAD32:
    case INTRINSIC_VLOAD64:
      return expand_volatile_load (callexp);

    case INTRINSIC_VSTORE8:
    case INTRINSIC_VSTORE16:
    case INTRINSIC_VSTORE32:
    case INTRINSIC_VSTORE64:
      return expand_volatile_store (callexp);

    default:
      gcc_unreachable ();
    }
}